Infallible ExpandDatabase::macro_def

Lukas Wirth 2023-07-10 16:23:29 +02:00
parent 4ff93398fd
commit d5f64f875a
10 changed files with 215 additions and 164 deletions
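
In short: `ExpandDatabase::macro_def` becomes infallible and declarative-macro compilation moves into a new `decl_macro_expander` query, so a broken `macro_rules!` definition no longer fails the query but is carried by the macro and reported at expansion time. A rough before/after sketch of the query signatures, lifted from the diff below:

    // Before: compiling a declarative macro could fail the whole query.
    fn macro_def(&self, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError>;

    // After: macro_def always succeeds; declarative macros are compiled by a
    // dedicated query and keep their parse error alongside the compiled rules.
    #[salsa::transparent]
    fn macro_def(&self, id: MacroDefId) -> TokenExpander;
    fn decl_macro_expander(
        &self,
        def_crate: CrateId,
        id: AstId<ast::Macro>,
    ) -> Arc<DeclarativeMacroExpander>;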

View file

@@ -20,8 +20,8 @@ use ::mbe::TokenMap;
 use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
 use expect_test::Expect;
 use hir_expand::{
-    db::{ExpandDatabase, TokenExpander},
-    AstId, InFile, MacroDefId, MacroDefKind, MacroFile,
+    db::{DeclarativeMacroExpander, ExpandDatabase},
+    AstId, InFile, MacroFile,
 };
 use stdx::format_to;
 use syntax::{
@@ -100,12 +100,10 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
         let call_offset = macro_.syntax().text_range().start().into();
         let file_ast_id = db.ast_id_map(source.file_id).ast_id(&macro_);
         let ast_id = AstId::new(source.file_id, file_ast_id.upcast());
-        let kind = MacroDefKind::Declarative(ast_id);
-        let macro_def = db
-            .macro_def(MacroDefId { krate, kind, local_inner: false, allow_internal_unsafe: false })
-            .unwrap();
-        if let TokenExpander::DeclarativeMacro { mac, def_site_token_map } = &*macro_def {
+        let DeclarativeMacroExpander { mac, def_site_token_map } =
+            &*db.decl_macro_expander(krate, ast_id);
+        assert_eq!(mac.err(), None);

         let tt = match &macro_ {
             ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
             ast::Macro::MacroDef(_) => unimplemented!(""),
@@ -126,7 +124,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
             format!("// call ids will be shifted by {:?}\n", mac.shift()),
         ));
-        }
     }

     for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
         let macro_call = InFile::new(source.file_id, &macro_call);

View file

@@ -1,6 +1,6 @@
 //! Defines database & queries for macro expansion.

-use base_db::{salsa, Edition, SourceDatabase};
+use base_db::{salsa, CrateId, Edition, SourceDatabase};
 use either::Either;
 use limit::Limit;
 use mbe::syntax_node_to_token_tree;
@@ -13,7 +13,7 @@ use triomphe::Arc;

 use crate::{
     ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
-    builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, BuiltinAttrExpander,
+    builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
     BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
     ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
     MacroDefKind, MacroFile, ProcMacroExpander,
@@ -28,60 +28,58 @@ use crate::{
 static TOKEN_LIMIT: Limit = Limit::new(1_048_576);

 #[derive(Debug, Clone, Eq, PartialEq)]
-pub enum TokenExpander {
-    /// Old-style `macro_rules` or the new macros 2.0
-    DeclarativeMacro { mac: mbe::DeclarativeMacro, def_site_token_map: mbe::TokenMap },
+/// Old-style `macro_rules` or the new macros 2.0
+pub struct DeclarativeMacroExpander {
+    pub mac: mbe::DeclarativeMacro,
+    pub def_site_token_map: mbe::TokenMap,
+}
+
+impl DeclarativeMacroExpander {
+    pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
+        match self.mac.err() {
+            Some(e) => ExpandResult::new(
+                tt::Subtree::empty(),
+                ExpandError::other(format!("invalid macro definition: {e}")),
+            ),
+            None => self.mac.expand(tt).map_err(Into::into),
+        }
+    }
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum TokenExpander {
+    DeclarativeMacro(Arc<DeclarativeMacroExpander>),
     /// Stuff like `line!` and `file!`.
-    Builtin(BuiltinFnLikeExpander),
+    BuiltIn(BuiltinFnLikeExpander),
     /// Built-in eagerly expanded fn-like macros (`include!`, `concat!`, etc.)
-    BuiltinEager(EagerExpander),
+    BuiltInEager(EagerExpander),
     /// `global_allocator` and such.
-    BuiltinAttr(BuiltinAttrExpander),
+    BuiltInAttr(BuiltinAttrExpander),
     /// `derive(Copy)` and such.
-    BuiltinDerive(BuiltinDeriveExpander),
+    BuiltInDerive(BuiltinDeriveExpander),
     /// The thing we love the most here in rust-analyzer -- procedural macros.
     ProcMacro(ProcMacroExpander),
 }

 impl TokenExpander {
-    fn expand(
-        &self,
-        db: &dyn ExpandDatabase,
-        id: MacroCallId,
-        tt: &tt::Subtree,
-    ) -> ExpandResult<tt::Subtree> {
-        match self {
-            TokenExpander::DeclarativeMacro { mac, .. } => mac.expand(tt).map_err(Into::into),
-            TokenExpander::Builtin(it) => it.expand(db, id, tt).map_err(Into::into),
-            TokenExpander::BuiltinEager(it) => it.expand(db, id, tt).map_err(Into::into),
-            TokenExpander::BuiltinAttr(it) => it.expand(db, id, tt),
-            TokenExpander::BuiltinDerive(_) => {
-                unreachable!("builtin derives should be expanded manually")
-            }
-            TokenExpander::ProcMacro(_) => {
-                unreachable!("ExpandDatabase::expand_proc_macro should be used for proc macros")
-            }
-        }
-    }
-
     pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
         match self {
-            TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_down(id),
-            TokenExpander::Builtin(..)
-            | TokenExpander::BuiltinEager(..)
-            | TokenExpander::BuiltinAttr(..)
-            | TokenExpander::BuiltinDerive(..)
+            TokenExpander::DeclarativeMacro(expander) => expander.mac.map_id_down(id),
+            TokenExpander::BuiltIn(..)
+            | TokenExpander::BuiltInEager(..)
+            | TokenExpander::BuiltInAttr(..)
+            | TokenExpander::BuiltInDerive(..)
             | TokenExpander::ProcMacro(..) => id,
         }
     }

     pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
         match self {
-            TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_up(id),
-            TokenExpander::Builtin(..)
-            | TokenExpander::BuiltinEager(..)
-            | TokenExpander::BuiltinAttr(..)
-            | TokenExpander::BuiltinDerive(..)
+            TokenExpander::DeclarativeMacro(expander) => expander.mac.map_id_up(id),
+            TokenExpander::BuiltIn(..)
+            | TokenExpander::BuiltInEager(..)
+            | TokenExpander::BuiltInAttr(..)
+            | TokenExpander::BuiltInDerive(..)
             | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
         }
     }
@@ -124,7 +122,14 @@ pub trait ExpandDatabase: SourceDatabase {
     fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
     /// Gets the expander for this macro. This compiles declarative macros, and
     /// just fetches procedural ones.
-    fn macro_def(&self, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError>;
+    // FIXME: Rename this
+    #[salsa::transparent]
+    fn macro_def(&self, id: MacroDefId) -> TokenExpander;
+    fn decl_macro_expander(
+        &self,
+        def_crate: CrateId,
+        id: AstId<ast::Macro>,
+    ) -> Arc<DeclarativeMacroExpander>;

     /// Expand macro call to a token tree.
     // This query is LRU cached
@@ -162,7 +167,7 @@ pub fn expand_speculative(
     token_to_map: SyntaxToken,
 ) -> Option<(SyntaxNode, SyntaxToken)> {
     let loc = db.lookup_intern_macro_call(actual_macro_call);
-    let macro_def = db.macro_def(loc.def).ok()?;
+    let macro_def = db.macro_def(loc.def);
     let token_range = token_to_map.text_range();

     // Build the subtree and token mapping for the speculative args
@@ -239,7 +244,12 @@ pub fn expand_speculative(
             let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
             expander.expand(db, actual_macro_call, &adt, &spec_args_tmap)
         }
-        _ => macro_def.expand(db, actual_macro_call, &tt),
+        MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(&tt),
+        MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
+        MacroDefKind::BuiltInEager(it, _) => {
+            it.expand(db, actual_macro_call, &tt).map_err(Into::into)
+        }
+        MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
     };

     let expand_to = macro_expand_to(db, actual_macro_call);
@@ -412,44 +422,55 @@ fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode>
     }
 }

-fn macro_def(
+fn decl_macro_expander(
     db: &dyn ExpandDatabase,
-    id: MacroDefId,
-) -> Result<Arc<TokenExpander>, mbe::ParseError> {
+    def_crate: CrateId,
+    id: AstId<ast::Macro>,
+) -> Arc<DeclarativeMacroExpander> {
+    let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
+    let (mac, def_site_token_map) = match id.to_node(db) {
+        ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
+            Some(arg) => {
+                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+                let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
+                (mac, def_site_token_map)
+            }
+            None => (
+                mbe::DeclarativeMacro::from_err(
+                    mbe::ParseError::Expected("expected a token tree".into()),
+                    is_2021,
+                ),
+                Default::default(),
+            ),
+        },
+        ast::Macro::MacroDef(macro_def) => match macro_def.body() {
+            Some(arg) => {
+                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+                let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
+                (mac, def_site_token_map)
+            }
+            None => (
+                mbe::DeclarativeMacro::from_err(
+                    mbe::ParseError::Expected("expected a token tree".into()),
+                    is_2021,
+                ),
+                Default::default(),
+            ),
+        },
+    };
+    Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
+}
+
+fn macro_def(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
     match id.kind {
         MacroDefKind::Declarative(ast_id) => {
-            let is_2021 = db.crate_graph()[id.krate].edition >= Edition::Edition2021;
-            let (mac, def_site_token_map) = match ast_id.to_node(db) {
-                ast::Macro::MacroRules(macro_rules) => {
-                    let arg = macro_rules
-                        .token_tree()
-                        .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
-                    let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
-                    let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021)?;
-                    (mac, def_site_token_map)
-                }
-                ast::Macro::MacroDef(macro_def) => {
-                    let arg = macro_def
-                        .body()
-                        .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
-                    let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
-                    let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021)?;
-                    (mac, def_site_token_map)
-                }
-            };
-            Ok(Arc::new(TokenExpander::DeclarativeMacro { mac, def_site_token_map }))
+            TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
         }
-        MacroDefKind::BuiltIn(expander, _) => Ok(Arc::new(TokenExpander::Builtin(expander))),
-        MacroDefKind::BuiltInAttr(expander, _) => {
-            Ok(Arc::new(TokenExpander::BuiltinAttr(expander)))
-        }
-        MacroDefKind::BuiltInDerive(expander, _) => {
-            Ok(Arc::new(TokenExpander::BuiltinDerive(expander)))
-        }
-        MacroDefKind::BuiltInEager(expander, ..) => {
-            Ok(Arc::new(TokenExpander::BuiltinEager(expander)))
-        }
-        MacroDefKind::ProcMacro(expander, ..) => Ok(Arc::new(TokenExpander::ProcMacro(expander))),
+        MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander),
+        MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander),
+        MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander),
+        MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander),
+        MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander),
     }
 }
@@ -483,20 +504,6 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
             (expander.expand(db, id, &adt, &tmap), Some((tmap, fixups.undo_info)))
         }
         _ => {
-            let expander = match db.macro_def(loc.def) {
-                Ok(it) => it,
-                // FIXME: We should make sure to enforce a variant that invalid macro
-                // definitions do not get expanders that could reach this call path!
-                Err(err) => {
-                    return ExpandResult {
-                        value: Arc::new(tt::Subtree {
-                            delimiter: tt::Delimiter::UNSPECIFIED,
-                            token_trees: vec![],
-                        }),
-                        err: Some(ExpandError::other(format!("invalid macro definition: {err}"))),
-                    }
-                }
-            };
             let Some(macro_arg) = db.macro_arg(id) else {
                 return ExpandResult {
                     value: Arc::new(tt::Subtree {
@@ -509,7 +516,15 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
                 };
             };
             let (arg, arg_tm, undo_info) = &*macro_arg;
-            let mut res = expander.expand(db, id, arg);
+            let mut res = match loc.def.kind {
+                MacroDefKind::Declarative(id) => {
+                    db.decl_macro_expander(loc.def.krate, id).expand(&arg)
+                }
+                MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
+                MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
+                MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
+                _ => unreachable!(),
+            };
             fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
             (res, None)
         }
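
For orientation, a hedged caller-side sketch of the new flow in this file (`db`, `krate`, `ast_id`, and `call_tt` are assumed to be in scope; this is not code from the commit):

    // The query itself can no longer fail; definition errors surface on expansion.
    let expander = db.decl_macro_expander(krate, ast_id); // Arc<DeclarativeMacroExpander>
    let ExpandResult { value: expansion, err } = expander.expand(&call_tt);
    if let Some(err) = err {
        // A broken `macro_rules!` definition arrives here as
        // "invalid macro definition: ..." instead of a query-level Err.
    }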

View file

@@ -126,7 +126,7 @@ struct HygieneInfo {
     /// The start offset of the `macro_rules!` arguments or attribute input.
     attr_input_or_mac_def_start: Option<InFile<TextSize>>,

-    macro_def: Arc<TokenExpander>,
+    macro_def: TokenExpander,
     macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
     macro_arg_shift: mbe::Shift,
     exp_map: Arc<mbe::TokenMap>,
@@ -159,9 +159,9 @@ impl HygieneInfo {
                 &self.macro_arg.1,
                 InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
             ),
-            mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def_start) {
-                (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
-                    (def_site_token_map, *tt)
+            mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
+                (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
+                    (&expander.def_site_token_map, *tt)
                 }
                 _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
             },
@@ -198,7 +198,7 @@ fn make_hygiene_info(
         _ => None,
     });

-    let macro_def = db.macro_def(loc.def).ok()?;
+    let macro_def = db.macro_def(loc.def);
     let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
     let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
         Arc::new((

View file

@@ -274,7 +274,7 @@ impl HirFileId {
         let arg_tt = loc.kind.arg(db)?;

-        let macro_def = db.macro_def(loc.def).ok()?;
+        let macro_def = db.macro_def(loc.def);
         let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
         let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
             Arc::new((
@@ -287,7 +287,7 @@ impl HirFileId {
         let def = loc.def.ast_id().left().and_then(|id| {
             let def_tt = match id.to_node(db) {
                 ast::Macro::MacroRules(mac) => mac.token_tree()?,
-                ast::Macro::MacroDef(_) if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) => {
+                ast::Macro::MacroDef(_) if matches!(macro_def, TokenExpander::BuiltInAttr(_)) => {
                     return None
                 }
                 ast::Macro::MacroDef(mac) => mac.body()?,
@@ -633,7 +633,7 @@ pub struct ExpansionInfo {
     /// The `macro_rules!` or attribute input.
     attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,

-    macro_def: Arc<TokenExpander>,
+    macro_def: TokenExpander,
     macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
     /// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg
     /// and as such we need to shift tokens if they are part of an attributes input instead of their item.
@@ -780,9 +780,9 @@ impl ExpansionInfo {
             }
             _ => match origin {
                 mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
-                mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def) {
-                    (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
-                        (def_site_token_map, tt.syntax().cloned())
+                mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def) {
+                    (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
+                        (&expander.def_site_token_map, tt.syntax().cloned())
                     }
                     _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
                 },

View file

@@ -5,9 +5,9 @@
 //! But we need this for at least LRU caching at the query level.
 pub use hir_def::db::*;

 pub use hir_expand::db::{
-    AstIdMapQuery, ExpandDatabase, ExpandDatabaseStorage, ExpandProcMacroQuery, HygieneFrameQuery,
-    InternMacroCallQuery, MacroArgTextQuery, MacroDefQuery, MacroExpandQuery,
-    ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
+    AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
+    ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgTextQuery,
+    MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
 };
 pub use hir_ty::db::*;

View file

@@ -698,9 +698,10 @@ impl Module {

 fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, m: Macro) {
     let id = macro_id_to_def_id(db.upcast(), m.id);
-    if let Err(e) = db.macro_def(id) {
+    if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_def(id) {
+        if let Some(e) = expander.mac.err() {
             let Some(ast) = id.ast_id().left() else {
-                never!("MacroDefError for proc-macro: {:?}", e);
+                never!("declarative expander for non decl-macro: {:?}", e);
                 return;
             };
             emit_def_diagnostic_(
@@ -710,6 +711,7 @@ fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>
             );
         }
     }
+}

 fn emit_def_diagnostic(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, diag: &DefDiagnostic) {
     emit_def_diagnostic_(db, acc, &diag.kind)

View file

@@ -100,7 +100,7 @@ impl RootDatabase {
             hir::db::ParseMacroExpansionQuery
             hir::db::InternMacroCallQuery
             hir::db::MacroArgTextQuery
-            hir::db::MacroDefQuery
+            hir::db::DeclMacroExpanderQuery
             hir::db::MacroExpandQuery
             hir::db::ExpandProcMacroQuery
             hir::db::HygieneFrameQuery

View file

@@ -201,7 +201,7 @@ impl RootDatabase {
             // hir_db::ParseMacroExpansionQuery
             // hir_db::InternMacroCallQuery
             hir_db::MacroArgTextQuery
-            hir_db::MacroDefQuery
+            hir_db::DeclMacroExpanderQuery
             // hir_db::MacroExpandQuery
             hir_db::ExpandProcMacroQuery
             hir_db::HygieneFrameQuery

View file

@@ -20,10 +20,7 @@ fn benchmark_parse_macro_rules() {
     let rules = macro_rules_fixtures_tt();
     let hash: usize = {
         let _pt = bench("mbe parse macro rules");
-        rules
-            .values()
-            .map(|it| DeclarativeMacro::parse_macro_rules(it, true).unwrap().rules.len())
-            .sum()
+        rules.values().map(|it| DeclarativeMacro::parse_macro_rules(it, true).rules.len()).sum()
     };
     assert_eq!(hash, 1144);
 }
@@ -53,7 +50,7 @@ fn benchmark_expand_macro_rules() {

 fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
     macro_rules_fixtures_tt()
         .into_iter()
-        .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true).unwrap()))
+        .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true)))
         .collect()
 }

View file

@@ -132,6 +132,7 @@ pub struct DeclarativeMacro {
     // This is used for correctly determining the behavior of the pat fragment
     // FIXME: This should be tracked by hygiene of the fragment identifier!
     is_2021: bool,
+    err: Option<Box<ParseError>>,
 }

 #[derive(Clone, Debug, PartialEq, Eq)]
@@ -214,65 +215,100 @@ pub enum Origin {
 }

 impl DeclarativeMacro {
+    pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro {
+        DeclarativeMacro {
+            rules: Box::default(),
+            shift: Shift(0),
+            is_2021,
+            err: Some(Box::new(err)),
+        }
+    }
+
     /// The old, `macro_rules! m {}` flavor.
-    pub fn parse_macro_rules(
-        tt: &tt::Subtree,
-        is_2021: bool,
-    ) -> Result<DeclarativeMacro, ParseError> {
+    pub fn parse_macro_rules(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro {
         // Note: this parsing can be implemented using mbe machinery itself, by
         // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
         // manually seems easier.
         let mut src = TtIter::new(tt);
         let mut rules = Vec::new();
+        let mut err = None;
+
         while src.len() > 0 {
-            let rule = Rule::parse(&mut src, true)?;
+            let rule = match Rule::parse(&mut src, true) {
+                Ok(it) => it,
+                Err(e) => {
+                    err = Some(Box::new(e));
+                    break;
+                }
+            };
             rules.push(rule);
             if let Err(()) = src.expect_char(';') {
                 if src.len() > 0 {
-                    return Err(ParseError::expected("expected `;`"));
+                    err = Some(Box::new(ParseError::expected("expected `;`")));
                 }
                 break;
             }
         }

         for Rule { lhs, .. } in &rules {
-            validate(lhs)?;
+            if let Err(e) = validate(lhs) {
+                err = Some(Box::new(e));
+                break;
+            }
         }

-        Ok(DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021 })
+        DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err }
     }

     /// The new, unstable `macro m {}` flavor.
-    pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> Result<DeclarativeMacro, ParseError> {
+    pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro {
         let mut src = TtIter::new(tt);
         let mut rules = Vec::new();
+        let mut err = None;

         if tt::DelimiterKind::Brace == tt.delimiter.kind {
             cov_mark::hit!(parse_macro_def_rules);
             while src.len() > 0 {
-                let rule = Rule::parse(&mut src, true)?;
+                let rule = match Rule::parse(&mut src, true) {
+                    Ok(it) => it,
+                    Err(e) => {
+                        err = Some(Box::new(e));
+                        break;
+                    }
+                };
                 rules.push(rule);
                 if let Err(()) = src.expect_any_char(&[';', ',']) {
                     if src.len() > 0 {
-                        return Err(ParseError::expected("expected `;` or `,` to delimit rules"));
+                        err = Some(Box::new(ParseError::expected(
+                            "expected `;` or `,` to delimit rules",
+                        )));
                     }
                     break;
                 }
             }
         } else {
             cov_mark::hit!(parse_macro_def_simple);
-            let rule = Rule::parse(&mut src, false)?;
-            if src.len() != 0 {
-                return Err(ParseError::expected("remaining tokens in macro def"));
-            }
-            rules.push(rule);
-        }
-
-        for Rule { lhs, .. } in &rules {
-            validate(lhs)?;
+            match Rule::parse(&mut src, false) {
+                Ok(rule) => {
+                    if src.len() != 0 {
+                        err = Some(Box::new(ParseError::expected("remaining tokens in macro def")));
+                    }
+                    rules.push(rule);
+                }
+                Err(e) => {
+                    err = Some(Box::new(e));
+                }
+            }
         }

-        Ok(DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021 })
+        for Rule { lhs, .. } in &rules {
+            if let Err(e) = validate(lhs) {
+                err = Some(Box::new(e));
+                break;
+            }
+        }
+
+        DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err }
     }

     pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
@@ -282,6 +318,10 @@ impl DeclarativeMacro {
         expander::expand_rules(&self.rules, &tt, self.is_2021)
     }

+    pub fn err(&self) -> Option<&ParseError> {
+        self.err.as_deref()
+    }
+
     pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
         self.shift.shift(id)
     }
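
Finally, a small usage sketch of the reshaped mbe API (the `definition_tt` and `call_tt` subtrees are assumed to exist; hypothetical caller code, not part of the commit):

    // Parsing no longer returns Result; the error travels with the macro.
    let mac = mbe::DeclarativeMacro::parse_macro_rules(&definition_tt, /* is_2021 */ true);
    if let Some(parse_err) = mac.err() {
        // Report the definition error, e.g. as a diagnostic.
    }
    // `expand` itself does not consult `err()`; in hir-expand,
    // DeclarativeMacroExpander::expand returns an empty subtree plus an
    // ExpandError when the definition failed to parse.
    let expansion = mac.expand(&call_tt); // ExpandResult<tt::Subtree>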