diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs
index c87fb6a17d..63b5022ae8 100644
--- a/crates/hir_expand/src/db.rs
+++ b/crates/hir_expand/src/db.rs
@@ -28,10 +28,8 @@ static TOKEN_LIMIT: Limit = Limit::new(524_288);
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum TokenExpander {
-    /// Old-style `macro_rules`.
-    MacroRules { mac: mbe::MacroRules, def_site_token_map: mbe::TokenMap },
-    /// AKA macros 2.0.
-    MacroDef { mac: mbe::MacroDef, def_site_token_map: mbe::TokenMap },
+    /// Old-style `macro_rules` or the new macros 2.0
+    DeclarativeMacro { mac: mbe::DeclarativeMacro, def_site_token_map: mbe::TokenMap },
     /// Stuff like `line!` and `file!`.
     Builtin(BuiltinFnLikeExpander),
     /// `global_allocator` and such.
@@ -50,8 +48,7 @@ impl TokenExpander {
         tt: &tt::Subtree,
     ) -> mbe::ExpandResult<tt::Subtree> {
         match self {
-            TokenExpander::MacroRules { mac, .. } => mac.expand(tt),
-            TokenExpander::MacroDef { mac, .. } => mac.expand(tt),
+            TokenExpander::DeclarativeMacro { mac, .. } => mac.expand(tt),
             TokenExpander::Builtin(it) => it.expand(db, id, tt),
             TokenExpander::BuiltinAttr(it) => it.expand(db, id, tt),
             TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt),
@@ -66,8 +63,7 @@ impl TokenExpander {
 
     pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
         match self {
-            TokenExpander::MacroRules { mac, .. } => mac.map_id_down(id),
-            TokenExpander::MacroDef { mac, .. } => mac.map_id_down(id),
+            TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_down(id),
             TokenExpander::Builtin(..)
             | TokenExpander::BuiltinAttr(..)
             | TokenExpander::BuiltinDerive(..)
@@ -77,8 +73,7 @@ impl TokenExpander {
 
     pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
         match self {
-            TokenExpander::MacroRules { mac, .. } => mac.map_id_up(id),
-            TokenExpander::MacroDef { mac, .. } => mac.map_id_up(id),
+            TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_up(id),
             TokenExpander::Builtin(..)
             | TokenExpander::BuiltinAttr(..)
             | TokenExpander::BuiltinDerive(..)
@@ -368,24 +363,27 @@ fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
 
 fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError> {
     match id.kind {
-        MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) {
-            ast::Macro::MacroRules(macro_rules) => {
-                let arg = macro_rules
-                    .token_tree()
-                    .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
-                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
-                let mac = mbe::MacroRules::parse(&tt)?;
-                Ok(Arc::new(TokenExpander::MacroRules { mac, def_site_token_map }))
-            }
-            ast::Macro::MacroDef(macro_def) => {
-                let arg = macro_def
-                    .body()
-                    .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
-                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
-                let mac = mbe::MacroDef::parse(&tt)?;
-                Ok(Arc::new(TokenExpander::MacroDef { mac, def_site_token_map }))
-            }
-        },
+        MacroDefKind::Declarative(ast_id) => {
+            let (mac, def_site_token_map) = match ast_id.to_node(db) {
+                ast::Macro::MacroRules(macro_rules) => {
+                    let arg = macro_rules
+                        .token_tree()
+                        .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
+                    let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+                    let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt)?;
+                    (mac, def_site_token_map)
+                }
+                ast::Macro::MacroDef(macro_def) => {
+                    let arg = macro_def
+                        .body()
+                        .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
+                    let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+                    let mac = mbe::DeclarativeMacro::parse_macro2(&tt)?;
+                    (mac, def_site_token_map)
+                }
+            };
+            Ok(Arc::new(TokenExpander::DeclarativeMacro { mac, def_site_token_map }))
+        }
         MacroDefKind::BuiltIn(expander, _) => Ok(Arc::new(TokenExpander::Builtin(expander))),
         MacroDefKind::BuiltInAttr(expander, _) => {
             Ok(Arc::new(TokenExpander::BuiltinAttr(expander)))
diff --git a/crates/hir_expand/src/hygiene.rs b/crates/hir_expand/src/hygiene.rs
index b2879e37c3..51f14d684c 100644
--- a/crates/hir_expand/src/hygiene.rs
+++ b/crates/hir_expand/src/hygiene.rs
@@ -160,11 +160,9 @@ impl HygieneInfo {
                 InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
             ),
             mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def_start) {
-                (
-                    TokenExpander::MacroDef { def_site_token_map, .. }
-                    | TokenExpander::MacroRules { def_site_token_map, .. },
-                    Some(tt),
-                ) => (def_site_token_map, *tt),
+                (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
+                    (def_site_token_map, *tt)
+                }
                 _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
             },
         },
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs
index 1fc74cb3b0..b831ad26b4 100644
--- a/crates/hir_expand/src/lib.rs
+++ b/crates/hir_expand/src/lib.rs
@@ -468,11 +468,9 @@ impl ExpansionInfo {
             _ => match origin {
                 mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
                 mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def) {
-                    (
-                        TokenExpander::MacroRules { def_site_token_map, .. }
-                        | TokenExpander::MacroDef { def_site_token_map, .. },
-                        Some(tt),
-                    ) => (def_site_token_map, tt.syntax().cloned()),
+                    (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
+                        (def_site_token_map, tt.syntax().cloned())
+                    }
                     _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
                 },
             },
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
index cd5d76be3c..a10f0e834d 100644
--- a/crates/mbe/src/benchmark.rs
+++ b/crates/mbe/src/benchmark.rs
@@ -9,7 +9,7 @@ use test_utils::{bench, bench_fixture, skip_slow_tests};
 
 use crate::{
     parser::{Op, RepeatKind, Separator},
-    syntax_node_to_token_tree, MacroRules,
+    syntax_node_to_token_tree, DeclarativeMacro,
 };
 
 #[test]
@@ -20,7 +20,7 @@ fn benchmark_parse_macro_rules() {
     let rules = macro_rules_fixtures_tt();
     let hash: usize = {
         let _pt = bench("mbe parse macro rules");
-        rules.values().map(|it| MacroRules::parse(it).unwrap().rules.len()).sum()
+        rules.values().map(|it| DeclarativeMacro::parse_macro_rules(it).unwrap().rules.len()).sum()
     };
     assert_eq!(hash, 1144);
 }
@@ -47,10 +47,10 @@ fn benchmark_expand_macro_rules() {
     assert_eq!(hash, 69413);
 }
 
-fn macro_rules_fixtures() -> FxHashMap<String, MacroRules> {
+fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
     macro_rules_fixtures_tt()
         .into_iter()
-        .map(|(id, tt)| (id, MacroRules::parse(&tt).unwrap()))
+        .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt).unwrap()))
         .collect()
 }
 
@@ -71,7 +71,7 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
 }
 
 /// Generate random invocation fixtures from rules
-fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt::Subtree)> {
+fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(String, tt::Subtree)> {
     let mut seed = 123456789;
     let mut res = Vec::new();
 
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 6cd084eaea..498a1b3323 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -83,15 +83,7 @@ pub use crate::{
 /// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident`
 /// and `$()*` have special meaning (see `Var` and `Repeat` data structures)
 #[derive(Clone, Debug, PartialEq, Eq)]
-pub struct MacroRules {
-    rules: Vec<Rule>,
-    /// Highest id of the token we have in TokenMap
-    shift: Shift,
-}
-
-/// For Macro 2.0
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct MacroDef {
+pub struct DeclarativeMacro {
     rules: Vec<Rule>,
     /// Highest id of the token we have in TokenMap
     shift: Shift,
@@ -176,8 +168,9 @@ pub enum Origin {
     Call,
 }
 
-impl MacroRules {
-    pub fn parse(tt: &tt::Subtree) -> Result<MacroRules, ParseError> {
+impl DeclarativeMacro {
+    /// The old, `macro_rules! m {}` flavor.
+    pub fn parse_macro_rules(tt: &tt::Subtree) -> Result<DeclarativeMacro, ParseError> {
         // Note: this parsing can be implemented using mbe machinery itself, by
         // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
         // manually seems easier.
@@ -198,30 +191,11 @@ impl MacroRules {
             validate(&rule.lhs)?;
         }
 
-        Ok(MacroRules { rules, shift: Shift::new(tt) })
+        Ok(DeclarativeMacro { rules, shift: Shift::new(tt) })
     }
 
-    pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
-        // apply shift
-        let mut tt = tt.clone();
-        self.shift.shift_all(&mut tt);
-        expander::expand_rules(&self.rules, &tt)
-    }
-
-    pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
-        self.shift.shift(id)
-    }
-
-    pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, Origin) {
-        match self.shift.unshift(id) {
-            Some(id) => (id, Origin::Call),
-            None => (id, Origin::Def),
-        }
-    }
-}
-
-impl MacroDef {
-    pub fn parse(tt: &tt::Subtree) -> Result<MacroDef, ParseError> {
+    /// The new, unstable `macro m {}` flavor.
+    pub fn parse_macro2(tt: &tt::Subtree) -> Result<DeclarativeMacro, ParseError> {
         let mut src = TtIter::new(tt);
         let mut rules = Vec::new();
 
@@ -251,7 +225,7 @@ impl MacroDef {
             validate(&rule.lhs)?;
         }
 
-        Ok(MacroDef { rules, shift: Shift::new(tt) })
+        Ok(DeclarativeMacro { rules, shift: Shift::new(tt) })
     }
 
     pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
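
For reference, here is a minimal sketch (not part of the diff) of how the merged `DeclarativeMacro` type is meant to be used, based only on the signatures shown above. The helper name `parse_and_expand` is made up for illustration, and the exact re-exports are assumed from the call sites in this patch:

```rust
use mbe::{DeclarativeMacro, ExpandResult, ParseError};

/// Hypothetical helper: parse the body of a `macro_rules!`-style definition
/// and expand it against one invocation's token tree.
fn parse_and_expand(
    def: &tt::Subtree,
    call: &tt::Subtree,
) -> Result<ExpandResult<tt::Subtree>, ParseError> {
    // `parse_macro_rules` covers `macro_rules! m { ... }`; callers with the
    // unstable `macro m { ... }` flavor would use `parse_macro2` instead.
    let mac = DeclarativeMacro::parse_macro_rules(def)?;
    // Expansion (including the token-id shift) is shared by both flavors now.
    Ok(mac.expand(call))
}
```

The point of the change is visible here: downstream code no longer has to pick between `MacroRules` and `MacroDef`; the flavor only matters at parse time, and everything after that (`expand`, `map_id_down`, `map_id_up`) goes through the single `DeclarativeMacro` type.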