Test macros doing edition dependent parsing

Lukas Wirth 2024-07-19 16:39:00 +02:00
parent 2c32ee7cfa
commit 546eb6b530
8 changed files with 92 additions and 62 deletions
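The motivation: whether a token is a keyword depends on the edition of the crate the token comes from. `async`, `await`, `dyn`, and `try` are only keywords from the 2018 edition on, and `gen` only from 2024, so a macro defined in a 2015-edition crate may legitimately contain a bare `try!(...)` invocation that must still parse when the macro is expanded from a 2021-edition crate. The two new tests below cover both directions: a 2015-crate macro whose body expands to a bare `try!(..)` call ("out"), and a `try!{}` token tree written in a 2021 crate being fed into a 2015-edition matcher ("in"). For illustration only (this is not code from the diff), a 2015-style `try!` macro looks like the sketch below; on newer editions its name has to be spelled as a raw identifier because `try` is now reserved.

// Illustrative sketch of a 2015-era `try!`-style macro, written with a raw
// identifier so it also compiles on editions >= 2018 where `try` is a keyword.
macro_rules! r#try {
    ($expr:expr) => {
        match $expr {
            Ok(val) => val,
            Err(err) => return Err(From::from(err)),
        }
    };
}

fn parse_number(s: &str) -> Result<i32, std::num::ParseIntError> {
    // On edition 2015 this call could be written as plain `try!(...)`.
    let n = r#try!(s.parse::<i32>());
    Ok(n)
}

fn main() {
    assert_eq!(parse_number("42"), Ok(42));
    assert!(parse_number("not a number").is_err());
}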

View file

@@ -1921,3 +1921,59 @@ fn f() {
 "#]],
     );
 }
+
+#[test]
+fn test_edition_handling_out() {
+    check(
+        r#"
+//- /main.rs crate:main deps:old edition:2021
+macro_rules! r#try {
+    ($it:expr) => {
+        $it?
+    };
+}
+fn f() {
+    old::invoke_bare_try!(0);
+}
+//- /old.rs crate:old edition:2015
+#[macro_export]
+macro_rules! invoke_bare_try {
+    ($it:expr) => {
+        try!($it)
+    };
+}
+"#,
+        expect![[r#"
+macro_rules! r#try {
+    ($it:expr) => {
+        $it?
+    };
+}
+fn f() {
+    try!(0);
+}
+"#]],
+    );
+}
+
+#[test]
+fn test_edition_handling_in() {
+    check(
+        r#"
+//- /main.rs crate:main deps:old edition:2021
+fn f() {
+    old::parse_try_old!(try!{});
+}
+//- /old.rs crate:old edition:2015
+#[macro_export]
+macro_rules! parse_try_old {
+    ($it:expr) => {};
+}
+"#,
+        expect![[r#"
+fn f() {
+    ;
+}
+"#]],
+    );
+}

View file

@@ -348,7 +348,7 @@ fn parse_macro_expansion(
 ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
     let _p = tracing::info_span!("parse_macro_expansion").entered();
     let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-    let edition = loc.def.edition;
+    let def_edition = loc.def.edition;
     let expand_to = loc.expand_to();
     let mbe::ValueResult { value: (tt, matched_arm), err } =
         macro_expand(db, macro_file.macro_call_id, loc);
@@ -359,7 +359,7 @@ fn parse_macro_expansion(
             CowArc::Owned(it) => it,
         },
         expand_to,
-        edition,
+        def_edition,
     );
     rev_token_map.matched_arm = matched_arm;

View file

@@ -129,20 +129,7 @@ impl DeclarativeMacroExpander {
                 _ => None,
             }
         };
-        let toolchain = db.toolchain(def_crate);
-        let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
-            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
-                &base_db::Version {
-                    pre: base_db::Prerelease::EMPTY,
-                    build: base_db::BuildMetadata::EMPTY,
-                    major: version.major,
-                    minor: version.minor,
-                    patch: version.patch,
-                },
-            )
-        });
-        let edition = |ctx: SyntaxContextId| {
+        let ctx_edition = |ctx: SyntaxContextId| {
             let crate_graph = db.crate_graph();
             if ctx.is_root() {
                 crate_graph[def_crate].edition
@@ -165,7 +152,7 @@ impl DeclarativeMacroExpander {
                         DocCommentDesugarMode::Mbe,
                     );
-                    mbe::DeclarativeMacro::parse_macro_rules(&tt, edition, new_meta_vars)
+                    mbe::DeclarativeMacro::parse_macro_rules(&tt, ctx_edition)
                 }
                 None => mbe::DeclarativeMacro::from_err(mbe::ParseError::Expected(
                     "expected a token tree".into(),
@@ -193,12 +180,7 @@ impl DeclarativeMacroExpander {
                         DocCommentDesugarMode::Mbe,
                     );
-                    mbe::DeclarativeMacro::parse_macro2(
-                        args.as_ref(),
-                        &body,
-                        edition,
-                        new_meta_vars,
-                    )
+                    mbe::DeclarativeMacro::parse_macro2(args.as_ref(), &body, ctx_edition)
                 }
                 None => mbe::DeclarativeMacro::from_err(mbe::ParseError::Expected(
                     "expected a token tree".into(),

View file

@@ -25,9 +25,7 @@ fn benchmark_parse_macro_rules() {
         rules
             .values()
             .map(|it| {
-                DeclarativeMacro::parse_macro_rules(it, |_| span::Edition::CURRENT, true)
-                    .rules
-                    .len()
+                DeclarativeMacro::parse_macro_rules(it, |_| span::Edition::CURRENT).rules.len()
             })
             .sum()
     };
@@ -59,9 +57,7 @@ fn benchmark_expand_macro_rules() {
 fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
     macro_rules_fixtures_tt()
         .into_iter()
-        .map(|(id, tt)| {
-            (id, DeclarativeMacro::parse_macro_rules(&tt, |_| span::Edition::CURRENT, true))
-        })
+        .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, |_| span::Edition::CURRENT)))
         .collect()
 }

View file

@@ -144,9 +144,7 @@ impl DeclarativeMacro {
     /// The old, `macro_rules! m {}` flavor.
     pub fn parse_macro_rules(
         tt: &tt::Subtree<Span>,
-        edition: impl Copy + Fn(SyntaxContextId) -> Edition,
-        // FIXME: Remove this once we drop support for rust 1.76 (defaults to true then)
-        new_meta_vars: bool,
+        ctx_edition: impl Copy + Fn(SyntaxContextId) -> Edition,
     ) -> DeclarativeMacro {
         // Note: this parsing can be implemented using mbe machinery itself, by
         // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
@@ -156,7 +154,7 @@ impl DeclarativeMacro {
         let mut err = None;
         while src.len() > 0 {
-            let rule = match Rule::parse(edition, &mut src, new_meta_vars) {
+            let rule = match Rule::parse(ctx_edition, &mut src) {
                 Ok(it) => it,
                 Err(e) => {
                     err = Some(Box::new(e));
@@ -186,9 +184,7 @@ impl DeclarativeMacro {
     pub fn parse_macro2(
         args: Option<&tt::Subtree<Span>>,
         body: &tt::Subtree<Span>,
-        edition: impl Copy + Fn(SyntaxContextId) -> Edition,
-        // FIXME: Remove this once we drop support for rust 1.76 (defaults to true then)
-        new_meta_vars: bool,
+        ctx_edition: impl Copy + Fn(SyntaxContextId) -> Edition,
     ) -> DeclarativeMacro {
         let mut rules = Vec::new();
         let mut err = None;
@@ -197,8 +193,8 @@ impl DeclarativeMacro {
             cov_mark::hit!(parse_macro_def_simple);
             let rule = (|| {
-                let lhs = MetaTemplate::parse_pattern(edition, args)?;
-                let rhs = MetaTemplate::parse_template(edition, body, new_meta_vars)?;
+                let lhs = MetaTemplate::parse_pattern(ctx_edition, args)?;
+                let rhs = MetaTemplate::parse_template(ctx_edition, body)?;
                 Ok(crate::Rule { lhs, rhs })
             })();
@@ -211,7 +207,7 @@ impl DeclarativeMacro {
             cov_mark::hit!(parse_macro_def_rules);
             let mut src = TtIter::new(body);
             while src.len() > 0 {
-                let rule = match Rule::parse(edition, &mut src, new_meta_vars) {
+                let rule = match Rule::parse(ctx_edition, &mut src) {
                     Ok(it) => it,
                     Err(e) => {
                         err = Some(Box::new(e));
@@ -264,7 +260,6 @@ impl Rule {
     fn parse(
         edition: impl Copy + Fn(SyntaxContextId) -> Edition,
         src: &mut TtIter<'_, Span>,
-        new_meta_vars: bool,
     ) -> Result<Self, ParseError> {
         let lhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
         src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?;
@@ -272,7 +267,7 @@ impl Rule {
         let rhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
         let lhs = MetaTemplate::parse_pattern(edition, lhs)?;
-        let rhs = MetaTemplate::parse_template(edition, rhs, new_meta_vars)?;
+        let rhs = MetaTemplate::parse_template(edition, rhs)?;
         Ok(crate::Rule { lhs, rhs })
     }
@@ -367,7 +362,7 @@ fn expect_fragment<S: Copy + fmt::Debug>(
 ) -> ExpandResult<Option<tt::TokenTree<S>>> {
     use ::parser;
     let buffer = tt::buffer::TokenBuffer::from_tokens(tt_iter.as_slice());
-    let parser_input = to_parser_input::to_parser_input(&buffer);
+    let parser_input = to_parser_input::to_parser_input(edition, &buffer);
     let tree_traversal = entry_point.parse(&parser_input, edition);
     let mut cursor = buffer.begin();
     let mut error = false;

View file

@@ -31,15 +31,14 @@ impl MetaTemplate {
         edition: impl Copy + Fn(SyntaxContextId) -> Edition,
         pattern: &tt::Subtree<Span>,
     ) -> Result<Self, ParseError> {
-        MetaTemplate::parse(edition, pattern, Mode::Pattern, false)
+        MetaTemplate::parse(edition, pattern, Mode::Pattern)
     }
 
     pub(crate) fn parse_template(
         edition: impl Copy + Fn(SyntaxContextId) -> Edition,
         template: &tt::Subtree<Span>,
-        new_meta_vars: bool,
     ) -> Result<Self, ParseError> {
-        MetaTemplate::parse(edition, template, Mode::Template, new_meta_vars)
+        MetaTemplate::parse(edition, template, Mode::Template)
     }
 
     pub(crate) fn iter(&self) -> impl Iterator<Item = &Op> {
@@ -50,13 +49,12 @@ impl MetaTemplate {
         edition: impl Copy + Fn(SyntaxContextId) -> Edition,
         tt: &tt::Subtree<Span>,
         mode: Mode,
-        new_meta_vars: bool,
     ) -> Result<Self, ParseError> {
         let mut src = TtIter::new(tt);
 
         let mut res = Vec::new();
         while let Some(first) = src.peek_n(0) {
-            let op = next_op(edition, first, &mut src, mode, new_meta_vars)?;
+            let op = next_op(edition, first, &mut src, mode)?;
             res.push(op);
         }
@@ -161,7 +159,6 @@ fn next_op(
     first_peeked: &tt::TokenTree<Span>,
     src: &mut TtIter<'_, Span>,
     mode: Mode,
-    new_meta_vars: bool,
 ) -> Result<Op, ParseError> {
     let res = match first_peeked {
         tt::TokenTree::Leaf(tt::Leaf::Punct(p @ tt::Punct { char: '$', .. })) => {
@@ -181,14 +178,14 @@ fn next_op(
                 tt::TokenTree::Subtree(subtree) => match subtree.delimiter.kind {
                     tt::DelimiterKind::Parenthesis => {
                         let (separator, kind) = parse_repeat(src)?;
-                        let tokens = MetaTemplate::parse(edition, subtree, mode, new_meta_vars)?;
+                        let tokens = MetaTemplate::parse(edition, subtree, mode)?;
                         Op::Repeat { tokens, separator: separator.map(Arc::new), kind }
                     }
                     tt::DelimiterKind::Brace => match mode {
                         Mode::Template => {
-                            parse_metavar_expr(new_meta_vars, &mut TtIter::new(subtree)).map_err(
-                                |()| ParseError::unexpected("invalid metavariable expression"),
-                            )?
+                            parse_metavar_expr(&mut TtIter::new(subtree)).map_err(|()| {
+                                ParseError::unexpected("invalid metavariable expression")
+                            })?
                         }
                         Mode::Pattern => {
                            return Err(ParseError::unexpected(
@@ -260,7 +257,7 @@ fn next_op(
         tt::TokenTree::Subtree(subtree) => {
             src.next().expect("first token already peeked");
-            let tokens = MetaTemplate::parse(edition, subtree, mode, new_meta_vars)?;
+            let tokens = MetaTemplate::parse(edition, subtree, mode)?;
             Op::Subtree { tokens, delimiter: subtree.delimiter }
         }
     };
@@ -343,7 +340,7 @@ fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, Repeat
     Err(ParseError::InvalidRepeat)
 }
 
-fn parse_metavar_expr(new_meta_vars: bool, src: &mut TtIter<'_, Span>) -> Result<Op, ()> {
+fn parse_metavar_expr(src: &mut TtIter<'_, Span>) -> Result<Op, ()> {
     let func = src.expect_ident()?;
     let args = src.expect_subtree()?;
@@ -355,18 +352,14 @@ fn parse_metavar_expr(new_meta_vars: bool, src: &mut TtIter<'_, Span>) -> Result
     let op = match &func.sym {
         s if sym::ignore == *s => {
-            if new_meta_vars {
-                args.expect_dollar()?;
-            }
+            args.expect_dollar()?;
             let ident = args.expect_ident()?;
             Op::Ignore { name: ident.sym.clone(), id: ident.span }
         }
         s if sym::index == *s => Op::Index { depth: parse_depth(&mut args)? },
         s if sym::len == *s => Op::Len { depth: parse_depth(&mut args)? },
         s if sym::count == *s => {
-            if new_meta_vars {
-                args.expect_dollar()?;
-            }
+            args.expect_dollar()?;
            let ident = args.expect_ident()?;
            let depth = if try_eat_comma(&mut args) { Some(parse_depth(&mut args)?) } else { None };
            Op::Count { name: ident.sym.clone(), depth }

View file

@@ -153,7 +153,7 @@ where
         } => TokenBuffer::from_tokens(token_trees),
         _ => TokenBuffer::from_subtree(tt),
     };
-    let parser_input = to_parser_input(&buffer);
+    let parser_input = to_parser_input(edition, &buffer);
     let parser_output = entry_point.parse(&parser_input, edition);
     let mut tree_sink = TtTreeSink::new(buffer.begin());
    for event in parser_output.iter() {

View file

@@ -3,11 +3,15 @@
 use std::fmt;
 
+use span::Edition;
 use syntax::{SyntaxKind, SyntaxKind::*, T};
 use tt::buffer::TokenBuffer;
 
-pub(crate) fn to_parser_input<S: Copy + fmt::Debug>(buffer: &TokenBuffer<'_, S>) -> parser::Input {
+pub(crate) fn to_parser_input<S: Copy + fmt::Debug>(
+    edition: Edition,
+    buffer: &TokenBuffer<'_, S>,
+) -> parser::Input {
     let mut res = parser::Input::default();
 
     let mut current = buffer.begin();
@@ -60,6 +64,10 @@ pub(crate) fn to_parser_input<S: Copy + fmt::Debug>(buffer: &TokenBuffer<'_, S>)
                     "_" => res.push(T![_]),
                     i if i.starts_with('\'') => res.push(LIFETIME_IDENT),
                     _ if ident.is_raw.yes() => res.push(IDENT),
+                    "gen" if !edition.at_least_2024() => res.push(IDENT),
+                    "async" | "await" | "dyn" | "try" if !edition.at_least_2018() => {
+                        res.push(IDENT)
+                    }
                     text => match SyntaxKind::from_keyword(text) {
                         Some(kind) => res.push(kind),
                         None => {
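This last hunk is where the resolved edition actually changes parsing: when lowering a token buffer to parser input, `gen` stays a plain identifier before the 2024 edition and `async`/`await`/`dyn`/`try` stay plain identifiers before 2018. That is what lets the `try!($it)` tokens of the 2015-edition macro in the new test lex as a macro invocation rather than a keyword. Below is a standalone sketch of the same classification rule, assuming the `span::Edition` helpers and `SyntaxKind::from_keyword` used in the hunk above; the surrounding cases for `_`, lifetimes, raw identifiers, and contextual keywords are omitted.

use span::Edition;
use syntax::SyntaxKind;

// Edition-gated keywords fall back to IDENT when the token's edition predates
// the edition that reserved them; everything else defers to the keyword table.
fn classify_ident(text: &str, edition: Edition) -> SyntaxKind {
    match text {
        "gen" if !edition.at_least_2024() => SyntaxKind::IDENT,
        "async" | "await" | "dyn" | "try" if !edition.at_least_2018() => SyntaxKind::IDENT,
        _ => SyntaxKind::from_keyword(text).unwrap_or(SyntaxKind::IDENT),
    }
}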