mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-01 06:11:35 +00:00)
Merge #1810

1810: cleanup dollar handling in expressions r=matklad a=matklad

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
commit 73b4fea015

8 changed files with 62 additions and 130 deletions
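
In short: `mbe` wraps every pasted metavariable fragment in invisible L_DOLLAR/R_DOLLAR tokens. Previously the expression grammar threaded a `dollar_lvl` counter through `expr_bp` and `lhs` to track that nesting; this PR deletes the counter (and the parser's eat_l_dollars/eat_r_dollars/at_l_dollar/at_r_dollar helpers) and handles the wrapping in a single new atom, `meta_var_expr`. On the `mbe` side, `token_tree_to_macro_items` is renamed to `token_tree_to_items`, and the panicking `token_tree_to_ast_item_list` is removed in favor of the fallible path. The effect on expression-parser call sites, as a minimal before/after sketch (signatures taken from the hunks below):

    // Before: every entry point threaded the dollar-nesting counter.
    let mut dollar_lvl = 0;
    expr_bp(p, r, 1, &mut dollar_lvl);

    // After: the counter is gone; L_DOLLAR is just another atom start.
    expr_bp(p, r, 1);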
@@ -90,7 +90,7 @@ impl HirFileId {
             })
             .ok()?;
         match macro_file.macro_file_kind {
-            MacroFileKind::Items => Some(Parse::to_syntax(mbe::token_tree_to_ast_item_list(&tt))),
+            MacroFileKind::Items => mbe::token_tree_to_items(&tt).ok().map(Parse::to_syntax),
             MacroFileKind::Expr => mbe::token_tree_to_expr(&tt).ok().map(Parse::to_syntax),
         }
     }

@@ -76,8 +76,10 @@ impl RawItems {
             source_map: ImportSourceMap::default(),
         };
         if let Some(node) = db.parse_or_expand(file_id) {
-            if let Some(source_file) = ast::SourceFile::cast(node) {
+            if let Some(source_file) = ast::SourceFile::cast(node.clone()) {
                 collector.process_module(None, source_file);
+            } else if let Some(item_list) = ast::MacroItems::cast(node) {
+                collector.process_module(None, item_list);
             }
         }
         (Arc::new(collector.raw_items), Arc::new(collector.source_map))

@@ -41,8 +41,8 @@ pub enum ExpandError {
 }
 
 pub use crate::syntax_bridge::{
-    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_ast_item_list, token_tree_to_expr,
-    token_tree_to_macro_items, token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty,
+    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_expr, token_tree_to_items,
+    token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty,
 };
 
 /// This struct contains AST for a single `macro_rules` definition. What might

@@ -46,25 +46,6 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, Toke
 // * TraitItems(SmallVec<[ast::TraitItem; 1]>)
 // * ImplItems(SmallVec<[ast::ImplItem; 1]>)
 // * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
 //
-//
-
-fn token_tree_to_syntax_node<F>(tt: &tt::Subtree, f: F) -> Result<Parse<SyntaxNode>, ExpandError>
-where
-    F: Fn(&mut dyn ra_parser::TokenSource, &mut dyn ra_parser::TreeSink),
-{
-    let tokens = [tt.clone().into()];
-    let buffer = TokenBuffer::new(&tokens);
-    let mut token_source = SubtreeTokenSource::new(&buffer);
-    let mut tree_sink = TtTreeSink::new(buffer.begin());
-    f(&mut token_source, &mut tree_sink);
-    if tree_sink.roots.len() != 1 {
-        return Err(ExpandError::ConversionError);
-    }
-    //FIXME: would be cool to report errors
-    let parse = tree_sink.inner.finish();
-    Ok(parse)
-}
-
 fn fragment_to_syntax_node(
     tt: &tt::Subtree,

@@ -115,17 +96,11 @@ pub fn token_tree_to_macro_stmts(tt: &tt::Subtree) -> Result<Parse<ast::MacroStm
 }
 
 /// Parses the token tree (result of macro expansion) as a sequence of items
-pub fn token_tree_to_macro_items(tt: &tt::Subtree) -> Result<Parse<ast::MacroItems>, ExpandError> {
+pub fn token_tree_to_items(tt: &tt::Subtree) -> Result<Parse<ast::MacroItems>, ExpandError> {
     let parse = fragment_to_syntax_node(tt, Items)?;
     parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
 }
 
-/// Parses the token tree (result of macro expansion) as a sequence of items
-pub fn token_tree_to_ast_item_list(tt: &tt::Subtree) -> Parse<ast::SourceFile> {
-    let parse = token_tree_to_syntax_node(tt, ra_parser::parse).unwrap();
-    parse.cast().unwrap()
-}
-
 impl TokenMap {
     pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
         let idx = tt.0 as usize;

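With `token_tree_to_ast_item_list` gone, the bridge exposes one fallible entry point per fragment kind. A hypothetical round-trip using the signatures visible in this file's hunks (the composition itself is illustrative and assumes the crate's usual imports; it is not part of this commit):

    // Sketch: syntax node -> token tree -> reparsed item list.
    fn roundtrip_items(node: &SyntaxNode) -> Option<ast::MacroItems> {
        // Syntax -> token tree, plus a map from token ids to text ranges.
        let (subtree, _token_map) = syntax_node_to_token_tree(node)?;
        // Token tree -> items; now returns Err(ExpandError) on a bad
        // expansion instead of panicking like the removed function did.
        token_tree_to_items(&subtree).ok().map(|parse| parse.tree())
    }
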
@@ -70,7 +70,7 @@ pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
 
 pub(crate) fn expand_to_items(rules: &MacroRules, invocation: &str) -> ast::MacroItems {
     let expanded = expand(rules, invocation);
-    token_tree_to_macro_items(&expanded).unwrap().tree()
+    token_tree_to_items(&expanded).unwrap().tree()
 }
 
 #[allow(unused)]

@@ -155,8 +155,8 @@ pub(crate) fn assert_expansion(
     let expected = text_to_tokentree(&expected);
     let (expanded_tree, expected_tree) = match kind {
         MacroKind::Items => {
-            let expanded_tree = token_tree_to_macro_items(&expanded).unwrap().tree();
-            let expected_tree = token_tree_to_macro_items(&expected).unwrap().tree();
+            let expanded_tree = token_tree_to_items(&expanded).unwrap().tree();
+            let expected_tree = token_tree_to_items(&expected).unwrap().tree();
 
             (
                 debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),

@@ -410,7 +410,7 @@ fn test_expand_to_item_list() {
         ",
     );
     let expansion = expand(&rules, "structs!(Foo, Bar);");
-    let tree = token_tree_to_macro_items(&expansion).unwrap().tree();
+    let tree = token_tree_to_items(&expansion).unwrap().tree();
     assert_eq!(
         format!("{:#?}", tree.syntax()).trim(),
         r#"

@@ -14,20 +14,17 @@ const EXPR_FIRST: TokenSet = LHS_FIRST;
 
 pub(super) fn expr(p: &mut Parser) -> BlockLike {
     let r = Restrictions { forbid_structs: false, prefer_stmt: false };
-    let mut dollar_lvl = 0;
-    expr_bp(p, r, 1, &mut dollar_lvl).1
+    expr_bp(p, r, 1).1
 }
 
 pub(super) fn expr_stmt(p: &mut Parser) -> (Option<CompletedMarker>, BlockLike) {
     let r = Restrictions { forbid_structs: false, prefer_stmt: true };
-    let mut dollar_lvl = 0;
-    expr_bp(p, r, 1, &mut dollar_lvl)
+    expr_bp(p, r, 1)
 }
 
 fn expr_no_struct(p: &mut Parser) {
     let r = Restrictions { forbid_structs: true, prefer_stmt: false };
-    let mut dollar_lvl = 0;
-    expr_bp(p, r, 1, &mut dollar_lvl);
+    expr_bp(p, r, 1);
 }
 
 // test block

@@ -257,23 +254,8 @@ fn current_op(p: &Parser) -> (u8, SyntaxKind) {
 }
 
 // Parses expression with binding power of at least bp.
-fn expr_bp(
-    p: &mut Parser,
-    r: Restrictions,
-    mut bp: u8,
-    dollar_lvl: &mut usize,
-) -> (Option<CompletedMarker>, BlockLike) {
-    // `newly_dollar_open` is a flag indicated that dollar is just closed after lhs, e.g.
-    // `$1$ + a`
-    // We use this flag to skip handling it.
-    let mut newly_dollar_open = if p.at_l_dollar() {
-        *dollar_lvl += p.eat_l_dollars();
-        true
-    } else {
-        false
-    };
-
-    let mut lhs = match lhs(p, r, dollar_lvl) {
+fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>, BlockLike) {
+    let mut lhs = match lhs(p, r) {
         Some((lhs, blocklike)) => {
             // test stmt_bin_expr_ambiguity
             // fn foo() {

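For orientation: `expr_bp` is a standard Pratt loop, and everything deleted above was dollar bookkeeping bolted onto it. A self-contained sketch of the core shape over digits, `+`, and `*` (illustrative only; the real function also threads `Restrictions` and block-likeness through the loop):

    #[derive(Debug)]
    enum Expr {
        Num(i64),
        Bin(Box<Expr>, char, Box<Expr>),
    }

    fn bp(op: char) -> Option<u8> {
        match op {
            '+' => Some(1),
            '*' => Some(2),
            _ => None,
        }
    }

    // Parses an expression whose operators all have binding power >= min_bp,
    // recursing with op_bp + 1 for left associativity, like expr_bp above.
    fn parse_bp(tokens: &mut std::iter::Peekable<std::str::Chars<'_>>, min_bp: u8) -> Expr {
        // lhs: a single digit, standing in for lhs()/atom_expr().
        let mut lhs = Expr::Num(tokens.next().unwrap().to_digit(10).unwrap() as i64);
        loop {
            let (op, op_bp) = match tokens.peek().and_then(|&c| bp(c).map(|b| (c, b))) {
                Some((c, b)) if b >= min_bp => (c, b),
                _ => break, // op_bp < min_bp: let the caller bind this operator
            };
            tokens.next();
            let rhs = parse_bp(tokens, op_bp + 1);
            lhs = Expr::Bin(Box::new(lhs), op, Box::new(rhs));
        }
        lhs
    }

    // parse_bp(&mut "1+2*3".chars().peekable(), 1)
    // => Bin(Num(1), '+', Bin(Num(2), '*', Num(3)))
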
@@ -289,15 +271,6 @@ fn expr_bp(
     };
 
     loop {
-        if *dollar_lvl > 0 && p.at_r_dollar() {
-            *dollar_lvl -= p.eat_r_dollars(*dollar_lvl);
-            if !newly_dollar_open {
-                // We "pump" bp for make it highest priority
-                bp = 255;
-            }
-            newly_dollar_open = false;
-        }
-
         let is_range = p.at(T![..]) || p.at(T![..=]);
         let (op_bp, op) = current_op(p);
         if op_bp < bp {

@@ -306,7 +279,7 @@ fn expr_bp(
         let m = lhs.precede(p);
         p.bump(op);
 
-        expr_bp(p, r, op_bp + 1, dollar_lvl);
+        expr_bp(p, r, op_bp + 1);
         lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR });
     }
     (Some(lhs), BlockLike::NotBlock)

@@ -314,11 +287,7 @@ fn expr_bp(
 
 const LHS_FIRST: TokenSet = atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOT, MINUS]);
 
-fn lhs(
-    p: &mut Parser,
-    r: Restrictions,
-    dollar_lvl: &mut usize,
-) -> Option<(CompletedMarker, BlockLike)> {
+fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
     let m;
     let kind = match p.current() {
         // test ref_expr

@@ -351,7 +320,7 @@ fn lhs(
             m = p.start();
             p.bump(op);
             if p.at_ts(EXPR_FIRST) {
-                expr_bp(p, r, 2, dollar_lvl);
+                expr_bp(p, r, 2);
             }
             return Some((m.complete(p, RANGE_EXPR), BlockLike::NotBlock));
         }

@@ -367,7 +336,7 @@ fn lhs(
             return Some(postfix_expr(p, lhs, blocklike, !(r.prefer_stmt && blocklike.is_block())));
         }
     };
-    expr_bp(p, r, 255, dollar_lvl);
+    expr_bp(p, r, 255);
     Some((m.complete(p, kind), BlockLike::NotBlock))
 }

@@ -69,6 +69,7 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar
     let done = match p.current() {
         T!['('] => tuple_expr(p),
         T!['['] => array_expr(p),
+        L_DOLLAR => meta_var_expr(p),
         T![|] => lambda_expr(p),
         T![move] if la == T![|] => lambda_expr(p),
         T![async] if la == T![|] || (la == T![move] && p.nth(2) == T![|]) => lambda_expr(p),

@@ -554,3 +555,27 @@ fn box_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker {
     }
     m.complete(p, BOX_EXPR)
 }
+
+/// Expression from `$var` macro expansion, wrapped in dollars
+fn meta_var_expr(p: &mut Parser) -> CompletedMarker {
+    assert!(p.at(L_DOLLAR));
+    let m = p.start();
+    p.bump(L_DOLLAR);
+    let (completed, _is_block) =
+        expr_bp(p, Restrictions { forbid_structs: false, prefer_stmt: false }, 1);
+
+    match (completed, p.current()) {
+        (Some(it), R_DOLLAR) => {
+            p.bump(R_DOLLAR);
+            m.abandon(p);
+            it
+        }
+        _ => {
+            while !p.at(R_DOLLAR) {
+                p.bump_any()
+            }
+            p.bump(R_DOLLAR);
+            m.complete(p, ERROR)
+        }
+    }
+}

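To make the two arms of `meta_var_expr` concrete, here is how hypothetical expansion streams play out (L_DOLLAR/R_DOLLAR never occur in source text, only in macro-expanded token streams). Note the fragment's boundary is now enforced by R_DOLLAR simply not being an operator, replacing the old "pump bp to 255" trick:

    // Happy path: L_DOLLAR 1 + 2 R_DOLLAR
    //   expr_bp parses `1 + 2` and stops at R_DOLLAR (it is not an
    //   operator); the marker is abandoned, so the fragment contributes
    //   a plain BIN_EXPR with no extra wrapper node.
    //
    // Error path: L_DOLLAR struct R_DOLLAR
    //   no expression parses, so every token up to and including
    //   R_DOLLAR is bumped and the span completes as one ERROR node,
    //   keeping the rest of the stream in sync.
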
@@ -5,7 +5,7 @@ use drop_bomb::DropBomb;
 use crate::{
     event::Event,
     ParseError,
-    SyntaxKind::{self, EOF, ERROR, TOMBSTONE},
+    SyntaxKind::{self, EOF, ERROR, L_DOLLAR, R_DOLLAR, TOMBSTONE},
     TokenSet, TokenSource, T,
 };
 

@@ -211,19 +211,26 @@ impl<'t> Parser<'t> {
 
     /// Create an error node and consume the next token.
     pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) {
-        if self.at(T!['{']) || self.at(T!['}']) || self.at_ts(recovery) {
+        match self.current() {
+            T!['{'] | T!['}'] | L_DOLLAR | R_DOLLAR => {
+                self.error(message);
+                return;
+            }
+            _ => (),
+        }
+
+        if self.at_ts(recovery) {
             self.error(message);
-            return;
+        } else {
+            let m = self.start();
+            self.error(message);
+            self.bump_any();
+            m.complete(self, ERROR);
+        };
+        return;
-        }
-
-        let m = self.start();
-        self.error(message);
-        self.bump_any();
-        m.complete(self, ERROR);
     }
 
     fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) {
-        // self.eat_dollars();
 
         for _ in 0..n_raw_tokens {
             self.token_source.bump();
         }

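The recovery change follows the same principle as the grammar change: `err_recover` should never consume an invisible delimiter, for the same reason it never consumes `{` or `}`. Illustratively (hypothetical stream; my reading of the intent, not stated in the commit):

    // Stream: ... <unexpected token> R_DOLLAR ...
    // Before: R_DOLLAR is neither a brace nor necessarily in `recovery`,
    //         so it could be bumped into the ERROR node, eating the
    //         closing delimiter that meta_var_expr expects to find.
    // After:  the match arm reports the error and returns, leaving
    //         R_DOLLAR in place.
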
@@ -234,52 +241,6 @@ impl<'t> Parser<'t> {
     fn push_event(&mut self, event: Event) {
         self.events.push(event)
     }
-
-    pub(crate) fn eat_l_dollars(&mut self) -> usize {
-        let mut ate_count = 0;
-        loop {
-            match self.token_source.current().kind {
-                k @ SyntaxKind::L_DOLLAR => {
-                    self.token_source.bump();
-                    self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
-                    ate_count += 1;
-                }
-                _ => {
-                    return ate_count;
-                }
-            }
-        }
-    }
-
-    pub(crate) fn eat_r_dollars(&mut self, max_count: usize) -> usize {
-        let mut ate_count = 0;
-        loop {
-            match self.token_source.current().kind {
-                k @ SyntaxKind::R_DOLLAR => {
-                    self.token_source.bump();
-                    self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
-                    ate_count += 1;
-
-                    if max_count >= ate_count {
-                        return ate_count;
-                    }
-                }
-                _ => {
-                    return ate_count;
-                }
-            }
-        }
-    }
-
-    pub(crate) fn at_l_dollar(&self) -> bool {
-        let kind = self.token_source.current().kind;
-        (kind == SyntaxKind::L_DOLLAR)
-    }
-
-    pub(crate) fn at_r_dollar(&self) -> bool {
-        let kind = self.token_source.current().kind;
-        (kind == SyntaxKind::R_DOLLAR)
-    }
 }
 
 /// See `Parser::start`.