internal: remove unused dollars

Aleksey Kladov 2021-10-23 20:43:45 +03:00
parent 5a83d1be66
commit 485c5e6717
5 changed files with 5 additions and 53 deletions

Changed file 1 of 5

@@ -39,7 +39,6 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
     T!['('],
     T!['{'],
     T!['['],
-    L_DOLLAR,
     T![|],
     T![move],
     T![box],
@@ -59,7 +58,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
     LIFETIME_IDENT,
 ]));

-const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[T![let], R_DOLLAR]);
+const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[T![let]]);

 pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
     if let Some(m) = literal(p) {
@@ -72,7 +71,6 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
     let done = match p.current() {
         T!['('] => tuple_expr(p),
         T!['['] => array_expr(p),
-        L_DOLLAR => meta_var_expr(p),
         T![|] => closure_expr(p),
         T![move] if la == T![|] => closure_expr(p),
         T![async] if la == T![|] || (la == T![move] && p.nth(2) == T![|]) => closure_expr(p),
@@ -622,27 +620,3 @@ fn box_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker {
     }
     m.complete(p, BOX_EXPR)
 }
-
-/// Expression from `$var` macro expansion, wrapped in dollars
-fn meta_var_expr(p: &mut Parser) -> CompletedMarker {
-    assert!(p.at(L_DOLLAR));
-    let m = p.start();
-    p.bump(L_DOLLAR);
-    let expr = expr_bp(p, None, Restrictions { forbid_structs: false, prefer_stmt: false }, 1);
-    match (expr, p.current()) {
-        (Some((cm, _)), R_DOLLAR) => {
-            p.bump(R_DOLLAR);
-            // FIXME: this leaves the dollar hanging in the air...
-            m.abandon(p);
-            cm
-        }
-        _ => {
-            while !p.at(R_DOLLAR) {
-                p.bump_any();
-            }
-            p.bump(R_DOLLAR);
-            m.complete(p, ERROR)
-        }
-    }
-}

Changed file 2 of 5

@@ -19,7 +19,6 @@ pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(TokenSet::new(&[
 const TYPE_RECOVERY_SET: TokenSet = TokenSet::new(&[
     T![')'],
     T![,],
-    L_DOLLAR,
     // test_err struct_field_recover
     // struct S { f pub g: () }
     T![pub],

Changed file 3 of 5

@ -7,7 +7,7 @@ use drop_bomb::DropBomb;
use crate::{ use crate::{
event::Event, event::Event,
ParseError, ParseError,
SyntaxKind::{self, EOF, ERROR, L_DOLLAR, R_DOLLAR, TOMBSTONE}, SyntaxKind::{self, EOF, ERROR, TOMBSTONE},
TokenSet, TokenSource, T, TokenSet, TokenSource, T,
}; };
@ -215,23 +215,13 @@ impl<'t> Parser<'t> {
/// Create an error node and consume the next token. /// Create an error node and consume the next token.
pub(crate) fn err_and_bump(&mut self, message: &str) { pub(crate) fn err_and_bump(&mut self, message: &str) {
match self.current() { self.err_recover(message, TokenSet::EMPTY);
L_DOLLAR | R_DOLLAR => {
let m = self.start();
self.error(message);
self.bump_any();
m.complete(self, ERROR);
}
_ => {
self.err_recover(message, TokenSet::EMPTY);
}
}
} }
/// Create an error node and consume the next token. /// Create an error node and consume the next token.
pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) { pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) {
match self.current() { match self.current() {
T!['{'] | T!['}'] | L_DOLLAR | R_DOLLAR => { T!['{'] | T!['}'] => {
self.error(message); self.error(message);
return; return;
} }

Changed file 4 of 5

@@ -120,8 +120,6 @@ pub enum SyntaxKind {
     LIFETIME_IDENT,
     COMMENT,
     SHEBANG,
-    L_DOLLAR,
-    R_DOLLAR,
     SOURCE_FILE,
     STRUCT,
     UNION,

Changed file 5 of 5

@@ -72,16 +72,7 @@ pub(crate) const KINDS_SRC: KindsSrc = KindsSrc {
     ],
     contextual_keywords: &["auto", "default", "existential", "union", "raw", "macro_rules"],
     literals: &["INT_NUMBER", "FLOAT_NUMBER", "CHAR", "BYTE", "STRING", "BYTE_STRING"],
-    tokens: &[
-        "ERROR",
-        "IDENT",
-        "WHITESPACE",
-        "LIFETIME_IDENT",
-        "COMMENT",
-        "SHEBANG",
-        "L_DOLLAR",
-        "R_DOLLAR",
-    ],
+    tokens: &["ERROR", "IDENT", "WHITESPACE", "LIFETIME_IDENT", "COMMENT", "SHEBANG"],
     nodes: &[
         "SOURCE_FILE",
         "STRUCT",