make token set a const-fn

Aleksey Kladov 2019-01-18 11:02:30 +03:00
parent b028472481
commit b82fe73d1a
8 changed files with 52 additions and 57 deletions


@@ -150,7 +150,7 @@ fn name_r(p: &mut Parser, recovery: TokenSet) {
 }
 
 fn name(p: &mut Parser) {
-    name_r(p, TokenSet::EMPTY)
+    name_r(p, TokenSet::empty())
 }
 
 fn name_ref(p: &mut Parser) {


@@ -211,10 +211,8 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> BlockLike {
     BlockLike::NotBlock
 }
 
-const LHS_FIRST: TokenSet = token_set_union![
-    token_set![AMP, STAR, EXCL, DOTDOT, DOTDOTEQ, MINUS],
-    atom::ATOM_EXPR_FIRST,
-];
+const LHS_FIRST: TokenSet =
+    atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOTDOT, DOTDOTEQ, MINUS]);
 
 fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
     let m;
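
Note: the rewrite above works because `union` is now a `const fn`, so a derived FIRST set such as LHS_FIRST can be built with ordinary method calls inside a `const` initializer instead of the `token_set_union!` macro. A minimal standalone sketch of the idea, using placeholder `SyntaxKind` variants rather than the crate's real enum:

#[derive(Clone, Copy)]
enum SyntaxKind {
    Amp,
    Star,
    Ident,
}

#[derive(Clone, Copy)]
struct TokenSet(u128);

impl TokenSet {
    // One bit per SyntaxKind, as in the crate's u128-backed set.
    const fn singleton(kind: SyntaxKind) -> TokenSet {
        TokenSet(1u128 << (kind as usize))
    }
    // Being a `const fn`, this can be called when initializing a `const`
    // item, which is what lets LHS_FIRST drop `token_set_union!`.
    const fn union(self, other: TokenSet) -> TokenSet {
        TokenSet(self.0 | other.0)
    }
    fn contains(&self, kind: SyntaxKind) -> bool {
        self.0 & (1u128 << (kind as usize)) != 0
    }
}

// A derived FIRST set built entirely at compile time.
const UNARY_FIRST: TokenSet = TokenSet::singleton(SyntaxKind::Amp)
    .union(TokenSet::singleton(SyntaxKind::Star));

fn main() {
    assert!(UNARY_FIRST.contains(SyntaxKind::Star));
    assert!(!UNARY_FIRST.contains(SyntaxKind::Ident));
}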


@@ -36,29 +36,26 @@ pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
 }
 
 // E.g. for after the break in `if break {}`, this should not match
-pub(super) const ATOM_EXPR_FIRST: TokenSet = token_set_union![
-    LITERAL_FIRST,
-    token_set![
-        L_PAREN,
-        L_CURLY,
-        L_BRACK,
-        PIPE,
-        MOVE_KW,
-        IF_KW,
-        WHILE_KW,
-        MATCH_KW,
-        UNSAFE_KW,
-        RETURN_KW,
-        IDENT,
-        SELF_KW,
-        SUPER_KW,
-        CRATE_KW,
-        COLONCOLON,
-        BREAK_KW,
-        CONTINUE_KW,
-        LIFETIME
-    ],
-];
+pub(super) const ATOM_EXPR_FIRST: TokenSet = LITERAL_FIRST.union(token_set![
+    L_PAREN,
+    L_CURLY,
+    L_BRACK,
+    PIPE,
+    MOVE_KW,
+    IF_KW,
+    WHILE_KW,
+    MATCH_KW,
+    UNSAFE_KW,
+    RETURN_KW,
+    IDENT,
+    SELF_KW,
+    SUPER_KW,
+    CRATE_KW,
+    COLONCOLON,
+    BREAK_KW,
+    CONTINUE_KW,
+    LIFETIME,
+]);
 
 const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW];
 
@@ -363,7 +360,7 @@ pub(crate) fn match_arm_list(p: &mut Parser) {
 fn match_arm(p: &mut Parser) -> BlockLike {
     let m = p.start();
     p.eat(PIPE);
-    patterns::pattern_r(p, TokenSet::EMPTY);
+    patterns::pattern_r(p, TokenSet::empty());
     while p.eat(PIPE) {
         patterns::pattern(p);
     }


@@ -61,8 +61,7 @@ fn list_(p: &mut Parser, flavor: Flavor) {
     m.complete(p, PARAM_LIST);
 }
 
-const VALUE_PARAMETER_FIRST: TokenSet =
-    token_set_union![patterns::PATTERN_FIRST, types::TYPE_FIRST,];
+const VALUE_PARAMETER_FIRST: TokenSet = patterns::PATTERN_FIRST.union(types::TYPE_FIRST);
 
 fn value_parameter(p: &mut Parser, flavor: Flavor) {
     let m = p.start();


@@ -1,10 +1,10 @@
 use super::*;
 
-pub(super) const PATTERN_FIRST: TokenSet = token_set_union![
-    token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE],
-    expressions::LITERAL_FIRST,
-    paths::PATH_FIRST,
-];
+pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST
+    .union(paths::PATH_FIRST)
+    .union(token_set![
+        REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE
+    ]);
 
 pub(super) fn pattern(p: &mut Parser) {
     pattern_r(p, PAT_RECOVERY_SET)


@@ -1,12 +1,9 @@
 use super::*;
 
-pub(super) const TYPE_FIRST: TokenSet = token_set_union![
-    token_set![
-        L_PAREN, EXCL, STAR, L_BRACK, AMP, UNDERSCORE, FN_KW, UNSAFE_KW, EXTERN_KW, FOR_KW,
-        IMPL_KW, DYN_KW, L_ANGLE,
-    ],
-    paths::PATH_FIRST,
-];
+pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![
+    L_PAREN, EXCL, STAR, L_BRACK, AMP, UNDERSCORE, FN_KW, UNSAFE_KW, EXTERN_KW, FOR_KW, IMPL_KW,
+    DYN_KW, L_ANGLE,
+]);
 
 const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA];


@@ -112,7 +112,7 @@ impl<'t> Parser<'t> {
     /// Create an error node and consume the next token.
     pub(crate) fn err_and_bump(&mut self, message: &str) {
-        self.err_recover(message, TokenSet::EMPTY);
+        self.err_recover(message, TokenSet::empty());
     }
 
     /// Create an error node and consume the next token.


@@ -1,30 +1,34 @@
 use crate::SyntaxKind;
 
 #[derive(Clone, Copy)]
-pub(crate) struct TokenSet(pub(crate) u128);
-
-fn mask(kind: SyntaxKind) -> u128 {
-    1u128 << (kind as usize)
-}
+pub(crate) struct TokenSet(u128);
 
 impl TokenSet {
-    pub const EMPTY: TokenSet = TokenSet(0);
+    pub const fn empty() -> TokenSet {
+        TokenSet(0)
+    }
+
+    pub const fn singleton(kind: SyntaxKind) -> TokenSet {
+        TokenSet(mask(kind))
+    }
+
+    pub const fn union(self, other: TokenSet) -> TokenSet {
+        TokenSet(self.0 | other.0)
+    }
 
     pub fn contains(&self, kind: SyntaxKind) -> bool {
         self.0 & mask(kind) != 0
     }
 }
 
+const fn mask(kind: SyntaxKind) -> u128 {
+    1u128 << (kind as usize)
+}
+
 #[macro_export]
 macro_rules! token_set {
-    ($($t:ident),*) => { TokenSet($(1u128 << ($t as usize))|*) };
+    ($($t:ident),*) => { TokenSet::empty()$(.union(TokenSet::singleton($t)))* };
     ($($t:ident),* ,) => { token_set!($($t),*) };
 }
 
-#[macro_export]
-macro_rules! token_set_union {
-    ($($ts:expr),*) => { TokenSet($($ts.0)|*) };
-    ($($ts:expr),* ,) => { token_set_union!($($ts),*) };
-}
-
 #[test]
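
Note: with this change, `token_set![L_PAREN, L_CURLY]` expands to `TokenSet::empty().union(TokenSet::singleton(L_PAREN)).union(TokenSet::singleton(L_CURLY))`; since every call in that chain is a `const fn`, the expansion is still a constant expression and can initialize items such as LHS_FIRST or TYPE_FIRST. A small sanity-check sketch, again with placeholder `SyntaxKind` variants rather than the crate's real enum, comparing the old bit-shift expansion with the new const-fn chain:

#[derive(Clone, Copy)]
enum SyntaxKind {
    LParen,
    LCurly,
}

#[derive(Clone, Copy, PartialEq, Debug)]
struct TokenSet(u128);

impl TokenSet {
    const fn empty() -> TokenSet {
        TokenSet(0)
    }
    const fn singleton(kind: SyntaxKind) -> TokenSet {
        TokenSet(1u128 << (kind as usize))
    }
    const fn union(self, other: TokenSet) -> TokenSet {
        TokenSet(self.0 | other.0)
    }
}

// What the old macro produced: raw shifted bits OR-ed together.
const OLD_EXPANSION: TokenSet = TokenSet(
    (1u128 << (SyntaxKind::LParen as usize)) | (1u128 << (SyntaxKind::LCurly as usize)),
);

// What the new macro produces: a chain of const-fn calls.
const NEW_EXPANSION: TokenSet = TokenSet::empty()
    .union(TokenSet::singleton(SyntaxKind::LParen))
    .union(TokenSet::singleton(SyntaxKind::LCurly));

fn main() {
    // Both formulations describe the same bitset.
    assert_eq!(OLD_EXPANSION, NEW_EXPANSION);
}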