mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-09-29 05:15:04 +00:00)
make token set a const-fn
parent b028472481
commit b82fe73d1a
8 changed files with 52 additions and 57 deletions
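
This change touches every place a TokenSet is constructed: the TokenSet::EMPTY associated constant becomes a const fn empty(), the token_set_union! macro is deleted in favor of a const fn union, and token_set! is rewritten to expand into a chain of empty() / singleton() / union() calls. Because the constructors are const fns, the parser's FIRST sets remain const items while being composed with ordinary method calls.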

@@ -150,7 +150,7 @@ fn name_r(p: &mut Parser, recovery: TokenSet) {
 }
 
 fn name(p: &mut Parser) {
-    name_r(p, TokenSet::EMPTY)
+    name_r(p, TokenSet::empty())
 }
 
 fn name_ref(p: &mut Parser) {

@@ -211,10 +211,8 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> BlockLike {
     BlockLike::NotBlock
 }
 
-const LHS_FIRST: TokenSet = token_set_union![
-    token_set![AMP, STAR, EXCL, DOTDOT, DOTDOTEQ, MINUS],
-    atom::ATOM_EXPR_FIRST,
-];
+const LHS_FIRST: TokenSet =
+    atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOTDOT, DOTDOTEQ, MINUS]);
 
 fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
     let m;

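The LHS_FIRST hunk above shows the pattern repeated through the rest of the diff: the outer token_set_union! wrapper becomes a direct .union(...) call. That only compiles because the initializer of a const item must be evaluatable at compile time, so every call in it has to be a const fn. A minimal standalone sketch of that constraint (the toy TokenSet here is an illustration, not the rust-analyzer type):

#[derive(Clone, Copy)]
struct TokenSet(u128);

impl TokenSet {
    // Being a const fn is what makes this callable inside a const initializer.
    const fn union(self, other: TokenSet) -> TokenSet {
        TokenSet(self.0 | other.0)
    }
}

const A: TokenSet = TokenSet(0b01);
const B: TokenSet = TokenSet(0b10);
// With a plain fn union, this initializer would be rejected by the compiler.
const AB: TokenSet = A.union(B);

fn main() {
    assert_eq!(AB.0, 0b11);
}
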
@@ -36,29 +36,26 @@ pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
 }
 
 // E.g. for after the break in `if break {}`, this should not match
-pub(super) const ATOM_EXPR_FIRST: TokenSet = token_set_union![
-    LITERAL_FIRST,
-    token_set![
-        L_PAREN,
-        L_CURLY,
-        L_BRACK,
-        PIPE,
-        MOVE_KW,
-        IF_KW,
-        WHILE_KW,
-        MATCH_KW,
-        UNSAFE_KW,
-        RETURN_KW,
-        IDENT,
-        SELF_KW,
-        SUPER_KW,
-        CRATE_KW,
-        COLONCOLON,
-        BREAK_KW,
-        CONTINUE_KW,
-        LIFETIME
-    ],
-];
+pub(super) const ATOM_EXPR_FIRST: TokenSet = LITERAL_FIRST.union(token_set![
+    L_PAREN,
+    L_CURLY,
+    L_BRACK,
+    PIPE,
+    MOVE_KW,
+    IF_KW,
+    WHILE_KW,
+    MATCH_KW,
+    UNSAFE_KW,
+    RETURN_KW,
+    IDENT,
+    SELF_KW,
+    SUPER_KW,
+    CRATE_KW,
+    COLONCOLON,
+    BREAK_KW,
+    CONTINUE_KW,
+    LIFETIME,
+]);
 
 const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW];
 

@@ -363,7 +360,7 @@ pub(crate) fn match_arm_list(p: &mut Parser) {
 fn match_arm(p: &mut Parser) -> BlockLike {
     let m = p.start();
     p.eat(PIPE);
-    patterns::pattern_r(p, TokenSet::EMPTY);
+    patterns::pattern_r(p, TokenSet::empty());
     while p.eat(PIPE) {
         patterns::pattern(p);
     }

@@ -61,8 +61,7 @@ fn list_(p: &mut Parser, flavor: Flavor) {
     m.complete(p, PARAM_LIST);
 }
 
-const VALUE_PARAMETER_FIRST: TokenSet =
-    token_set_union![patterns::PATTERN_FIRST, types::TYPE_FIRST,];
+const VALUE_PARAMETER_FIRST: TokenSet = patterns::PATTERN_FIRST.union(types::TYPE_FIRST);
 
 fn value_parameter(p: &mut Parser, flavor: Flavor) {
     let m = p.start();

@@ -1,10 +1,10 @@
 use super::*;
 
-pub(super) const PATTERN_FIRST: TokenSet = token_set_union![
-    token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE],
-    expressions::LITERAL_FIRST,
-    paths::PATH_FIRST,
-];
+pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST
+    .union(paths::PATH_FIRST)
+    .union(token_set![
+        REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE
+    ]);
 
 pub(super) fn pattern(p: &mut Parser) {
     pattern_r(p, PAT_RECOVERY_SET)

@@ -1,12 +1,9 @@
 use super::*;
 
-pub(super) const TYPE_FIRST: TokenSet = token_set_union![
-    token_set![
-        L_PAREN, EXCL, STAR, L_BRACK, AMP, UNDERSCORE, FN_KW, UNSAFE_KW, EXTERN_KW, FOR_KW,
-        IMPL_KW, DYN_KW, L_ANGLE,
-    ],
-    paths::PATH_FIRST,
-];
+pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![
+    L_PAREN, EXCL, STAR, L_BRACK, AMP, UNDERSCORE, FN_KW, UNSAFE_KW, EXTERN_KW, FOR_KW, IMPL_KW,
+    DYN_KW, L_ANGLE,
+]);
 
 const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA];
 

@@ -112,7 +112,7 @@ impl<'t> Parser<'t> {
 
     /// Create an error node and consume the next token.
     pub(crate) fn err_and_bump(&mut self, message: &str) {
-        self.err_recover(message, TokenSet::EMPTY);
+        self.err_recover(message, TokenSet::empty());
     }
 
     /// Create an error node and consume the next token.

@@ -1,30 +1,34 @@
 use crate::SyntaxKind;
 
 #[derive(Clone, Copy)]
-pub(crate) struct TokenSet(pub(crate) u128);
-
-fn mask(kind: SyntaxKind) -> u128 {
-    1u128 << (kind as usize)
-}
+pub(crate) struct TokenSet(u128);
 
 impl TokenSet {
-    pub const EMPTY: TokenSet = TokenSet(0);
+    pub const fn empty() -> TokenSet {
+        TokenSet(0)
+    }
+
+    pub const fn singleton(kind: SyntaxKind) -> TokenSet {
+        TokenSet(mask(kind))
+    }
+
+    pub const fn union(self, other: TokenSet) -> TokenSet {
+        TokenSet(self.0 | other.0)
+    }
 
     pub fn contains(&self, kind: SyntaxKind) -> bool {
         self.0 & mask(kind) != 0
     }
 }
 
+const fn mask(kind: SyntaxKind) -> u128 {
+    1u128 << (kind as usize)
+}
+
 #[macro_export]
 macro_rules! token_set {
-    ($($t:ident),*) => { TokenSet($(1u128 << ($t as usize))|*) };
+    ($($t:ident),*) => { TokenSet::empty()$(.union(TokenSet::singleton($t)))* };
     ($($t:ident),* ,) => { token_set!($($t),*) };
 }
 
-#[macro_export]
-macro_rules! token_set_union {
-    ($($ts:expr),*) => { TokenSet($($ts.0)|*) };
-    ($($ts:expr),* ,) => { token_set_union!($($ts),*) };
-}
-
 #[test]

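Putting the last hunk together, the new TokenSet looks roughly like the self-contained approximation below. It is an illustration rather than the rust-analyzer source: the real SyntaxKind is the crate's generated token enum, and the real token_set! takes bare kind names that are already in scope, whereas this sketch uses a three-variant stand-in and qualifies the names with SyntaxKind::.

#[derive(Clone, Copy)]
enum SyntaxKind {
    Ident,
    LParen,
    Comma,
}

#[derive(Clone, Copy)]
struct TokenSet(u128);

// Each SyntaxKind maps to one bit of the u128 bitset.
const fn mask(kind: SyntaxKind) -> u128 {
    1u128 << (kind as usize)
}

impl TokenSet {
    const fn empty() -> TokenSet {
        TokenSet(0)
    }

    const fn singleton(kind: SyntaxKind) -> TokenSet {
        TokenSet(mask(kind))
    }

    const fn union(self, other: TokenSet) -> TokenSet {
        TokenSet(self.0 | other.0)
    }

    fn contains(&self, kind: SyntaxKind) -> bool {
        self.0 & mask(kind) != 0
    }
}

// The rewritten token_set! expands to a chain of const calls, so the whole
// set is a single u128 computed at compile time.
macro_rules! token_set {
    ($($t:ident),*) => { TokenSet::empty()$(.union(TokenSet::singleton(SyntaxKind::$t)))* };
}

const EXPR_FIRST: TokenSet = token_set![Ident, LParen];

fn main() {
    assert!(EXPR_FIRST.contains(SyntaxKind::Ident));
    assert!(!EXPR_FIRST.contains(SyntaxKind::Comma));
}

The upshot is that unions such as PATTERN_FIRST and TYPE_FIRST no longer need a dedicated macro: they are built with .union(...) in const position and evaluated entirely by the compiler.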