Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-28 12:54:58 +00:00
move parser to a separate crate
This commit is contained in:
parent 18b0c509f7
commit d334b5a1db
24 changed files with 91 additions and 18 deletions
@@ -1,128 +0,0 @@
//! This module provides a way to construct a `File`.
//! It is intended to be completely decoupled from the
//! parser, so as to allow the tree representation and the
//! parser algorithm to evolve independently.
//!
//! The `TreeSink` trait is the bridge between the parser and the
//! tree builder: the parser produces a stream of events like
//! `start node` and `finish node`, and `FileBuilder` converts
//! this stream to a real tree.
use std::mem;

use crate::{
    SyntaxKind::{self, *},
    parsing::{ParseError, TreeSink},
};

/// `Parser` produces a flat list of `Event`s.
/// They are converted to a tree-structure in
/// a separate pass, via `TreeBuilder`.
#[derive(Debug)]
pub(crate) enum Event {
    /// This event signifies the start of the node.
    /// It should be either abandoned (in which case the
    /// `kind` is `TOMBSTONE`, and the event is ignored),
    /// or completed via a `Finish` event.
    ///
    /// All tokens between a `Start` and a `Finish` would
    /// become the children of the respective node.
    ///
    /// For left-recursive syntactic constructs, the parser produces
    /// a child node before it sees a parent. `forward_parent`
    /// saves the position of the current event's parent.
    ///
    /// Consider this path
    ///
    /// foo::bar
    ///
    /// The events for it would look like this:
    ///
    ///
    /// START(PATH) IDENT('foo') FINISH START(PATH) COLONCOLON IDENT('bar') FINISH
    /// |                          /\
    /// |                          |
    /// +------forward-parent------+
    ///
    /// And the tree would look like this
    ///
    ///    +--PATH---------+
    ///    |   |           |
    ///    |   |           |
    ///    |  '::'       'bar'
    ///    |
    ///   PATH
    ///    |
    ///   'foo'
    ///
    /// See also `CompletedMarker::precede`.
    Start {
        kind: SyntaxKind,
        forward_parent: Option<u32>,
    },

    /// Complete the previous `Start` event
    Finish,

    /// Produce a single leaf-element.
    /// `n_raw_tokens` is used to glue complex contextual tokens.
    /// For example, the lexer tokenizes `>>` as `>`, `>`, and
    /// `n_raw_tokens = 2` is used to produce a single `>>`.
    Token {
        kind: SyntaxKind,
        n_raw_tokens: u8,
    },

    Error {
        msg: ParseError,
    },
}

impl Event {
    pub(crate) fn tombstone() -> Self {
        Event::Start { kind: TOMBSTONE, forward_parent: None }
    }
}

/// Generate the syntax tree by replaying the list of events into the `TreeSink`.
pub(super) fn process(sink: &mut impl TreeSink, mut events: Vec<Event>) {
    let mut forward_parents = Vec::new();

    for i in 0..events.len() {
        match mem::replace(&mut events[i], Event::tombstone()) {
            Event::Start { kind: TOMBSTONE, .. } => (),

            Event::Start { kind, forward_parent } => {
                // For events [A, B, C], B is A's forward_parent and C is B's forward_parent;
                // in normal control flow the parent-child relation is `A -> B -> C`,
                // while with the magic forward_parent it is recorded as `C <- B <- A`.

                // append `A` into parents.
                forward_parents.push(kind);
                let mut idx = i;
                let mut fp = forward_parent;
                while let Some(fwd) = fp {
                    idx += fwd as usize;
                    // append `A`'s forward_parent `B`
                    fp = match mem::replace(&mut events[idx], Event::tombstone()) {
                        Event::Start { kind, forward_parent } => {
                            forward_parents.push(kind);
                            forward_parent
                        }
                        _ => unreachable!(),
                    };
                    // append `B`'s forward_parent `C` in the next stage.
                }

                for (j, kind) in forward_parents.drain(..).rev().enumerate() {
                    let is_root_node = i == 0 && j == 0;
                    sink.start_branch(kind, is_root_node);
                }
            }
            Event::Finish => sink.finish_branch(i == events.len() - 1),
            Event::Token { kind, n_raw_tokens } => {
                sink.leaf(kind, n_raw_tokens);
            }
            Event::Error { msg } => sink.error(msg),
        }
    }
}
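For readers following the events-to-tree hand-off above, here is a minimal, hypothetical `TreeSink` implementation that just prints the event stream as an indented outline. It assumes the trait in `parsing` exposes exactly the four methods that `process` calls (`start_branch`, `finish_branch`, `leaf`, `error`); the trait definition itself is not part of this diff, so the signatures below are an assumption, not the crate's API.

// A sketch only: `PrintSink` is invented for illustration and is not part of this commit.
struct PrintSink {
    depth: usize,
}

impl TreeSink for PrintSink {
    fn start_branch(&mut self, kind: SyntaxKind, _is_root: bool) {
        // A `Start` event opens a new interior node.
        println!("{}START {:?}", "  ".repeat(self.depth), kind);
        self.depth += 1;
    }
    fn finish_branch(&mut self, _is_last: bool) {
        // A `Finish` event closes the most recently started node.
        self.depth -= 1;
        println!("{}FINISH", "  ".repeat(self.depth));
    }
    fn leaf(&mut self, kind: SyntaxKind, n_raw_tokens: u8) {
        // A `Token` event attaches `n_raw_tokens` raw tokens as one leaf.
        println!("{}TOKEN {:?} (x{})", "  ".repeat(self.depth), kind, n_raw_tokens);
    }
    fn error(&mut self, msg: ParseError) {
        println!("{}ERROR {:?}", "  ".repeat(self.depth), msg);
    }
}

Driving it would amount to `process(&mut PrintSink { depth: 0 }, events)`, which makes the flat `Event` list and its forward-parent fix-ups visible as a nested outline.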
@@ -1,204 +0,0 @@
//! This is the actual "grammar" of the Rust language.
//!
//! Each function in this module and its children corresponds
//! to a production of the formal grammar. Submodules roughly
//! correspond to different *areas* of the grammar. By convention,
//! each submodule starts with a `use super::*` import and exports
//! "public" productions via `pub(super)`.
//!
//! See the docs for `Parser` to learn about the API available to the grammar,
//! and see the docs for `Event` to learn how this actually manages to
//! produce parse trees.
//!
//! Code in this module also contains inline tests, which start with
//! a `// test name-of-the-test` comment and look like this:
//!
//! ```
//! // test function_with_zero_parameters
//! // fn foo() {}
//! ```
//!
//! After adding a new inline test, run `cargo collect-tests` to extract
//! it as a standalone test fixture into `tests/data/parser/inline`, and
//! run `cargo test` once to create the "gold" value.
//!
//! Coding convention: rules like `where_clause` always produce either a
//! node or an error, while rules like `opt_where_clause` may produce nothing.
//! Non-opt rules typically start with `assert!(p.at(FIRST_TOKEN))`; the
//! caller is responsible for branching on the first token.
mod attributes;
mod expressions;
mod items;
mod params;
mod paths;
mod patterns;
mod type_args;
mod type_params;
mod types;

use crate::{
    SyntaxKind::{self, *},
    parsing::{
        token_set::TokenSet,
        parser::{CompletedMarker, Marker, Parser}
    },
};

pub(super) fn root(p: &mut Parser) {
    let m = p.start();
    p.eat(SHEBANG);
    items::mod_contents(p, false);
    m.complete(p, SOURCE_FILE);
}

pub(super) fn reparser(
    node: SyntaxKind,
    first_child: Option<SyntaxKind>,
    parent: Option<SyntaxKind>,
) -> Option<fn(&mut Parser)> {
    let res = match node {
        BLOCK => expressions::block,
        NAMED_FIELD_DEF_LIST => items::named_field_def_list,
        NAMED_FIELD_LIST => items::named_field_list,
        ENUM_VARIANT_LIST => items::enum_variant_list,
        MATCH_ARM_LIST => items::match_arm_list,
        USE_TREE_LIST => items::use_tree_list,
        EXTERN_ITEM_LIST => items::extern_item_list,
        TOKEN_TREE if first_child? == L_CURLY => items::token_tree,
        ITEM_LIST => match parent? {
            IMPL_BLOCK => items::impl_item_list,
            TRAIT_DEF => items::trait_item_list,
            MODULE => items::mod_item_list,
            _ => return None,
        },
        _ => return None,
    };
    Some(res)
}

#[derive(Clone, Copy, PartialEq, Eq)]
enum BlockLike {
    Block,
    NotBlock,
}

impl BlockLike {
    fn is_block(self) -> bool {
        self == BlockLike::Block
    }
}

fn opt_visibility(p: &mut Parser) {
    match p.current() {
        PUB_KW => {
            let m = p.start();
            p.bump();
            if p.at(L_PAREN) {
                match p.nth(1) {
                    // test crate_visibility
                    // pub(crate) struct S;
                    // pub(self) struct S;
                    // pub(super) struct S;
                    CRATE_KW | SELF_KW | SUPER_KW => {
                        p.bump();
                        p.bump();
                        p.expect(R_PAREN);
                    }
                    IN_KW => {
                        p.bump();
                        p.bump();
                        paths::use_path(p);
                        p.expect(R_PAREN);
                    }
                    _ => (),
                }
            }
            m.complete(p, VISIBILITY);
        }
        // test crate_keyword_vis
        // crate fn main() { }
        CRATE_KW => {
            let m = p.start();
            p.bump();
            m.complete(p, VISIBILITY);
        }
        _ => (),
    }
}

fn opt_alias(p: &mut Parser) {
    if p.at(AS_KW) {
        let m = p.start();
        p.bump();
        name(p);
        m.complete(p, ALIAS);
    }
}

fn abi(p: &mut Parser) {
    assert!(p.at(EXTERN_KW));
    let abi = p.start();
    p.bump();
    match p.current() {
        STRING | RAW_STRING => p.bump(),
        _ => (),
    }
    abi.complete(p, ABI);
}

fn opt_fn_ret_type(p: &mut Parser) -> bool {
    if p.at(THIN_ARROW) {
        let m = p.start();
        p.bump();
        types::type_(p);
        m.complete(p, RET_TYPE);
        true
    } else {
        false
    }
}

fn name_r(p: &mut Parser, recovery: TokenSet) {
    if p.at(IDENT) {
        let m = p.start();
        p.bump();
        m.complete(p, NAME);
    } else {
        p.err_recover("expected a name", recovery);
    }
}

fn name(p: &mut Parser) {
    name_r(p, TokenSet::empty())
}

fn name_ref(p: &mut Parser) {
    if p.at(IDENT) {
        let m = p.start();
        p.bump();
        m.complete(p, NAME_REF);
    } else {
        p.err_and_bump("expected identifier");
    }
}

fn error_block(p: &mut Parser, message: &str) {
    go(p, Some(message));
    fn go(p: &mut Parser, message: Option<&str>) {
        assert!(p.at(L_CURLY));
        let m = p.start();
        if let Some(message) = message {
            p.error(message);
        }
        p.bump();
        while !p.at(EOF) && !p.at(R_CURLY) {
            match p.current() {
                L_CURLY => go(p, None),
                _ => p.bump(),
            }
        }
        p.eat(R_CURLY);
        m.complete(p, ERROR);
    }
}
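As a small sketch of the coding convention the module docs describe, here is a hypothetical `ret_type` / `opt_ret_type` pair (the real rule in this grammar is `opt_fn_ret_type` above; the pair below is invented purely to illustrate the naming convention): the non-opt rule asserts the FIRST token and always produces a node, while the `opt_` rule may produce nothing and tells the caller whether it did.

// test hypothetical_ret_type
// fn foo() -> i32 {}
fn ret_type(p: &mut Parser) {
    // Non-opt rule: the caller has already checked the first token.
    assert!(p.at(THIN_ARROW));
    let m = p.start();
    p.bump();
    types::type_(p);
    m.complete(p, RET_TYPE);
}

fn opt_ret_type(p: &mut Parser) -> bool {
    // Opt rule: may produce nothing; branches on the first token itself.
    if p.at(THIN_ARROW) {
        ret_type(p);
        true
    } else {
        false
    }
}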
@ -1,31 +0,0 @@
|
|||
use super::*;
|
||||
|
||||
pub(super) fn inner_attributes(p: &mut Parser) {
|
||||
while p.current() == POUND && p.nth(1) == EXCL {
|
||||
attribute(p, true)
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn outer_attributes(p: &mut Parser) {
|
||||
while p.at(POUND) {
|
||||
attribute(p, false)
|
||||
}
|
||||
}
|
||||
|
||||
fn attribute(p: &mut Parser, inner: bool) {
|
||||
let attr = p.start();
|
||||
assert!(p.at(POUND));
|
||||
p.bump();
|
||||
|
||||
if inner {
|
||||
assert!(p.at(EXCL));
|
||||
p.bump();
|
||||
}
|
||||
|
||||
if p.at(L_BRACK) {
|
||||
items::token_tree(p);
|
||||
} else {
|
||||
p.error("expected `[`");
|
||||
}
|
||||
attr.complete(p, ATTR);
|
||||
}
|
|
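For reference, the two entry points above correspond to the two attribute forms in Rust source: `inner_attributes` consumes `#![...]` (a pound followed by a bang), while `outer_attributes` consumes plain `#[...]`. A tiny illustration of the input shapes each loop accepts:

// Inner attribute: matched by `inner_attributes` (POUND followed by EXCL).
#![allow(dead_code)]

// Outer attribute: matched by `outer_attributes` (POUND not followed by EXCL).
#[derive(Debug)]
struct Example;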
@@ -1,473 +0,0 @@
mod atom;

pub(crate) use self::atom::match_arm_list;
pub(super) use self::atom::{literal, LITERAL_FIRST};
use super::*;

const EXPR_FIRST: TokenSet = LHS_FIRST;

pub(super) fn expr(p: &mut Parser) -> BlockLike {
    let r = Restrictions { forbid_structs: false, prefer_stmt: false };
    expr_bp(p, r, 1)
}

pub(super) fn expr_stmt(p: &mut Parser) -> BlockLike {
    let r = Restrictions { forbid_structs: false, prefer_stmt: true };
    expr_bp(p, r, 1)
}

fn expr_no_struct(p: &mut Parser) {
    let r = Restrictions { forbid_structs: true, prefer_stmt: false };
    expr_bp(p, r, 1);
}

// test block
// fn a() {}
// fn b() { let _ = 1; }
// fn c() { 1; 2; }
// fn d() { 1; 2 }
pub(crate) fn block(p: &mut Parser) {
    if !p.at(L_CURLY) {
        p.error("expected a block");
        return;
    }
    let m = p.start();
    p.bump();
    // This is checked by a validator
    attributes::inner_attributes(p);

    while !p.at(EOF) && !p.at(R_CURLY) {
        match p.current() {
            // test nocontentexpr
            // fn foo(){
            //     ;;;some_expr();;;;{;;;};;;;Ok(())
            // }
            SEMI => p.bump(),
            _ => {
                // test block_items
                // fn a() { fn b() {} }
                let m = p.start();
                let has_attrs = p.at(POUND);
                attributes::outer_attributes(p);
                if p.at(LET_KW) {
                    let_stmt(p, m);
                } else {
                    match items::maybe_item(p, items::ItemFlavor::Mod) {
                        items::MaybeItem::Item(kind) => {
                            m.complete(p, kind);
                        }
                        items::MaybeItem::Modifiers => {
                            m.abandon(p);
                            p.error("expected an item");
                        }
                        // test pub_expr
                        // fn foo() { pub 92; } //FIXME
                        items::MaybeItem::None => {
                            if has_attrs {
                                m.abandon(p);
                                p.error(
                                    "expected a let statement or an item after attributes in block",
                                );
                            } else {
                                let is_blocklike = expressions::expr_stmt(p) == BlockLike::Block;
                                if p.at(R_CURLY) {
                                    m.abandon(p);
                                } else {
                                    // test no_semi_after_block
                                    // fn foo() {
                                    //     if true {}
                                    //     loop {}
                                    //     match () {}
                                    //     while true {}
                                    //     for _ in () {}
                                    //     {}
                                    //     {}
                                    //     macro_rules! test {
                                    //          () => {}
                                    //     }
                                    //     test!{}
                                    // }
                                    if is_blocklike {
                                        p.eat(SEMI);
                                    } else {
                                        p.expect(SEMI);
                                    }
                                    m.complete(p, EXPR_STMT);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    p.expect(R_CURLY);
    m.complete(p, BLOCK);

    // test let_stmt;
    // fn foo() {
    //     let a;
    //     let b: i32;
    //     let c = 92;
    //     let d: i32 = 92;
    // }
    fn let_stmt(p: &mut Parser, m: Marker) {
        assert!(p.at(LET_KW));
        p.bump();
        patterns::pattern(p);
        if p.at(COLON) {
            types::ascription(p);
        }
        if p.eat(EQ) {
            expressions::expr(p);
        }
        p.expect(SEMI);
        m.complete(p, LET_STMT);
    }
}

#[derive(Clone, Copy)]
struct Restrictions {
    forbid_structs: bool,
    prefer_stmt: bool,
}

enum Op {
    Simple,
    Composite(SyntaxKind, u8),
}

fn current_op(p: &Parser) -> (u8, Op) {
    if let Some(t) = p.current3() {
        match t {
            (L_ANGLE, L_ANGLE, EQ) => return (1, Op::Composite(SHLEQ, 3)),
            (R_ANGLE, R_ANGLE, EQ) => return (1, Op::Composite(SHREQ, 3)),
            _ => (),
        }
    }

    if let Some(t) = p.current2() {
        match t {
            (PLUS, EQ) => return (1, Op::Composite(PLUSEQ, 2)),
            (MINUS, EQ) => return (1, Op::Composite(MINUSEQ, 2)),
            (STAR, EQ) => return (1, Op::Composite(STAREQ, 2)),
            (SLASH, EQ) => return (1, Op::Composite(SLASHEQ, 2)),
            (PIPE, EQ) => return (1, Op::Composite(PIPEEQ, 2)),
            (AMP, EQ) => return (1, Op::Composite(AMPEQ, 2)),
            (CARET, EQ) => return (1, Op::Composite(CARETEQ, 2)),
            (PIPE, PIPE) => return (3, Op::Composite(PIPEPIPE, 2)),
            (AMP, AMP) => return (4, Op::Composite(AMPAMP, 2)),
            (L_ANGLE, EQ) => return (5, Op::Composite(LTEQ, 2)),
            (R_ANGLE, EQ) => return (5, Op::Composite(GTEQ, 2)),
            (L_ANGLE, L_ANGLE) => return (9, Op::Composite(SHL, 2)),
            (R_ANGLE, R_ANGLE) => return (9, Op::Composite(SHR, 2)),
            _ => (),
        }
    }

    let bp = match p.current() {
        EQ => 1,
        DOTDOT | DOTDOTEQ => 2,
        EQEQ | NEQ | L_ANGLE | R_ANGLE => 5,
        PIPE => 6,
        CARET => 7,
        AMP => 8,
        MINUS | PLUS => 10,
        STAR | SLASH | PERCENT => 11,
        _ => 0,
    };
    (bp, Op::Simple)
}

// Parses expression with binding power of at least bp.
fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> BlockLike {
    let mut lhs = match lhs(p, r) {
        Some((lhs, blocklike)) => {
            // test stmt_bin_expr_ambiguity
            // fn foo() {
            //     let _ = {1} & 2;
            //     {1} &2;
            // }
            if r.prefer_stmt && blocklike.is_block() {
                return BlockLike::Block;
            }
            lhs
        }
        None => return BlockLike::NotBlock,
    };

    loop {
        let is_range = p.current() == DOTDOT || p.current() == DOTDOTEQ;
        let (op_bp, op) = current_op(p);
        if op_bp < bp {
            break;
        }
        let m = lhs.precede(p);
        match op {
            Op::Simple => p.bump(),
            Op::Composite(kind, n) => {
                p.bump_compound(kind, n);
            }
        }
        expr_bp(p, r, op_bp + 1);
        lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR });
    }
    BlockLike::NotBlock
}

const LHS_FIRST: TokenSet =
    atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOTDOT, DOTDOTEQ, MINUS]);

fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
    let m;
    let kind = match p.current() {
        // test ref_expr
        // fn foo() {
        //     let _ = &1;
        //     let _ = &mut &f();
        // }
        AMP => {
            m = p.start();
            p.bump();
            p.eat(MUT_KW);
            REF_EXPR
        }
        // test unary_expr
        // fn foo() {
        //     **&1;
        //     !!true;
        //     --1;
        // }
        STAR | EXCL | MINUS => {
            m = p.start();
            p.bump();
            PREFIX_EXPR
        }
        // test full_range_expr
        // fn foo() { xs[..]; }
        DOTDOT | DOTDOTEQ => {
            m = p.start();
            p.bump();
            if p.at_ts(EXPR_FIRST) {
                expr_bp(p, r, 2);
            }
            return Some((m.complete(p, RANGE_EXPR), BlockLike::NotBlock));
        }
        _ => {
            let (lhs, blocklike) = atom::atom_expr(p, r)?;
            return Some((
                postfix_expr(p, lhs, !(r.prefer_stmt && blocklike.is_block())),
                blocklike,
            ));
        }
    };
    expr_bp(p, r, 255);
    Some((m.complete(p, kind), BlockLike::NotBlock))
}

fn postfix_expr(
    p: &mut Parser,
    mut lhs: CompletedMarker,
    // Calls are disallowed if the type is a block and we prefer statements because the call cannot be disambiguated from a tuple
    // E.g. `while true {break}();` is parsed as
    // `while true {break}; ();`
    mut allow_calls: bool,
) -> CompletedMarker {
    loop {
        lhs = match p.current() {
            // test stmt_postfix_expr_ambiguity
            // fn foo() {
            //     match () {
            //         _ => {}
            //         () => {}
            //         [] => {}
            //     }
            // }
            L_PAREN if allow_calls => call_expr(p, lhs),
            L_BRACK if allow_calls => index_expr(p, lhs),
            DOT if p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON) => {
                method_call_expr(p, lhs)
            }
            DOT => field_expr(p, lhs),
            // test postfix_range
            // fn foo() { let x = 1..; }
            DOTDOT | DOTDOTEQ if !EXPR_FIRST.contains(p.nth(1)) => {
                let m = lhs.precede(p);
                p.bump();
                m.complete(p, RANGE_EXPR)
            }
            QUESTION => try_expr(p, lhs),
            AS_KW => cast_expr(p, lhs),
            _ => break,
        };
        allow_calls = true
    }
    lhs
}

// test call_expr
// fn foo() {
//     let _ = f();
//     let _ = f()(1)(1, 2,);
//     let _ = f(<Foo>::func());
//     f(<Foo as Trait>::func());
// }
fn call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
    assert!(p.at(L_PAREN));
    let m = lhs.precede(p);
    arg_list(p);
    m.complete(p, CALL_EXPR)
}

// test index_expr
// fn foo() {
//     x[1][2];
// }
fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
    assert!(p.at(L_BRACK));
    let m = lhs.precede(p);
    p.bump();
    expr(p);
    p.expect(R_BRACK);
    m.complete(p, INDEX_EXPR)
}

// test method_call_expr
// fn foo() {
//     x.foo();
//     y.bar::<T>(1, 2,);
// }
fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
    assert!(p.at(DOT) && p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON));
    let m = lhs.precede(p);
    p.bump();
    name_ref(p);
    type_args::opt_type_arg_list(p, true);
    if p.at(L_PAREN) {
        arg_list(p);
    }
    m.complete(p, METHOD_CALL_EXPR)
}

// test field_expr
// fn foo() {
//     x.foo;
//     x.0.bar;
// }
fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
    assert!(p.at(DOT));
    let m = lhs.precede(p);
    p.bump();
    if p.at(IDENT) {
        name_ref(p)
    } else if p.at(INT_NUMBER) {
        p.bump()
    } else {
        p.error("expected field name or number")
    }
    m.complete(p, FIELD_EXPR)
}

// test try_expr
// fn foo() {
//     x?;
// }
fn try_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
    assert!(p.at(QUESTION));
    let m = lhs.precede(p);
    p.bump();
    m.complete(p, TRY_EXPR)
}

// test cast_expr
// fn foo() {
//     82 as i32;
//     81 as i8 + 1;
//     79 as i16 - 1;
// }
fn cast_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
    assert!(p.at(AS_KW));
    let m = lhs.precede(p);
    p.bump();
    // Use type_no_bounds(), because cast expressions are not
    // allowed to have bounds.
    types::type_no_bounds(p);
    m.complete(p, CAST_EXPR)
}

fn arg_list(p: &mut Parser) {
    assert!(p.at(L_PAREN));
    let m = p.start();
    p.bump();
    while !p.at(R_PAREN) && !p.at(EOF) {
        if !p.at_ts(EXPR_FIRST) {
            p.error("expected expression");
            break;
        }
        expr(p);
        if !p.at(R_PAREN) && !p.expect(COMMA) {
            break;
        }
    }
    p.eat(R_PAREN);
    m.complete(p, ARG_LIST);
}

// test path_expr
// fn foo() {
//     let _ = a;
//     let _ = a::b;
//     let _ = ::a::<b>;
//     let _ = format!();
// }
fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) {
    assert!(paths::is_path_start(p) || p.at(L_ANGLE));
    let m = p.start();
    paths::expr_path(p);
    match p.current() {
        L_CURLY if !r.forbid_structs => {
            named_field_list(p);
            (m.complete(p, STRUCT_LIT), BlockLike::NotBlock)
        }
        EXCL => {
            let block_like = items::macro_call_after_excl(p);
            return (m.complete(p, MACRO_CALL), block_like);
        }
        _ => (m.complete(p, PATH_EXPR), BlockLike::NotBlock),
    }
}

// test struct_lit
// fn foo() {
//     S {};
//     S { x, y: 32, };
//     S { x, y: 32, ..Default::default() };
// }
pub(crate) fn named_field_list(p: &mut Parser) {
    assert!(p.at(L_CURLY));
    let m = p.start();
    p.bump();
    while !p.at(EOF) && !p.at(R_CURLY) {
        match p.current() {
            IDENT => {
                let m = p.start();
                name_ref(p);
                if p.eat(COLON) {
                    expr(p);
                }
                m.complete(p, NAMED_FIELD);
            }
            DOTDOT => {
                p.bump();
                expr(p);
            }
            L_CURLY => error_block(p, "expected a field"),
            _ => p.err_and_bump("expected identifier"),
        }
        if !p.at(R_CURLY) {
            p.expect(COMMA);
        }
    }
    p.expect(R_CURLY);
    m.complete(p, NAMED_FIELD_LIST);
}
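The `current_op` table and the `expr_bp` loop above implement classic precedence climbing (Pratt parsing): every operator gets a binding power, and the recursive call `expr_bp(p, r, op_bp + 1)` parses the right operand at a strictly higher power, which makes binary operators left-associative while still letting `*` bind tighter than `+`. The following standalone sketch, with an invented single-character token model instead of the `Parser` type, shows the same control flow on digits and `+`/`-`/`*`/`/` only; it is an illustration of the algorithm, not code from this crate.

// A self-contained illustration of the binding-power loop in `expr_bp`.
#[derive(Debug)]
enum Expr {
    Num(i64),
    Bin(char, Box<Expr>, Box<Expr>),
}

fn bp_of(op: char) -> u8 {
    match op {
        '+' | '-' => 10, // same powers as MINUS | PLUS above
        '*' | '/' => 11, // STAR | SLASH
        _ => 0,          // not an operator: stops the loop
    }
}

fn expr_bp(tokens: &mut std::iter::Peekable<std::vec::IntoIter<char>>, bp: u8) -> Expr {
    // A single digit stands in for `lhs(p, r)`.
    let c = tokens.next().unwrap();
    let mut lhs = Expr::Num(c.to_digit(10).unwrap() as i64);
    loop {
        let op = match tokens.peek() {
            Some(&op) if bp_of(op) >= bp => op,
            _ => break, // mirrors `if op_bp < bp { break; }`
        };
        tokens.next(); // mirrors `p.bump()`
        let rhs = expr_bp(tokens, bp_of(op) + 1); // mirrors `expr_bp(p, r, op_bp + 1)`
        lhs = Expr::Bin(op, Box::new(lhs), Box::new(rhs));
    }
    lhs
}

fn main() {
    let mut toks = "1+2*3".chars().collect::<Vec<_>>().into_iter().peekable();
    // Prints a tree equivalent to 1 + (2 * 3), because '*' binds tighter than '+'.
    println!("{:?}", expr_bp(&mut toks, 1));
}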
@@ -1,475 +0,0 @@
use super::*;

// test expr_literals
// fn foo() {
//     let _ = true;
//     let _ = false;
//     let _ = 1;
//     let _ = 2.0;
//     let _ = b'a';
//     let _ = 'b';
//     let _ = "c";
//     let _ = r"d";
//     let _ = b"e";
//     let _ = br"f";
// }
pub(crate) const LITERAL_FIRST: TokenSet = token_set![
    TRUE_KW,
    FALSE_KW,
    INT_NUMBER,
    FLOAT_NUMBER,
    BYTE,
    CHAR,
    STRING,
    RAW_STRING,
    BYTE_STRING,
    RAW_BYTE_STRING
];

pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
    if !p.at_ts(LITERAL_FIRST) {
        return None;
    }
    let m = p.start();
    p.bump();
    Some(m.complete(p, LITERAL))
}

// E.g. for after the break in `if break {}`, this should not match
pub(super) const ATOM_EXPR_FIRST: TokenSet =
    LITERAL_FIRST.union(paths::PATH_FIRST).union(token_set![
        L_PAREN,
        L_CURLY,
        L_BRACK,
        PIPE,
        MOVE_KW,
        IF_KW,
        WHILE_KW,
        MATCH_KW,
        UNSAFE_KW,
        RETURN_KW,
        BREAK_KW,
        CONTINUE_KW,
        LIFETIME,
    ]);

const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW];

pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
    if let Some(m) = literal(p) {
        return Some((m, BlockLike::NotBlock));
    }
    if paths::is_path_start(p) || p.at(L_ANGLE) {
        return Some(path_expr(p, r));
    }
    let la = p.nth(1);
    let done = match p.current() {
        L_PAREN => tuple_expr(p),
        L_BRACK => array_expr(p),
        PIPE => lambda_expr(p),
        MOVE_KW if la == PIPE => lambda_expr(p),
        IF_KW => if_expr(p),

        LOOP_KW => loop_expr(p, None),
        FOR_KW => for_expr(p, None),
        WHILE_KW => while_expr(p, None),
        LIFETIME if la == COLON => {
            let m = p.start();
            label(p);
            match p.current() {
                LOOP_KW => loop_expr(p, Some(m)),
                FOR_KW => for_expr(p, Some(m)),
                WHILE_KW => while_expr(p, Some(m)),
                L_CURLY => block_expr(p, Some(m)),
                _ => {
                    // test_err misplaced_label_err
                    // fn main() {
                    //     'loop: impl
                    // }
                    p.error("expected a loop");
                    m.complete(p, ERROR);
                    return None;
                }
            }
        }

        MATCH_KW => match_expr(p),
        UNSAFE_KW if la == L_CURLY => {
            let m = p.start();
            p.bump();
            block_expr(p, Some(m))
        }
        L_CURLY => block_expr(p, None),
        RETURN_KW => return_expr(p),
        CONTINUE_KW => continue_expr(p),
        BREAK_KW => break_expr(p, r),
        _ => {
            p.err_recover("expected expression", EXPR_RECOVERY_SET);
            return None;
        }
    };
    let blocklike = match done.kind() {
        IF_EXPR | WHILE_EXPR | FOR_EXPR | LOOP_EXPR | MATCH_EXPR | BLOCK_EXPR => BlockLike::Block,
        _ => BlockLike::NotBlock,
    };
    Some((done, blocklike))
}

// test tuple_expr
// fn foo() {
//     ();
//     (1);
//     (1,);
// }
fn tuple_expr(p: &mut Parser) -> CompletedMarker {
    assert!(p.at(L_PAREN));
    let m = p.start();
    p.expect(L_PAREN);

    let mut saw_comma = false;
    let mut saw_expr = false;
    while !p.at(EOF) && !p.at(R_PAREN) {
        saw_expr = true;
        if !p.at_ts(EXPR_FIRST) {
            p.error("expected expression");
            break;
        }
        expr(p);
        if !p.at(R_PAREN) {
            saw_comma = true;
            p.expect(COMMA);
        }
    }
    p.expect(R_PAREN);
    m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR })
}

// test array_expr
// fn foo() {
//     [];
//     [1];
//     [1, 2,];
//     [1; 2];
// }
fn array_expr(p: &mut Parser) -> CompletedMarker {
    assert!(p.at(L_BRACK));
    let m = p.start();
    p.bump();
    if p.eat(R_BRACK) {
        return m.complete(p, ARRAY_EXPR);
    }
    expr(p);
    if p.eat(SEMI) {
        expr(p);
        p.expect(R_BRACK);
        return m.complete(p, ARRAY_EXPR);
    }
    while !p.at(EOF) && !p.at(R_BRACK) {
        p.expect(COMMA);
        if p.at(R_BRACK) {
            break;
        }
        if !p.at_ts(EXPR_FIRST) {
            p.error("expected expression");
            break;
        }
        expr(p);
    }
    p.expect(R_BRACK);
    m.complete(p, ARRAY_EXPR)
}

// test lambda_expr
// fn foo() {
//     || ();
//     || -> i32 { 92 };
//     |x| x;
//     move |x: i32,| x;
// }
fn lambda_expr(p: &mut Parser) -> CompletedMarker {
    assert!(p.at(PIPE) || (p.at(MOVE_KW) && p.nth(1) == PIPE));
    let m = p.start();
    p.eat(MOVE_KW);
    params::param_list_opt_types(p);
    if opt_fn_ret_type(p) {
        if !p.at(L_CURLY) {
            p.error("expected `{`");
        }
    }
    expr(p);
    m.complete(p, LAMBDA_EXPR)
}

// test if_expr
// fn foo() {
//     if true {};
//     if true {} else {};
//     if true {} else if false {} else {};
//     if S {};
// }
fn if_expr(p: &mut Parser) -> CompletedMarker {
    assert!(p.at(IF_KW));
    let m = p.start();
    p.bump();
    cond(p);
    block(p);
    if p.at(ELSE_KW) {
        p.bump();
        if p.at(IF_KW) {
            if_expr(p);
        } else {
            block(p);
        }
    }
    m.complete(p, IF_EXPR)
}

// test label
// fn foo() {
//     'a: loop {}
//     'b: while true {}
//     'c: for x in () {}
// }
fn label(p: &mut Parser) {
    assert!(p.at(LIFETIME) && p.nth(1) == COLON);
    let m = p.start();
    p.bump();
    p.bump();
    m.complete(p, LABEL);
}

// test loop_expr
// fn foo() {
//     loop {};
// }
fn loop_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker {
    assert!(p.at(LOOP_KW));
    let m = m.unwrap_or_else(|| p.start());
    p.bump();
    block(p);
    m.complete(p, LOOP_EXPR)
}

// test while_expr
// fn foo() {
//     while true {};
//     while let Some(x) = it.next() {};
// }
fn while_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker {
    assert!(p.at(WHILE_KW));
    let m = m.unwrap_or_else(|| p.start());
    p.bump();
    cond(p);
    block(p);
    m.complete(p, WHILE_EXPR)
}

// test for_expr
// fn foo() {
//     for x in [] {};
// }
fn for_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker {
    assert!(p.at(FOR_KW));
    let m = m.unwrap_or_else(|| p.start());
    p.bump();
    patterns::pattern(p);
    p.expect(IN_KW);
    expr_no_struct(p);
    block(p);
    m.complete(p, FOR_EXPR)
}

// test cond
// fn foo() { if let Some(_) = None {} }
fn cond(p: &mut Parser) {
    let m = p.start();
    if p.eat(LET_KW) {
        patterns::pattern(p);
        p.expect(EQ);
    }
    expr_no_struct(p);
    m.complete(p, CONDITION);
}

// test match_expr
// fn foo() {
//     match () { };
//     match S {};
// }
fn match_expr(p: &mut Parser) -> CompletedMarker {
    assert!(p.at(MATCH_KW));
    let m = p.start();
    p.bump();
    expr_no_struct(p);
    if p.at(L_CURLY) {
        match_arm_list(p);
    } else {
        p.error("expected `{`")
    }
    m.complete(p, MATCH_EXPR)
}

pub(crate) fn match_arm_list(p: &mut Parser) {
    assert!(p.at(L_CURLY));
    let m = p.start();
    p.eat(L_CURLY);

    // test match_arms_inner_attribute
    // fn foo() {
    //     match () {
    //         #![doc("Inner attribute")]
    //         #![doc("Can be")]
    //         #![doc("Stacked")]
    //         _ => (),
    //     }
    // }
    attributes::inner_attributes(p);

    while !p.at(EOF) && !p.at(R_CURLY) {
        if p.at(L_CURLY) {
            error_block(p, "expected match arm");
            continue;
        }

        // test match_arms_outer_attributes
        // fn foo() {
        //     match () {
        //         #[cfg(feature = "some")]
        //         _ => (),
        //         #[cfg(feature = "other")]
        //         _ => (),
        //         #[cfg(feature = "many")]
        //         #[cfg(feature = "attributes")]
        //         #[cfg(feature = "before")]
        //         _ => (),
        //     }
        // }
        attributes::outer_attributes(p);

        // test match_arms_commas
        // fn foo() {
        //     match () {
        //         _ => (),
        //         _ => {}
        //         _ => ()
        //     }
        // }
        if match_arm(p).is_block() {
            p.eat(COMMA);
        } else if !p.at(R_CURLY) {
            p.expect(COMMA);
        }
    }
    p.expect(R_CURLY);
    m.complete(p, MATCH_ARM_LIST);
}

// test match_arm
// fn foo() {
//     match () {
//         _ => (),
//         _ if Test > Test{field: 0} => (),
//         X | Y if Z => (),
//         | X | Y if Z => (),
//         | X => (),
//     };
// }
fn match_arm(p: &mut Parser) -> BlockLike {
    let m = p.start();
    p.eat(PIPE);
    patterns::pattern_r(p, TokenSet::empty());
    while p.eat(PIPE) {
        patterns::pattern(p);
    }
    if p.at(IF_KW) {
        match_guard(p);
    }
    p.expect(FAT_ARROW);
    let ret = expr_stmt(p);
    m.complete(p, MATCH_ARM);
    ret
}

// test match_guard
// fn foo() {
//     match () {
//         _ if foo => (),
//     }
// }
fn match_guard(p: &mut Parser) -> CompletedMarker {
    assert!(p.at(IF_KW));
    let m = p.start();
    p.bump();
    expr(p);
    m.complete(p, MATCH_GUARD)
}

// test block_expr
// fn foo() {
//     {};
//     unsafe {};
//     'label: {};
// }
fn block_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker {
    assert!(p.at(L_CURLY));
    let m = m.unwrap_or_else(|| p.start());
    block(p);
    m.complete(p, BLOCK_EXPR)
}

// test return_expr
// fn foo() {
//     return;
//     return 92;
// }
fn return_expr(p: &mut Parser) -> CompletedMarker {
    assert!(p.at(RETURN_KW));
    let m = p.start();
    p.bump();
    if p.at_ts(EXPR_FIRST) {
        expr(p);
    }
    m.complete(p, RETURN_EXPR)
}

// test continue_expr
// fn foo() {
//     loop {
//         continue;
//         continue 'l;
//     }
// }
fn continue_expr(p: &mut Parser) -> CompletedMarker {
    assert!(p.at(CONTINUE_KW));
    let m = p.start();
    p.bump();
    p.eat(LIFETIME);
    m.complete(p, CONTINUE_EXPR)
}

// test break_expr
// fn foo() {
//     loop {
//         break;
//         break 'l;
//         break 92;
//         break 'l 92;
//     }
// }
fn break_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker {
    assert!(p.at(BREAK_KW));
    let m = p.start();
    p.bump();
    p.eat(LIFETIME);
    // test break_ambiguity
    // fn foo(){
    //     if break {}
    //     while break {}
    //     for i in break {}
    //     match break {}
    // }
    if p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(L_CURLY)) {
        expr(p);
    }
    m.complete(p, BREAK_EXPR)
}
@@ -1,392 +0,0 @@
mod consts;
mod nominal;
mod traits;
mod use_item;

pub(crate) use self::{
    expressions::{match_arm_list, named_field_list},
    nominal::{enum_variant_list, named_field_def_list},
    traits::{impl_item_list, trait_item_list},
    use_item::use_tree_list,
};
use super::*;

// test mod_contents
// fn foo() {}
// macro_rules! foo {}
// foo::bar!();
// super::baz! {}
// struct S;
pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) {
    attributes::inner_attributes(p);
    while !p.at(EOF) && !(stop_on_r_curly && p.at(R_CURLY)) {
        item_or_macro(p, stop_on_r_curly, ItemFlavor::Mod)
    }
}

pub(super) enum ItemFlavor {
    Mod,
    Trait,
}

pub(super) const ITEM_RECOVERY_SET: TokenSet = token_set![
    FN_KW, STRUCT_KW, ENUM_KW, IMPL_KW, TRAIT_KW, CONST_KW, STATIC_KW, LET_KW, MOD_KW, PUB_KW,
    CRATE_KW
];

pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool, flavor: ItemFlavor) {
    let m = p.start();
    attributes::outer_attributes(p);
    match maybe_item(p, flavor) {
        MaybeItem::Item(kind) => {
            m.complete(p, kind);
        }
        MaybeItem::None => {
            if paths::is_path_start(p) {
                match macro_call(p) {
                    BlockLike::Block => (),
                    BlockLike::NotBlock => {
                        p.expect(SEMI);
                    }
                }
                m.complete(p, MACRO_CALL);
            } else {
                m.abandon(p);
                if p.at(L_CURLY) {
                    error_block(p, "expected an item");
                } else if p.at(R_CURLY) && !stop_on_r_curly {
                    let e = p.start();
                    p.error("unmatched `}`");
                    p.bump();
                    e.complete(p, ERROR);
                } else if !p.at(EOF) && !p.at(R_CURLY) {
                    p.err_and_bump("expected an item");
                } else {
                    p.error("expected an item");
                }
            }
        }
        MaybeItem::Modifiers => {
            p.error("expected fn, trait or impl");
            m.complete(p, ERROR);
        }
    }
}

pub(super) enum MaybeItem {
    None,
    Item(SyntaxKind),
    Modifiers,
}

pub(super) fn maybe_item(p: &mut Parser, flavor: ItemFlavor) -> MaybeItem {
    opt_visibility(p);
    if let Some(kind) = items_without_modifiers(p) {
        return MaybeItem::Item(kind);
    }

    let mut has_mods = false;
    // modifiers
    has_mods |= p.eat(CONST_KW);

    // test_err unsafe_block_in_mod
    // fn foo(){} unsafe { } fn bar(){}
    if p.at(UNSAFE_KW) && p.nth(1) != L_CURLY {
        p.eat(UNSAFE_KW);
        has_mods = true;
    }
    if p.at(EXTERN_KW) {
        has_mods = true;
        abi(p);
    }
    if p.at(IDENT) && p.at_contextual_kw("auto") && p.nth(1) == TRAIT_KW {
        p.bump_remap(AUTO_KW);
        has_mods = true;
    }
    if p.at(IDENT) && p.at_contextual_kw("default") && p.nth(1) == IMPL_KW {
        p.bump_remap(DEFAULT_KW);
        has_mods = true;
    }

    // items
    let kind = match p.current() {
        // test extern_fn
        // extern fn foo() {}

        // test const_fn
        // const fn foo() {}

        // test const_unsafe_fn
        // const unsafe fn foo() {}

        // test unsafe_extern_fn
        // unsafe extern "C" fn foo() {}

        // test unsafe_fn
        // unsafe fn foo() {}
        FN_KW => {
            fn_def(p, flavor);
            FN_DEF
        }

        // test unsafe_trait
        // unsafe trait T {}

        // test auto_trait
        // auto trait T {}

        // test unsafe_auto_trait
        // unsafe auto trait T {}
        TRAIT_KW => {
            traits::trait_def(p);
            TRAIT_DEF
        }

        // test unsafe_impl
        // unsafe impl Foo {}

        // test default_impl
        // default impl Foo {}

        // test unsafe_default_impl
        // unsafe default impl Foo {}
        IMPL_KW => {
            traits::impl_block(p);
            IMPL_BLOCK
        }
        _ => {
            return if has_mods { MaybeItem::Modifiers } else { MaybeItem::None };
        }
    };

    MaybeItem::Item(kind)
}

fn items_without_modifiers(p: &mut Parser) -> Option<SyntaxKind> {
    let la = p.nth(1);
    let kind = match p.current() {
        // test extern_crate
        // extern crate foo;
        EXTERN_KW if la == CRATE_KW => {
            extern_crate_item(p);
            EXTERN_CRATE_ITEM
        }
        TYPE_KW => {
            type_def(p);
            TYPE_DEF
        }
        MOD_KW => {
            mod_item(p);
            MODULE
        }
        STRUCT_KW => {
            // test struct_items
            // struct Foo;
            // struct Foo {}
            // struct Foo();
            // struct Foo(String, usize);
            // struct Foo {
            //     a: i32,
            //     b: f32,
            // }
            nominal::struct_def(p, STRUCT_KW);
            if p.at(SEMI) {
                p.err_and_bump(
                    "expected item, found `;`\n\
                     consider removing this semicolon",
                );
            }
            STRUCT_DEF
        }
        IDENT if p.at_contextual_kw("union") && p.nth(1) == IDENT => {
            // test union_items
            // union Foo {}
            // union Foo {
            //     a: i32,
            //     b: f32,
            // }
            nominal::struct_def(p, UNION_KW);
            STRUCT_DEF
        }
        ENUM_KW => {
            nominal::enum_def(p);
            ENUM_DEF
        }
        USE_KW => {
            use_item::use_item(p);
            USE_ITEM
        }
        CONST_KW if (la == IDENT || la == MUT_KW) => {
            consts::const_def(p);
            CONST_DEF
        }
        STATIC_KW => {
            consts::static_def(p);
            STATIC_DEF
        }
        // test extern_block
        // extern {}
        EXTERN_KW
            if la == L_CURLY || ((la == STRING || la == RAW_STRING) && p.nth(2) == L_CURLY) =>
        {
            abi(p);
            extern_item_list(p);
            EXTERN_BLOCK
        }
        _ => return None,
    };
    Some(kind)
}

fn extern_crate_item(p: &mut Parser) {
    assert!(p.at(EXTERN_KW));
    p.bump();
    assert!(p.at(CRATE_KW));
    p.bump();
    name_ref(p);
    opt_alias(p);
    p.expect(SEMI);
}

pub(crate) fn extern_item_list(p: &mut Parser) {
    assert!(p.at(L_CURLY));
    let m = p.start();
    p.bump();
    mod_contents(p, true);
    p.expect(R_CURLY);
    m.complete(p, EXTERN_ITEM_LIST);
}

fn fn_def(p: &mut Parser, flavor: ItemFlavor) {
    assert!(p.at(FN_KW));
    p.bump();

    name_r(p, ITEM_RECOVERY_SET);
    // test function_type_params
    // fn foo<T: Clone + Copy>(){}
    type_params::opt_type_param_list(p);

    if p.at(L_PAREN) {
        match flavor {
            ItemFlavor::Mod => params::param_list(p),
            ItemFlavor::Trait => params::param_list_opt_patterns(p),
        }
    } else {
        p.error("expected function arguments");
    }
    // test function_ret_type
    // fn foo() {}
    // fn bar() -> () {}
    opt_fn_ret_type(p);

    // test function_where_clause
    // fn foo<T>() where T: Copy {}
    type_params::opt_where_clause(p);

    // test fn_decl
    // trait T { fn foo(); }
    if p.at(SEMI) {
        p.bump();
    } else {
        expressions::block(p)
    }
}

// test type_item
// type Foo = Bar;
fn type_def(p: &mut Parser) {
    assert!(p.at(TYPE_KW));
    p.bump();

    name(p);

    // test type_item_type_params
    // type Result<T> = ();
    type_params::opt_type_param_list(p);

    if p.at(COLON) {
        type_params::bounds(p);
    }

    // test type_item_where_clause
    // type Foo where Foo: Copy = ();
    type_params::opt_where_clause(p);

    if p.eat(EQ) {
        types::type_(p);
    }
    p.expect(SEMI);
}

pub(crate) fn mod_item(p: &mut Parser) {
    assert!(p.at(MOD_KW));
    p.bump();

    name(p);
    if p.at(L_CURLY) {
        mod_item_list(p);
    } else if !p.eat(SEMI) {
        p.error("expected `;` or `{`");
    }
}

pub(crate) fn mod_item_list(p: &mut Parser) {
    assert!(p.at(L_CURLY));
    let m = p.start();
    p.bump();
    mod_contents(p, true);
    p.expect(R_CURLY);
    m.complete(p, ITEM_LIST);
}

fn macro_call(p: &mut Parser) -> BlockLike {
    assert!(paths::is_path_start(p));
    paths::use_path(p);
    macro_call_after_excl(p)
}

pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike {
    p.expect(EXCL);
    if p.at(IDENT) {
        name(p);
    }
    match p.current() {
        L_CURLY => {
            token_tree(p);
            BlockLike::Block
        }
        L_PAREN | L_BRACK => {
            token_tree(p);
            BlockLike::NotBlock
        }
        _ => {
            p.error("expected `{`, `[`, `(`");
            BlockLike::NotBlock
        }
    }
}

pub(crate) fn token_tree(p: &mut Parser) {
    let closing_paren_kind = match p.current() {
        L_CURLY => R_CURLY,
        L_PAREN => R_PAREN,
        L_BRACK => R_BRACK,
        _ => unreachable!(),
    };
    let m = p.start();
    p.bump();
    while !p.at(EOF) && !p.at(closing_paren_kind) {
        match p.current() {
            L_CURLY | L_PAREN | L_BRACK => token_tree(p),
            R_CURLY => {
                p.error("unmatched `}`");
                m.complete(p, TOKEN_TREE);
                return;
            }
            R_PAREN | R_BRACK => p.err_and_bump("unmatched brace"),
            _ => p.bump(),
        }
    }
    p.expect(closing_paren_kind);
    m.complete(p, TOKEN_TREE);
}
@@ -1,21 +0,0 @@
use super::*;

pub(super) fn static_def(p: &mut Parser) {
    const_or_static(p, STATIC_KW)
}

pub(super) fn const_def(p: &mut Parser) {
    const_or_static(p, CONST_KW)
}

fn const_or_static(p: &mut Parser, kw: SyntaxKind) {
    assert!(p.at(kw));
    p.bump();
    p.eat(MUT_KW); // TODO: validator to forbid const mut
    name(p);
    types::ascription(p);
    if p.eat(EQ) {
        expressions::expr(p);
    }
    p.expect(SEMI);
}
@@ -1,168 +0,0 @@
use super::*;

pub(super) fn struct_def(p: &mut Parser, kind: SyntaxKind) {
    assert!(p.at(STRUCT_KW) || p.at_contextual_kw("union"));
    p.bump_remap(kind);

    name_r(p, ITEM_RECOVERY_SET);
    type_params::opt_type_param_list(p);
    match p.current() {
        WHERE_KW => {
            type_params::opt_where_clause(p);
            match p.current() {
                SEMI => {
                    p.bump();
                    return;
                }
                L_CURLY => named_field_def_list(p),
                _ => {
                    //TODO: special case `(` error message
                    p.error("expected `;` or `{`");
                    return;
                }
            }
        }
        SEMI if kind == STRUCT_KW => {
            p.bump();
            return;
        }
        L_CURLY => named_field_def_list(p),
        L_PAREN if kind == STRUCT_KW => {
            pos_field_def_list(p);
            // test tuple_struct_where
            // struct Test<T>(T) where T: Clone;
            // struct Test<T>(T);
            type_params::opt_where_clause(p);
            p.expect(SEMI);
        }
        _ if kind == STRUCT_KW => {
            p.error("expected `;`, `{`, or `(`");
            return;
        }
        _ => {
            p.error("expected `{`");
            return;
        }
    }
}

pub(super) fn enum_def(p: &mut Parser) {
    assert!(p.at(ENUM_KW));
    p.bump();
    name_r(p, ITEM_RECOVERY_SET);
    type_params::opt_type_param_list(p);
    type_params::opt_where_clause(p);
    if p.at(L_CURLY) {
        enum_variant_list(p);
    } else {
        p.error("expected `{`")
    }
}

pub(crate) fn enum_variant_list(p: &mut Parser) {
    assert!(p.at(L_CURLY));
    let m = p.start();
    p.bump();
    while !p.at(EOF) && !p.at(R_CURLY) {
        if p.at(L_CURLY) {
            error_block(p, "expected enum variant");
            continue;
        }
        let var = p.start();
        attributes::outer_attributes(p);
        if p.at(IDENT) {
            name(p);
            match p.current() {
                L_CURLY => named_field_def_list(p),
                L_PAREN => pos_field_def_list(p),
                EQ => {
                    p.bump();
                    expressions::expr(p);
                }
                _ => (),
            }
            var.complete(p, ENUM_VARIANT);
        } else {
            var.abandon(p);
            p.err_and_bump("expected enum variant");
        }
        if !p.at(R_CURLY) {
            p.expect(COMMA);
        }
    }
    p.expect(R_CURLY);
    m.complete(p, ENUM_VARIANT_LIST);
}

pub(crate) fn named_field_def_list(p: &mut Parser) {
    assert!(p.at(L_CURLY));
    let m = p.start();
    p.bump();
    while !p.at(R_CURLY) && !p.at(EOF) {
        if p.at(L_CURLY) {
            error_block(p, "expected field");
            continue;
        }
        named_field_def(p);
        if !p.at(R_CURLY) {
            p.expect(COMMA);
        }
    }
    p.expect(R_CURLY);
    m.complete(p, NAMED_FIELD_DEF_LIST);

    fn named_field_def(p: &mut Parser) {
        let m = p.start();
        // test field_attrs
        // struct S {
        //     #[serde(with = "url_serde")]
        //     pub uri: Uri,
        // }
        attributes::outer_attributes(p);
        opt_visibility(p);
        if p.at(IDENT) {
            name(p);
            p.expect(COLON);
            types::type_(p);
            m.complete(p, NAMED_FIELD_DEF);
        } else {
            m.abandon(p);
            p.err_and_bump("expected field declaration");
        }
    }
}

fn pos_field_def_list(p: &mut Parser) {
    assert!(p.at(L_PAREN));
    let m = p.start();
    if !p.expect(L_PAREN) {
        return;
    }
    while !p.at(R_PAREN) && !p.at(EOF) {
        let m = p.start();
        // test pos_field_attrs
        // struct S (
        //     #[serde(with = "url_serde")]
        //     pub Uri,
        // );
        //
        // enum S {
        //     Uri(#[serde(with = "url_serde")] Uri),
        // }
        attributes::outer_attributes(p);
        opt_visibility(p);
        if !p.at_ts(types::TYPE_FIRST) {
            p.error("expected a type");
            m.complete(p, ERROR);
            break;
        }
        types::type_(p);
        m.complete(p, POS_FIELD_DEF);

        if !p.at(R_PAREN) {
            p.expect(COMMA);
        }
    }
    p.expect(R_PAREN);
    m.complete(p, POS_FIELD_DEF_LIST);
}
@@ -1,137 +0,0 @@
use super::*;

// test trait_item
// trait T<U>: Hash + Clone where U: Copy {}
pub(super) fn trait_def(p: &mut Parser) {
    assert!(p.at(TRAIT_KW));
    p.bump();
    name_r(p, ITEM_RECOVERY_SET);
    type_params::opt_type_param_list(p);
    if p.at(COLON) {
        type_params::bounds(p);
    }
    type_params::opt_where_clause(p);
    if p.at(L_CURLY) {
        trait_item_list(p);
    } else {
        p.error("expected `{`");
    }
}

// test trait_item_list
// impl F {
//     type A: Clone;
//     const B: i32;
//     fn foo() {}
//     fn bar(&self);
// }
pub(crate) fn trait_item_list(p: &mut Parser) {
    assert!(p.at(L_CURLY));
    let m = p.start();
    p.bump();
    while !p.at(EOF) && !p.at(R_CURLY) {
        if p.at(L_CURLY) {
            error_block(p, "expected an item");
            continue;
        }
        item_or_macro(p, true, ItemFlavor::Trait);
    }
    p.expect(R_CURLY);
    m.complete(p, ITEM_LIST);
}

// test impl_block
// impl Foo {}
pub(super) fn impl_block(p: &mut Parser) {
    assert!(p.at(IMPL_KW));
    p.bump();
    if choose_type_params_over_qpath(p) {
        type_params::opt_type_param_list(p);
    }

    // TODO: never type
    // impl ! {}

    // test impl_block_neg
    // impl !Send for X {}
    p.eat(EXCL);
    impl_type(p);
    if p.eat(FOR_KW) {
        impl_type(p);
    }
    type_params::opt_where_clause(p);
    if p.at(L_CURLY) {
        impl_item_list(p);
    } else {
        p.error("expected `{`");
    }
}

// test impl_item_list
// impl F {
//     type A = i32;
//     const B: i32 = 92;
//     fn foo() {}
//     fn bar(&self) {}
// }
pub(crate) fn impl_item_list(p: &mut Parser) {
    assert!(p.at(L_CURLY));
    let m = p.start();
    p.bump();
    // test impl_inner_attributes
    // enum F{}
    // impl F {
    //     //! This is a doc comment
    //     #![doc("This is also a doc comment")]
    // }
    attributes::inner_attributes(p);

    while !p.at(EOF) && !p.at(R_CURLY) {
        if p.at(L_CURLY) {
            error_block(p, "expected an item");
            continue;
        }
        item_or_macro(p, true, ItemFlavor::Mod);
    }
    p.expect(R_CURLY);
    m.complete(p, ITEM_LIST);
}

fn choose_type_params_over_qpath(p: &Parser) -> bool {
    // There's an ambiguity between generic parameters and qualified paths in impls.
    // If we see `<` it may start both, so we have to inspect some following tokens.
    // The following combinations can only start generics,
    // but not qualified paths (with one exception):
    //     `<` `>` - empty generic parameters
    //     `<` `#` - generic parameters with attributes
    //     `<` (LIFETIME|IDENT) `>` - single generic parameter
    //     `<` (LIFETIME|IDENT) `,` - first generic parameter in a list
    //     `<` (LIFETIME|IDENT) `:` - generic parameter with bounds
    //     `<` (LIFETIME|IDENT) `=` - generic parameter with a default
    // The only truly ambiguous case is
    //     `<` IDENT `>` `::` IDENT ...
    // We disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`)
    // because this is what is almost always expected in practice, and qualified paths in impls
    // (`impl <Type>::AssocTy { ... }`) aren't even allowed by the type checker at the moment.
    if !p.at(L_ANGLE) {
        return false;
    }
    if p.nth(1) == POUND || p.nth(1) == R_ANGLE {
        return true;
    }
    (p.nth(1) == LIFETIME || p.nth(1) == IDENT)
        && (p.nth(2) == R_ANGLE || p.nth(2) == COMMA || p.nth(2) == COLON || p.nth(2) == EQ)
}

// test_err impl_type
// impl Type {}
// impl Trait1 for T {}
// impl impl NotType {}
// impl Trait2 for impl NotType {}
pub(crate) fn impl_type(p: &mut Parser) {
    if p.at(IMPL_KW) {
        p.error("expected trait or type");
        return;
    }
    types::type_(p);
}
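The comment in `choose_type_params_over_qpath` above is essentially a two-token lookahead table. As a standalone illustration, with an invented token enum standing in for `SyntaxKind` (the real code inspects the parser via `p.at`/`p.nth` exactly as shown), the same decision can be written as a pure function over the three tokens that follow `impl`:

// Invented token enum, for illustration only.
#[derive(Clone, Copy, PartialEq)]
enum Tok {
    LAngle,
    RAngle,
    Pound,
    Lifetime,
    Ident,
    Comma,
    Colon,
    Eq,
    Other,
}

fn looks_like_generics(t0: Tok, t1: Tok, t2: Tok) -> bool {
    if t0 != Tok::LAngle {
        return false;
    }
    // `<#` or `<>` can only start generic parameters.
    if t1 == Tok::Pound || t1 == Tok::RAngle {
        return true;
    }
    // `<` (LIFETIME|IDENT) followed by `>`, `,`, `:` or `=` — this includes the
    // genuinely ambiguous `<` IDENT `>` case, resolved in favor of generics.
    (t1 == Tok::Lifetime || t1 == Tok::Ident)
        && matches!(t2, Tok::RAngle | Tok::Comma | Tok::Colon | Tok::Eq)
}

For example, `looks_like_generics(Tok::LAngle, Tok::Ident, Tok::RAngle)` returns true, which is the `impl<T> ...` reading the comment describes.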
@ -1,121 +0,0 @@
|
|||
use super::*;
|
||||
|
||||
pub(super) fn use_item(p: &mut Parser) {
|
||||
assert!(p.at(USE_KW));
|
||||
p.bump();
|
||||
use_tree(p);
|
||||
p.expect(SEMI);
|
||||
}
|
||||
|
||||
/// Parse a use 'tree', such as `some::path` in `use some::path;`
|
||||
/// Note that this is called both by `use_item` and `use_tree_list`,
|
||||
/// so handles both `some::path::{inner::path}` and `inner::path` in
|
||||
/// `use some::path::{inner::path};`
|
||||
fn use_tree(p: &mut Parser) {
|
||||
let la = p.nth(1);
|
||||
let m = p.start();
|
||||
match (p.current(), la) {
|
||||
// Finish the use_tree for cases of e.g.
|
||||
// `use some::path::{self, *};` or `use *;`
|
||||
// This does not handle cases such as `use some::path::*`
|
||||
// N.B. in Rust 2015 `use *;` imports all from crate root
|
||||
// however in Rust 2018 `use *;` errors: ('cannot glob-import all possible crates')
|
||||
// TODO: Add this error (if not out of scope)
|
||||
|
||||
// test use_star
|
||||
// use *;
|
||||
// use ::*;
|
||||
// use some::path::{*};
|
||||
// use some::path::{::*};
|
||||
(STAR, _) => p.bump(),
|
||||
(COLONCOLON, STAR) => {
|
||||
// Parse `use ::*;`, which imports all from the crate root in Rust 2015
|
||||
// This is invalid inside a use_tree_list, (e.g. `use some::path::{::*}`)
|
||||
// but still parses and errors later: ('crate root in paths can only be used in start position')
|
||||
// TODO: Add this error (if not out of scope)
|
||||
// In Rust 2018, it is always invalid (see above)
|
||||
p.bump();
|
||||
p.bump();
|
||||
}
|
||||
// Open a use tree list
|
||||
// Handles cases such as `use {some::path};` or `{inner::path}` in
|
||||
// `use some::path::{{inner::path}, other::path}`
|
||||
|
||||
// test use_tree_list
|
||||
// use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`)
|
||||
// use {path::from::root}; // Rust 2015
|
||||
// use ::{some::arbritrary::path}; // Rust 2015
|
||||
// use ::{{{crate::export}}}; // Nonsensical but perfectly legal nestnig
|
||||
(L_CURLY, _) | (COLONCOLON, L_CURLY) => {
|
||||
if p.at(COLONCOLON) {
|
||||
p.bump();
|
||||
}
|
||||
use_tree_list(p);
|
||||
}
|
||||
// Parse a 'standard' path.
|
||||
// Also handles aliases (e.g. `use something as something_else`)
|
||||
|
||||
// test use_path
|
||||
// use ::crate_name; // Rust 2018 - All flavours
|
||||
// use crate_name; // Rust 2018 - Anchored paths
|
||||
// use item_in_scope_or_crate_name; // Rust 2018 - Uniform Paths
|
||||
//
|
||||
// use self::module::Item;
|
||||
// use crate::Item;
|
||||
// use self::some::Struct;
|
||||
// use crate_name::some_item;
|
||||
_ if paths::is_path_start(p) => {
|
||||
paths::use_path(p);
|
||||
match p.current() {
|
||||
AS_KW => {
|
||||
// test use_alias
|
||||
// use some::path as some_name;
|
||||
// use some::{
|
||||
// other::path as some_other_name,
|
||||
// different::path as different_name,
|
||||
// yet::another::path,
|
||||
// running::out::of::synonyms::for_::different::*
|
||||
// };
|
||||
opt_alias(p);
|
||||
}
|
||||
COLONCOLON => {
|
||||
p.bump();
|
||||
match p.current() {
|
||||
STAR => {
|
||||
p.bump();
|
||||
}
|
||||
// test use_tree_list_after_path
|
||||
// use crate::{Item};
|
||||
// use self::{Item};
|
||||
L_CURLY => use_tree_list(p),
|
||||
_ => {
|
||||
// is this unreachable?
|
||||
p.error("expected `{` or `*`");
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
m.abandon(p);
|
||||
p.err_and_bump("expected one of `*`, `::`, `{`, `self`, `super` or an indentifier");
|
||||
return;
|
||||
}
|
||||
}
|
||||
m.complete(p, USE_TREE);
|
||||
}
|
||||
|
||||
pub(crate) fn use_tree_list(p: &mut Parser) {
|
||||
assert!(p.at(L_CURLY));
|
||||
let m = p.start();
|
||||
p.bump();
|
||||
while !p.at(EOF) && !p.at(R_CURLY) {
|
||||
use_tree(p);
|
||||
if !p.at(R_CURLY) {
|
||||
p.expect(COMMA);
|
||||
}
|
||||
}
|
||||
p.expect(R_CURLY);
|
||||
m.complete(p, USE_TREE_LIST);
|
||||
}
|
|
@@ -1,139 +0,0 @@
|
|||
use super::*;
|
||||
|
||||
// test param_list
|
||||
// fn a() {}
|
||||
// fn b(x: i32) {}
|
||||
// fn c(x: i32, ) {}
|
||||
// fn d(x: i32, y: ()) {}
|
||||
pub(super) fn param_list(p: &mut Parser) {
|
||||
list_(p, Flavor::Normal)
|
||||
}
|
||||
|
||||
// test param_list_opt_patterns
|
||||
// fn foo<F: FnMut(&mut Foo<'a>)>(){}
|
||||
pub(super) fn param_list_opt_patterns(p: &mut Parser) {
|
||||
list_(p, Flavor::OptionalPattern)
|
||||
}
|
||||
|
||||
pub(super) fn param_list_opt_types(p: &mut Parser) {
|
||||
list_(p, Flavor::OptionalType)
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Eq, PartialEq)]
|
||||
enum Flavor {
|
||||
OptionalType,
|
||||
OptionalPattern,
|
||||
Normal,
|
||||
}
|
||||
|
||||
impl Flavor {
|
||||
fn type_required(self) -> bool {
|
||||
match self {
|
||||
Flavor::OptionalType => false,
|
||||
_ => true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn list_(p: &mut Parser, flavor: Flavor) {
|
||||
let (bra, ket) = if flavor.type_required() { (L_PAREN, R_PAREN) } else { (PIPE, PIPE) };
|
||||
assert!(p.at(bra));
|
||||
let m = p.start();
|
||||
p.bump();
|
||||
if flavor.type_required() {
|
||||
opt_self_param(p);
|
||||
}
|
||||
while !p.at(EOF) && !p.at(ket) {
|
||||
if !p.at_ts(VALUE_PARAMETER_FIRST) {
|
||||
p.error("expected value parameter");
|
||||
break;
|
||||
}
|
||||
value_parameter(p, flavor);
|
||||
if !p.at(ket) {
|
||||
p.expect(COMMA);
|
||||
}
|
||||
}
|
||||
p.expect(ket);
|
||||
m.complete(p, PARAM_LIST);
|
||||
}
|
||||
|
||||
const VALUE_PARAMETER_FIRST: TokenSet = patterns::PATTERN_FIRST.union(types::TYPE_FIRST);
|
||||
|
||||
fn value_parameter(p: &mut Parser, flavor: Flavor) {
|
||||
let m = p.start();
|
||||
match flavor {
|
||||
Flavor::OptionalType | Flavor::Normal => {
|
||||
patterns::pattern(p);
|
||||
if p.at(COLON) || flavor.type_required() {
|
||||
types::ascription(p)
|
||||
}
|
||||
}
|
||||
// test value_parameters_no_patterns
|
||||
// type F = Box<Fn(a: i32, &b: &i32, &mut c: &i32, ())>;
|
||||
Flavor::OptionalPattern => {
|
||||
let la0 = p.current();
|
||||
let la1 = p.nth(1);
|
||||
let la2 = p.nth(2);
|
||||
let la3 = p.nth(3);
|
||||
|
||||
// test trait_fn_placeholder_parameter
|
||||
// trait Foo {
|
||||
// fn bar(_: u64);
|
||||
// }
|
||||
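// The three accepted "pattern plus ascription" shapes, per the test above:
//   `a: T` / `_: T`  - (IDENT | UNDERSCORE) COLON
//   `&b: T`          - AMP IDENT COLON
//   `&mut c: T`      - AMP MUT_KW IDENT COLON
// anything else is parsed as a bare type.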
if (la0 == IDENT || la0 == UNDERSCORE) && la1 == COLON
|
||||
|| la0 == AMP && la1 == IDENT && la2 == COLON
|
||||
|| la0 == AMP && la1 == MUT_KW && la2 == IDENT && la3 == COLON
|
||||
{
|
||||
patterns::pattern(p);
|
||||
types::ascription(p);
|
||||
} else {
|
||||
types::type_(p);
|
||||
}
|
||||
}
|
||||
}
|
||||
m.complete(p, PARAM);
|
||||
}
|
||||
|
||||
// test self_param
|
||||
// impl S {
|
||||
// fn a(self) {}
|
||||
// fn b(&self,) {}
|
||||
// fn c(&'a self,) {}
|
||||
// fn d(&'a mut self, x: i32) {}
|
||||
// fn e(mut self) {}
|
||||
// }
|
||||
fn opt_self_param(p: &mut Parser) {
|
||||
let m;
|
||||
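// By-value `self` or `mut self`; note that `&&` binds tighter than `||`,
// so this reads: `self`, or `mut` followed by `self`.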
if p.at(SELF_KW) || p.at(MUT_KW) && p.nth(1) == SELF_KW {
|
||||
m = p.start();
|
||||
p.eat(MUT_KW);
|
||||
p.eat(SELF_KW);
|
||||
// test arb_self_types
|
||||
// impl S {
|
||||
// fn a(self: &Self) {}
|
||||
// fn b(mut self: Box<Self>) {}
|
||||
// }
|
||||
if p.at(COLON) {
|
||||
types::ascription(p);
|
||||
}
|
||||
} else {
|
||||
let la1 = p.nth(1);
|
||||
let la2 = p.nth(2);
|
||||
let la3 = p.nth(3);
|
||||
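// Count how many tokens the self parameter spans:
// `&self` -> 2, `&mut self` / `&'a self` -> 3, `&'a mut self` -> 4.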
let n_toks = match (p.current(), la1, la2, la3) {
|
||||
(AMP, SELF_KW, _, _) => 2,
|
||||
(AMP, MUT_KW, SELF_KW, _) => 3,
|
||||
(AMP, LIFETIME, SELF_KW, _) => 3,
|
||||
(AMP, LIFETIME, MUT_KW, SELF_KW) => 4,
|
||||
_ => return,
|
||||
};
|
||||
m = p.start();
|
||||
for _ in 0..n_toks {
|
||||
p.bump();
|
||||
}
|
||||
}
|
||||
m.complete(p, SELF_PARAM);
|
||||
if !p.at(R_PAREN) {
|
||||
p.expect(COMMA);
|
||||
}
|
||||
}
|
|
@@ -1,103 +0,0 @@
|
|||
use super::*;
|
||||
|
||||
pub(super) const PATH_FIRST: TokenSet =
|
||||
token_set![IDENT, SELF_KW, SUPER_KW, CRATE_KW, COLONCOLON, L_ANGLE];
|
||||
|
||||
pub(super) fn is_path_start(p: &Parser) -> bool {
|
||||
match p.current() {
|
||||
IDENT | SELF_KW | SUPER_KW | CRATE_KW | COLONCOLON => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn use_path(p: &mut Parser) {
|
||||
path(p, Mode::Use)
|
||||
}
|
||||
|
||||
pub(super) fn type_path(p: &mut Parser) {
|
||||
path(p, Mode::Type)
|
||||
}
|
||||
|
||||
pub(super) fn expr_path(p: &mut Parser) {
|
||||
path(p, Mode::Expr)
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Eq, PartialEq)]
|
||||
enum Mode {
|
||||
Use,
|
||||
Type,
|
||||
Expr,
|
||||
}
|
||||
|
||||
fn path(p: &mut Parser, mode: Mode) {
|
||||
let path = p.start();
|
||||
path_segment(p, mode, true);
|
||||
let mut qual = path.complete(p, PATH);
|
||||
loop {
|
||||
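// Don't consume `::` when it is followed by `*` or `{`: those belong to the
// enclosing use tree (`use a::b::*;`, `use a::b::{c, d};`), not to the path itself.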
let use_tree = match p.nth(1) {
|
||||
STAR | L_CURLY => true,
|
||||
_ => false,
|
||||
};
|
||||
if p.at(COLONCOLON) && !use_tree {
|
||||
let path = qual.precede(p);
|
||||
p.bump();
|
||||
path_segment(p, mode, false);
|
||||
let path = path.complete(p, PATH);
|
||||
qual = path;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn path_segment(p: &mut Parser, mode: Mode, first: bool) {
|
||||
let m = p.start();
|
||||
// test qual_paths
|
||||
// type X = <A as B>::Output;
|
||||
// fn foo() { <usize as Default>::default(); }
|
||||
if first && p.eat(L_ANGLE) {
|
||||
types::type_(p);
|
||||
if p.eat(AS_KW) {
|
||||
if is_path_start(p) {
|
||||
types::path_type(p);
|
||||
} else {
|
||||
p.error("expected a trait");
|
||||
}
|
||||
}
|
||||
p.expect(R_ANGLE);
|
||||
} else {
|
||||
if first {
|
||||
p.eat(COLONCOLON);
|
||||
}
|
||||
match p.current() {
|
||||
IDENT => {
|
||||
name_ref(p);
|
||||
opt_path_type_args(p, mode);
|
||||
}
|
||||
// test crate_path
|
||||
// use crate::foo;
|
||||
SELF_KW | SUPER_KW | CRATE_KW => p.bump(),
|
||||
_ => {
|
||||
p.err_recover("expected identifier", items::ITEM_RECOVERY_SET);
|
||||
}
|
||||
};
|
||||
}
|
||||
m.complete(p, PATH_SEGMENT);
|
||||
}
|
||||
|
||||
fn opt_path_type_args(p: &mut Parser, mode: Mode) {
|
||||
match mode {
|
||||
Mode::Use => return,
|
||||
Mode::Type => {
|
||||
// test path_fn_trait_args
|
||||
// type F = Box<Fn(x: i32) -> ()>;
|
||||
if p.at(L_PAREN) {
|
||||
params::param_list_opt_patterns(p);
|
||||
opt_fn_ret_type(p);
|
||||
} else {
|
||||
type_args::opt_type_arg_list(p, false)
|
||||
}
|
||||
}
|
||||
Mode::Expr => type_args::opt_type_arg_list(p, true),
|
||||
}
|
||||
}
|
|
@@ -1,248 +0,0 @@
|
|||
use super::*;
|
||||
|
||||
pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST
|
||||
.union(paths::PATH_FIRST)
|
||||
.union(token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE]);
|
||||
|
||||
pub(super) fn pattern(p: &mut Parser) {
|
||||
pattern_r(p, PAT_RECOVERY_SET)
|
||||
}
|
||||
|
||||
pub(super) fn pattern_r(p: &mut Parser, recovery_set: TokenSet) {
|
||||
if let Some(lhs) = atom_pat(p, recovery_set) {
|
||||
// test range_pat
|
||||
// fn main() {
|
||||
// match 92 {
|
||||
// 0 ... 100 => (),
|
||||
// 101 ..= 200 => (),
|
||||
// 200 .. 301=> (),
|
||||
// }
|
||||
// }
|
||||
if p.at(DOTDOTDOT) || p.at(DOTDOTEQ) || p.at(DOTDOT) {
|
||||
let m = lhs.precede(p);
|
||||
p.bump();
|
||||
atom_pat(p, recovery_set);
|
||||
m.complete(p, RANGE_PAT);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const PAT_RECOVERY_SET: TokenSet =
|
||||
token_set![LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA];
|
||||
|
||||
fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> {
|
||||
let la0 = p.nth(0);
|
||||
let la1 = p.nth(1);
|
||||
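// A bare identifier not followed by `::`, `(` or `{` is a binding
// (`ref`/`mut` always start one); paths, tuple-struct and struct patterns
// are handled below.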
if la0 == REF_KW
|
||||
|| la0 == MUT_KW
|
||||
|| (la0 == IDENT && !(la1 == COLONCOLON || la1 == L_PAREN || la1 == L_CURLY))
|
||||
{
|
||||
return Some(bind_pat(p, true));
|
||||
}
|
||||
if paths::is_path_start(p) {
|
||||
return Some(path_pat(p));
|
||||
}
|
||||
|
||||
if is_literal_pat_start(p) {
|
||||
return Some(literal_pat(p));
|
||||
}
|
||||
|
||||
let m = match la0 {
|
||||
UNDERSCORE => placeholder_pat(p),
|
||||
AMP => ref_pat(p),
|
||||
L_PAREN => tuple_pat(p),
|
||||
L_BRACK => slice_pat(p),
|
||||
_ => {
|
||||
p.err_recover("expected pattern", recovery_set);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
Some(m)
|
||||
}
|
||||
|
||||
fn is_literal_pat_start(p: &mut Parser) -> bool {
|
||||
p.at(MINUS) && (p.nth(1) == INT_NUMBER || p.nth(1) == FLOAT_NUMBER)
|
||||
|| p.at_ts(expressions::LITERAL_FIRST)
|
||||
}
|
||||
|
||||
// test literal_pattern
|
||||
// fn main() {
|
||||
// match () {
|
||||
// -1 => (),
|
||||
// 92 => (),
|
||||
// 'c' => (),
|
||||
// "hello" => (),
|
||||
// }
|
||||
// }
|
||||
fn literal_pat(p: &mut Parser) -> CompletedMarker {
|
||||
assert!(is_literal_pat_start(p));
|
||||
let m = p.start();
|
||||
if p.at(MINUS) {
|
||||
p.bump();
|
||||
}
|
||||
expressions::literal(p);
|
||||
m.complete(p, LITERAL_PAT)
|
||||
}
|
||||
|
||||
// test path_part
|
||||
// fn foo() {
|
||||
// let foo::Bar = ();
|
||||
// let ::Bar = ();
|
||||
// let Bar { .. } = ();
|
||||
// let Bar(..) = ();
|
||||
// }
|
||||
fn path_pat(p: &mut Parser) -> CompletedMarker {
|
||||
assert!(paths::is_path_start(p));
|
||||
let m = p.start();
|
||||
paths::expr_path(p);
|
||||
let kind = match p.current() {
|
||||
L_PAREN => {
|
||||
tuple_pat_fields(p);
|
||||
TUPLE_STRUCT_PAT
|
||||
}
|
||||
L_CURLY => {
|
||||
field_pat_list(p);
|
||||
STRUCT_PAT
|
||||
}
|
||||
_ => PATH_PAT,
|
||||
};
|
||||
m.complete(p, kind)
|
||||
}
|
||||
|
||||
// test tuple_pat_fields
|
||||
// fn foo() {
|
||||
// let S() = ();
|
||||
// let S(_) = ();
|
||||
// let S(_,) = ();
|
||||
// let S(_, .. , x) = ();
|
||||
// }
|
||||
fn tuple_pat_fields(p: &mut Parser) {
|
||||
assert!(p.at(L_PAREN));
|
||||
p.bump();
|
||||
pat_list(p, R_PAREN);
|
||||
p.expect(R_PAREN);
|
||||
}
|
||||
|
||||
// test field_pat_list
|
||||
// fn foo() {
|
||||
// let S {} = ();
|
||||
// let S { f, ref mut g } = ();
|
||||
// let S { h: _, ..} = ();
|
||||
// let S { h: _, } = ();
|
||||
// }
|
||||
fn field_pat_list(p: &mut Parser) {
|
||||
assert!(p.at(L_CURLY));
|
||||
let m = p.start();
|
||||
p.bump();
|
||||
while !p.at(EOF) && !p.at(R_CURLY) {
|
||||
match p.current() {
|
||||
DOTDOT => p.bump(),
|
||||
IDENT if p.nth(1) == COLON => field_pat(p),
|
||||
L_CURLY => error_block(p, "expected ident"),
|
||||
_ => {
|
||||
bind_pat(p, false);
|
||||
}
|
||||
}
|
||||
if !p.at(R_CURLY) {
|
||||
p.expect(COMMA);
|
||||
}
|
||||
}
|
||||
p.expect(R_CURLY);
|
||||
m.complete(p, FIELD_PAT_LIST);
|
||||
}
|
||||
|
||||
fn field_pat(p: &mut Parser) {
|
||||
assert!(p.at(IDENT));
|
||||
assert!(p.nth(1) == COLON);
|
||||
|
||||
let m = p.start();
|
||||
name(p);
|
||||
p.bump();
|
||||
pattern(p);
|
||||
m.complete(p, FIELD_PAT);
|
||||
}
|
||||
|
||||
// test placeholder_pat
|
||||
// fn main() { let _ = (); }
|
||||
fn placeholder_pat(p: &mut Parser) -> CompletedMarker {
|
||||
assert!(p.at(UNDERSCORE));
|
||||
let m = p.start();
|
||||
p.bump();
|
||||
m.complete(p, PLACEHOLDER_PAT)
|
||||
}
|
||||
|
||||
// test ref_pat
|
||||
// fn main() {
|
||||
// let &a = ();
|
||||
// let &mut b = ();
|
||||
// }
|
||||
fn ref_pat(p: &mut Parser) -> CompletedMarker {
|
||||
assert!(p.at(AMP));
|
||||
let m = p.start();
|
||||
p.bump();
|
||||
p.eat(MUT_KW);
|
||||
pattern(p);
|
||||
m.complete(p, REF_PAT)
|
||||
}
|
||||
|
||||
// test tuple_pat
|
||||
// fn main() {
|
||||
// let (a, b, ..) = ();
|
||||
// }
|
||||
fn tuple_pat(p: &mut Parser) -> CompletedMarker {
|
||||
assert!(p.at(L_PAREN));
|
||||
let m = p.start();
|
||||
tuple_pat_fields(p);
|
||||
m.complete(p, TUPLE_PAT)
|
||||
}
|
||||
|
||||
// test slice_pat
|
||||
// fn main() {
|
||||
// let [a, b, ..] = [];
|
||||
// }
|
||||
fn slice_pat(p: &mut Parser) -> CompletedMarker {
|
||||
assert!(p.at(L_BRACK));
|
||||
let m = p.start();
|
||||
p.bump();
|
||||
pat_list(p, R_BRACK);
|
||||
p.expect(R_BRACK);
|
||||
m.complete(p, SLICE_PAT)
|
||||
}
|
||||
|
||||
fn pat_list(p: &mut Parser, ket: SyntaxKind) {
|
||||
while !p.at(EOF) && !p.at(ket) {
|
||||
match p.current() {
|
||||
DOTDOT => p.bump(),
|
||||
_ => {
|
||||
if !p.at_ts(PATTERN_FIRST) {
|
||||
p.error("expected a pattern");
|
||||
break;
|
||||
}
|
||||
pattern(p)
|
||||
}
|
||||
}
|
||||
if !p.at(ket) {
|
||||
p.expect(COMMA);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// test bind_pat
|
||||
// fn main() {
|
||||
// let a = ();
|
||||
// let mut b = ();
|
||||
// let ref c = ();
|
||||
// let ref mut d = ();
|
||||
// let e @ _ = ();
|
||||
// let ref mut f @ g @ _ = ();
|
||||
// }
|
||||
fn bind_pat(p: &mut Parser, with_at: bool) -> CompletedMarker {
|
||||
let m = p.start();
|
||||
p.eat(REF_KW);
|
||||
p.eat(MUT_KW);
|
||||
name(p);
|
||||
if with_at && p.eat(AT) {
|
||||
pattern(p);
|
||||
}
|
||||
m.complete(p, BIND_PAT)
|
||||
}
|
|
@@ -1,48 +0,0 @@
|
|||
use super::*;
|
||||
|
||||
pub(super) fn opt_type_arg_list(p: &mut Parser, colon_colon_required: bool) {
|
||||
let m;
|
||||
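// `::<...>` (turbofish) is always accepted; a bare `<...>` is accepted only
// when `::` is not required, i.e. in type position.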
match (colon_colon_required, p.nth(0), p.nth(1)) {
|
||||
(_, COLONCOLON, L_ANGLE) => {
|
||||
m = p.start();
|
||||
p.bump();
|
||||
p.bump();
|
||||
}
|
||||
(false, L_ANGLE, _) => {
|
||||
m = p.start();
|
||||
p.bump();
|
||||
}
|
||||
_ => return,
|
||||
};
|
||||
|
||||
while !p.at(EOF) && !p.at(R_ANGLE) {
|
||||
type_arg(p);
|
||||
if !p.at(R_ANGLE) && !p.expect(COMMA) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
p.expect(R_ANGLE);
|
||||
m.complete(p, TYPE_ARG_LIST);
|
||||
}
|
||||
|
||||
// test type_arg
|
||||
// type A = B<'static, i32, Item=u64>;
|
||||
fn type_arg(p: &mut Parser) {
|
||||
let m = p.start();
|
||||
match p.current() {
|
||||
LIFETIME => {
|
||||
p.bump();
|
||||
m.complete(p, LIFETIME_ARG);
|
||||
}
|
||||
IDENT if p.nth(1) == EQ => {
|
||||
name_ref(p);
|
||||
p.bump();
|
||||
types::type_(p);
|
||||
m.complete(p, ASSOC_TYPE_ARG);
|
||||
}
|
||||
_ => {
|
||||
types::type_(p);
|
||||
m.complete(p, TYPE_ARG);
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -1,175 +0,0 @@
|
|||
use super::*;
|
||||
|
||||
pub(super) fn opt_type_param_list(p: &mut Parser) {
|
||||
if !p.at(L_ANGLE) {
|
||||
return;
|
||||
}
|
||||
type_param_list(p);
|
||||
}
|
||||
|
||||
fn type_param_list(p: &mut Parser) {
|
||||
assert!(p.at(L_ANGLE));
|
||||
let m = p.start();
|
||||
p.bump();
|
||||
|
||||
while !p.at(EOF) && !p.at(R_ANGLE) {
|
||||
let m = p.start();
|
||||
|
||||
// test generic_lifetime_type_attribute
|
||||
// fn foo<#[derive(Lifetime)] 'a, #[derive(Type)] T>(_: &'a T) {
|
||||
// }
|
||||
attributes::outer_attributes(p);
|
||||
|
||||
match p.current() {
|
||||
LIFETIME => lifetime_param(p, m),
|
||||
IDENT => type_param(p, m),
|
||||
_ => {
|
||||
m.abandon(p);
|
||||
p.err_and_bump("expected type parameter")
|
||||
}
|
||||
}
|
||||
if !p.at(R_ANGLE) && !p.expect(COMMA) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
p.expect(R_ANGLE);
|
||||
m.complete(p, TYPE_PARAM_LIST);
|
||||
}
|
||||
|
||||
fn lifetime_param(p: &mut Parser, m: Marker) {
|
||||
assert!(p.at(LIFETIME));
|
||||
p.bump();
|
||||
if p.at(COLON) {
|
||||
lifetime_bounds(p);
|
||||
}
|
||||
m.complete(p, LIFETIME_PARAM);
|
||||
}
|
||||
|
||||
fn type_param(p: &mut Parser, m: Marker) {
|
||||
assert!(p.at(IDENT));
|
||||
name(p);
|
||||
if p.at(COLON) {
|
||||
bounds(p);
|
||||
}
|
||||
// test type_param_default
|
||||
// struct S<T = i32>;
|
||||
if p.at(EQ) {
|
||||
p.bump();
|
||||
types::type_(p)
|
||||
}
|
||||
m.complete(p, TYPE_PARAM);
|
||||
}
|
||||
|
||||
// test type_param_bounds
|
||||
// struct S<T: 'a + ?Sized + (Copy)>;
|
||||
pub(super) fn bounds(p: &mut Parser) {
|
||||
assert!(p.at(COLON));
|
||||
p.bump();
|
||||
bounds_without_colon(p);
|
||||
}
|
||||
|
||||
fn lifetime_bounds(p: &mut Parser) {
|
||||
assert!(p.at(COLON));
|
||||
p.bump();
|
||||
while p.at(LIFETIME) {
|
||||
p.bump();
|
||||
if !p.eat(PLUS) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn bounds_without_colon(p: &mut Parser) {
|
||||
loop {
|
||||
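// Each bound may be parenthesised (`(Copy)`) and/or prefixed with `?` (`?Sized`);
// bounds are separated by `+`.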
let has_paren = p.eat(L_PAREN);
|
||||
p.eat(QUESTION);
|
||||
match p.current() {
|
||||
LIFETIME => p.bump(),
|
||||
FOR_KW => types::for_type(p),
|
||||
_ if paths::is_path_start(p) => types::path_type(p),
|
||||
_ => break,
|
||||
}
|
||||
if has_paren {
|
||||
p.expect(R_PAREN);
|
||||
}
|
||||
if !p.eat(PLUS) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// test where_clause
|
||||
// fn foo()
|
||||
// where
|
||||
// 'a: 'b + 'c,
|
||||
// T: Clone + Copy + 'static,
|
||||
// Iterator::Item: 'a,
|
||||
// <T as Iterator>::Item: 'a
|
||||
// {}
|
||||
pub(super) fn opt_where_clause(p: &mut Parser) {
|
||||
if !p.at(WHERE_KW) {
|
||||
return;
|
||||
}
|
||||
let m = p.start();
|
||||
p.bump();
|
||||
|
||||
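// Predicates are comma-separated; the comma may be omitted only before the
// end of the clause (`{`, `;` or `=`).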
while is_where_predicate(p) {
|
||||
where_predicate(p);
|
||||
|
||||
let comma = p.eat(COMMA);
|
||||
|
||||
if is_where_clause_end(p) {
|
||||
break;
|
||||
}
|
||||
|
||||
if !comma {
|
||||
p.error("expected comma");
|
||||
}
|
||||
}
|
||||
|
||||
m.complete(p, WHERE_CLAUSE);
|
||||
}
|
||||
|
||||
fn is_where_predicate(p: &mut Parser) -> bool {
|
||||
match p.current() {
|
||||
LIFETIME => true,
|
||||
IMPL_KW => false,
|
||||
token => types::TYPE_FIRST.contains(token),
|
||||
}
|
||||
}
|
||||
|
||||
fn is_where_clause_end(p: &mut Parser) -> bool {
|
||||
p.current() == L_CURLY || p.current() == SEMI || p.current() == EQ
|
||||
}
|
||||
|
||||
fn where_predicate(p: &mut Parser) {
|
||||
let m = p.start();
|
||||
match p.current() {
|
||||
LIFETIME => {
|
||||
p.bump();
|
||||
if p.at(COLON) {
|
||||
lifetime_bounds(p);
|
||||
} else {
|
||||
p.error("expected colon");
|
||||
}
|
||||
}
|
||||
IMPL_KW => {
|
||||
p.error("expected lifetime or type");
|
||||
}
|
||||
_ => {
|
||||
// test where_pred_for
|
||||
// fn test<F>()
|
||||
// where
|
||||
// for<'a> F: Fn(&'a str)
|
||||
// { }
|
||||
types::type_(p);
|
||||
|
||||
if p.at(COLON) {
|
||||
bounds(p);
|
||||
} else {
|
||||
p.error("expected colon");
|
||||
}
|
||||
}
|
||||
}
|
||||
m.complete(p, WHERE_PRED);
|
||||
}
|
|
@@ -1,278 +0,0 @@
|
|||
use super::*;
|
||||
|
||||
pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![
|
||||
L_PAREN, EXCL, STAR, L_BRACK, AMP, UNDERSCORE, FN_KW, UNSAFE_KW, EXTERN_KW, FOR_KW, IMPL_KW,
|
||||
DYN_KW, L_ANGLE,
|
||||
]);
|
||||
|
||||
const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA];
|
||||
|
||||
pub(super) fn type_(p: &mut Parser) {
|
||||
type_with_bounds_cond(p, true);
|
||||
}
|
||||
|
||||
pub(super) fn type_no_bounds(p: &mut Parser) {
|
||||
type_with_bounds_cond(p, false);
|
||||
}
|
||||
|
||||
fn type_with_bounds_cond(p: &mut Parser, allow_bounds: bool) {
|
||||
match p.current() {
|
||||
L_PAREN => paren_or_tuple_type(p),
|
||||
EXCL => never_type(p),
|
||||
STAR => pointer_type(p),
|
||||
L_BRACK => array_or_slice_type(p),
|
||||
AMP => reference_type(p),
|
||||
UNDERSCORE => placeholder_type(p),
|
||||
FN_KW | UNSAFE_KW | EXTERN_KW => fn_pointer_type(p),
|
||||
FOR_KW => for_type(p),
|
||||
IMPL_KW => impl_trait_type(p),
|
||||
DYN_KW => dyn_trait_type(p),
|
||||
// Some path types are not allowed to have bounds (no plus)
|
||||
L_ANGLE => path_type_(p, allow_bounds),
|
||||
_ if paths::is_path_start(p) => path_or_macro_type_(p, allow_bounds),
|
||||
_ => {
|
||||
p.err_recover("expected type", TYPE_RECOVERY_SET);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn ascription(p: &mut Parser) {
|
||||
p.expect(COLON);
|
||||
type_(p)
|
||||
}
|
||||
|
||||
fn paren_or_tuple_type(p: &mut Parser) {
|
||||
assert!(p.at(L_PAREN));
|
||||
let m = p.start();
|
||||
p.bump();
|
||||
let mut n_types: u32 = 0;
|
||||
let mut trailing_comma: bool = false;
|
||||
while !p.at(EOF) && !p.at(R_PAREN) {
|
||||
n_types += 1;
|
||||
type_(p);
|
||||
if p.eat(COMMA) {
|
||||
trailing_comma = true;
|
||||
} else {
|
||||
trailing_comma = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
p.expect(R_PAREN);
|
||||
|
||||
let kind = if n_types == 1 && !trailing_comma {
|
||||
// test paren_type
|
||||
// type T = (i32);
|
||||
PAREN_TYPE
|
||||
} else {
|
||||
// test unit_type
|
||||
// type T = ();
|
||||
|
||||
// test singleton_tuple_type
|
||||
// type T = (i32,);
|
||||
TUPLE_TYPE
|
||||
};
|
||||
m.complete(p, kind);
|
||||
}
|
||||
|
||||
// test never_type
|
||||
// type Never = !;
|
||||
fn never_type(p: &mut Parser) {
|
||||
assert!(p.at(EXCL));
|
||||
let m = p.start();
|
||||
p.bump();
|
||||
m.complete(p, NEVER_TYPE);
|
||||
}
|
||||
|
||||
fn pointer_type(p: &mut Parser) {
|
||||
assert!(p.at(STAR));
|
||||
let m = p.start();
|
||||
p.bump();
|
||||
|
||||
match p.current() {
|
||||
// test pointer_type_mut
|
||||
// type M = *mut ();
|
||||
// type C = *const ();
|
||||
MUT_KW | CONST_KW => p.bump(),
|
||||
_ => {
|
||||
// test_err pointer_type_no_mutability
|
||||
// type T = *();
|
||||
p.error(
|
||||
"expected mut or const in raw pointer type \
|
||||
(use `*mut T` or `*const T` as appropriate)",
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
type_no_bounds(p);
|
||||
m.complete(p, POINTER_TYPE);
|
||||
}
|
||||
|
||||
fn array_or_slice_type(p: &mut Parser) {
|
||||
assert!(p.at(L_BRACK));
|
||||
let m = p.start();
|
||||
p.bump();
|
||||
|
||||
type_(p);
|
||||
let kind = match p.current() {
|
||||
// test slice_type
|
||||
// type T = [()];
|
||||
R_BRACK => {
|
||||
p.bump();
|
||||
SLICE_TYPE
|
||||
}
|
||||
|
||||
// test array_type
|
||||
// type T = [(); 92];
|
||||
SEMI => {
|
||||
p.bump();
|
||||
expressions::expr(p);
|
||||
p.expect(R_BRACK);
|
||||
ARRAY_TYPE
|
||||
}
|
||||
// test_err array_type_missing_semi
|
||||
// type T = [() 92];
|
||||
_ => {
|
||||
p.error("expected `;` or `]`");
|
||||
SLICE_TYPE
|
||||
}
|
||||
};
|
||||
m.complete(p, kind);
|
||||
}
|
||||
|
||||
// test reference_type
|
||||
// type A = &();
|
||||
// type B = &'static ();
|
||||
// type C = &mut ();
|
||||
fn reference_type(p: &mut Parser) {
|
||||
assert!(p.at(AMP));
|
||||
let m = p.start();
|
||||
p.bump();
|
||||
p.eat(LIFETIME);
|
||||
p.eat(MUT_KW);
|
||||
type_no_bounds(p);
|
||||
m.complete(p, REFERENCE_TYPE);
|
||||
}
|
||||
|
||||
// test placeholder_type
|
||||
// type Placeholder = _;
|
||||
fn placeholder_type(p: &mut Parser) {
|
||||
assert!(p.at(UNDERSCORE));
|
||||
let m = p.start();
|
||||
p.bump();
|
||||
m.complete(p, PLACEHOLDER_TYPE);
|
||||
}
|
||||
|
||||
// test fn_pointer_type
|
||||
// type A = fn();
|
||||
// type B = unsafe fn();
|
||||
// type C = unsafe extern "C" fn();
|
||||
fn fn_pointer_type(p: &mut Parser) {
|
||||
let m = p.start();
|
||||
p.eat(UNSAFE_KW);
|
||||
if p.at(EXTERN_KW) {
|
||||
abi(p);
|
||||
}
|
||||
// test_err fn_pointer_type_missing_fn
|
||||
// type F = unsafe ();
|
||||
if !p.eat(FN_KW) {
|
||||
m.abandon(p);
|
||||
p.error("expected `fn`");
|
||||
return;
|
||||
}
|
||||
if p.at(L_PAREN) {
|
||||
params::param_list_opt_patterns(p);
|
||||
} else {
|
||||
p.error("expected parameters")
|
||||
}
|
||||
// test fn_pointer_type_with_ret
|
||||
// type F = fn() -> ();
|
||||
opt_fn_ret_type(p);
|
||||
m.complete(p, FN_POINTER_TYPE);
|
||||
}
|
||||
|
||||
pub(super) fn for_binder(p: &mut Parser) {
|
||||
assert!(p.at(FOR_KW));
|
||||
p.bump();
|
||||
if p.at(L_ANGLE) {
|
||||
type_params::opt_type_param_list(p);
|
||||
} else {
|
||||
p.error("expected `<`");
|
||||
}
|
||||
}
|
||||
|
||||
// test for_type
|
||||
// type A = for<'a> fn() -> ();
|
||||
pub(super) fn for_type(p: &mut Parser) {
|
||||
assert!(p.at(FOR_KW));
|
||||
let m = p.start();
|
||||
for_binder(p);
|
||||
match p.current() {
|
||||
FN_KW | UNSAFE_KW | EXTERN_KW => fn_pointer_type(p),
|
||||
_ if paths::is_path_start(p) => path_type_(p, false),
|
||||
_ => p.error("expected a path"),
|
||||
}
|
||||
m.complete(p, FOR_TYPE);
|
||||
}
|
||||
|
||||
// test impl_trait_type
|
||||
// type A = impl Iterator<Item=Foo<'a>> + 'a;
|
||||
fn impl_trait_type(p: &mut Parser) {
|
||||
assert!(p.at(IMPL_KW));
|
||||
let m = p.start();
|
||||
p.bump();
|
||||
type_params::bounds_without_colon(p);
|
||||
m.complete(p, IMPL_TRAIT_TYPE);
|
||||
}
|
||||
|
||||
// test dyn_trait_type
|
||||
// type A = dyn Iterator<Item=Foo<'a>> + 'a;
|
||||
fn dyn_trait_type(p: &mut Parser) {
|
||||
assert!(p.at(DYN_KW));
|
||||
let m = p.start();
|
||||
p.bump();
|
||||
type_params::bounds_without_colon(p);
|
||||
m.complete(p, DYN_TRAIT_TYPE);
|
||||
}
|
||||
|
||||
// test path_type
|
||||
// type A = Foo;
|
||||
// type B = ::Foo;
|
||||
// type C = self::Foo;
|
||||
// type D = super::Foo;
|
||||
pub(super) fn path_type(p: &mut Parser) {
|
||||
path_type_(p, true)
|
||||
}
|
||||
|
||||
// test macro_call_type
|
||||
// type A = foo!();
|
||||
// type B = crate::foo!();
|
||||
fn path_or_macro_type_(p: &mut Parser, allow_bounds: bool) {
|
||||
assert!(paths::is_path_start(p) || p.at(L_ANGLE));
|
||||
let m = p.start();
|
||||
paths::type_path(p);
|
||||
|
||||
let kind = if p.at(EXCL) {
|
||||
items::macro_call_after_excl(p);
|
||||
MACRO_CALL
|
||||
} else {
|
||||
PATH_TYPE
|
||||
};
|
||||
|
||||
if allow_bounds && p.eat(PLUS) {
|
||||
type_params::bounds_without_colon(p);
|
||||
}
|
||||
|
||||
m.complete(p, kind);
|
||||
}
|
||||
|
||||
pub(super) fn path_type_(p: &mut Parser, allow_bounds: bool) {
|
||||
assert!(paths::is_path_start(p) || p.at(L_ANGLE));
|
||||
let m = p.start();
|
||||
paths::type_path(p);
|
||||
// test path_type_with_bounds
|
||||
// fn foo() -> Box<T + 'f> {}
|
||||
if allow_bounds && p.eat(PLUS) {
|
||||
type_params::bounds_without_colon(p);
|
||||
}
|
||||
m.complete(p, PATH_TYPE);
|
||||
}
|
|
@@ -1,270 +0,0 @@
|
|||
use std::cell::Cell;
|
||||
|
||||
use drop_bomb::DropBomb;
|
||||
|
||||
use crate::{
|
||||
SyntaxKind::{self, ERROR, EOF, TOMBSTONE},
|
||||
parsing::{
|
||||
TokenSource, ParseError,
|
||||
token_set::TokenSet,
|
||||
event::Event,
|
||||
},
|
||||
};
|
||||
|
||||
/// `Parser` struct provides the low-level API for
|
||||
/// navigating through the stream of tokens and
|
||||
/// constructing the parse tree. The actual parsing
|
||||
/// happens in the `grammar` module.
|
||||
///
|
||||
/// However, the result of this `Parser` is not a real
|
||||
/// tree, but rather a flat stream of events of the form
|
||||
/// "start expression, consume number literal,
|
||||
/// finish expression". See `Event` docs for more.
|
||||
pub(crate) struct Parser<'t> {
|
||||
token_source: &'t dyn TokenSource,
|
||||
token_pos: usize,
|
||||
events: Vec<Event>,
|
||||
steps: Cell<u32>,
|
||||
}
|
||||
|
||||
impl<'t> Parser<'t> {
|
||||
pub(super) fn new(token_source: &'t dyn TokenSource) -> Parser<'t> {
|
||||
Parser { token_source, token_pos: 0, events: Vec::new(), steps: Cell::new(0) }
|
||||
}
|
||||
|
||||
pub(crate) fn finish(self) -> Vec<Event> {
|
||||
self.events
|
||||
}
|
||||
|
||||
/// Returns the kind of the current token.
|
||||
/// If the parser has already reached the end of input,
|
||||
/// the special `EOF` kind is returned.
|
||||
pub(crate) fn current(&self) -> SyntaxKind {
|
||||
self.nth(0)
|
||||
}
|
||||
|
||||
/// Returns the kinds of the current two tokens, if they are not separated
|
||||
/// by trivia.
|
||||
///
|
||||
/// Useful for parsing things like `>>`.
|
||||
pub(crate) fn current2(&self) -> Option<(SyntaxKind, SyntaxKind)> {
|
||||
let c1 = self.token_source.token_kind(self.token_pos);
|
||||
let c2 = self.token_source.token_kind(self.token_pos + 1);
|
||||
if self.token_source.is_token_joint_to_next(self.token_pos) {
|
||||
Some((c1, c2))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the kinds of the current three tokens, if they are not separated
|
||||
/// by trivia.
|
||||
///
|
||||
/// Useful for parsing things like `=>>`.
|
||||
pub(crate) fn current3(&self) -> Option<(SyntaxKind, SyntaxKind, SyntaxKind)> {
|
||||
let c1 = self.token_source.token_kind(self.token_pos);
|
||||
let c2 = self.token_source.token_kind(self.token_pos + 1);
|
||||
let c3 = self.token_source.token_kind(self.token_pos + 2);
|
||||
if self.token_source.is_token_joint_to_next(self.token_pos)
|
||||
&& self.token_source.is_token_joint_to_next(self.token_pos + 1)
|
||||
{
|
||||
Some((c1, c2, c3))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Lookahead operation: returns the kind of the token `n` positions ahead.
|
||||
pub(crate) fn nth(&self, n: usize) -> SyntaxKind {
|
||||
let steps = self.steps.get();
|
||||
assert!(steps <= 10_000_000, "the parser seems stuck");
|
||||
self.steps.set(steps + 1);
|
||||
self.token_source.token_kind(self.token_pos + n)
|
||||
}
|
||||
|
||||
/// Checks if the current token is `kind`.
|
||||
pub(crate) fn at(&self, kind: SyntaxKind) -> bool {
|
||||
self.current() == kind
|
||||
}
|
||||
|
||||
/// Checks if the current token is in `kinds`.
|
||||
pub(crate) fn at_ts(&self, kinds: TokenSet) -> bool {
|
||||
kinds.contains(self.current())
|
||||
}
|
||||
|
||||
/// Checks if the current token is a contextual keyword with text `kw`.
|
||||
pub(crate) fn at_contextual_kw(&self, kw: &str) -> bool {
|
||||
self.token_source.is_keyword(self.token_pos, kw)
|
||||
}
|
||||
|
||||
/// Starts a new node in the syntax tree. All nodes and tokens
|
||||
/// consumed between the `start` and the corresponding `Marker::complete`
|
||||
/// belong to the same node.
|
||||
pub(crate) fn start(&mut self) -> Marker {
|
||||
let pos = self.events.len() as u32;
|
||||
self.push_event(Event::tombstone());
|
||||
Marker::new(pos)
|
||||
}
|
||||
|
||||
/// Advances the parser by one token unconditionally.
|
||||
pub(crate) fn bump(&mut self) {
|
||||
let kind = self.nth(0);
|
||||
if kind == EOF {
|
||||
return;
|
||||
}
|
||||
self.do_bump(kind, 1);
|
||||
}
|
||||
|
||||
/// Advances the parser by one token, remapping its kind.
|
||||
/// This is useful to create contextual keywords from
|
||||
/// identifiers. For example, the lexer creates a `union`
/// *identifier* token, but the parser remaps it to the
/// `union` keyword, and the keyword is what ends up in the
|
||||
/// final tree.
|
||||
pub(crate) fn bump_remap(&mut self, kind: SyntaxKind) {
|
||||
if self.nth(0) == EOF {
|
||||
// TODO: panic!?
|
||||
return;
|
||||
}
|
||||
self.do_bump(kind, 1);
|
||||
}
|
||||
|
||||
/// Advances the parser by `n` tokens, remapping its kind.
|
||||
/// This is useful to create compound tokens from parts. For
|
||||
/// example, an `<<` token is two consecutive remapped `<` tokens
|
||||
pub(crate) fn bump_compound(&mut self, kind: SyntaxKind, n: u8) {
|
||||
self.do_bump(kind, n);
|
||||
}
|
||||
|
||||
/// Emit an error with the given `message`.
|
||||
/// TODO: this should be much more fancy and support
|
||||
/// structured errors with spans and notes, like rustc
|
||||
/// does.
|
||||
pub(crate) fn error<T: Into<String>>(&mut self, message: T) {
|
||||
let msg = ParseError(message.into());
|
||||
self.push_event(Event::Error { msg })
|
||||
}
|
||||
|
||||
/// Consume the next token if `kind` matches.
|
||||
pub(crate) fn eat(&mut self, kind: SyntaxKind) -> bool {
|
||||
if !self.at(kind) {
|
||||
return false;
|
||||
}
|
||||
self.bump();
|
||||
true
|
||||
}
|
||||
|
||||
/// Consume the next token if it is `kind` or emit an error
|
||||
/// otherwise.
|
||||
pub(crate) fn expect(&mut self, kind: SyntaxKind) -> bool {
|
||||
if self.eat(kind) {
|
||||
return true;
|
||||
}
|
||||
self.error(format!("expected {:?}", kind));
|
||||
false
|
||||
}
|
||||
|
||||
/// Create an error node and consume the next token.
|
||||
pub(crate) fn err_and_bump(&mut self, message: &str) {
|
||||
self.err_recover(message, TokenSet::empty());
|
||||
}
|
||||
|
||||
/// Create an error node and consume the next token, unless the current token
/// is in the `recovery` set (or is a curly brace), in which case only an error is emitted.
|
||||
pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) {
|
||||
if self.at(SyntaxKind::L_CURLY) || self.at(SyntaxKind::R_CURLY) || self.at_ts(recovery) {
|
||||
self.error(message);
|
||||
} else {
|
||||
let m = self.start();
|
||||
self.error(message);
|
||||
self.bump();
|
||||
m.complete(self, ERROR);
|
||||
};
|
||||
}
|
||||
|
||||
fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) {
|
||||
self.token_pos += usize::from(n_raw_tokens);
|
||||
self.push_event(Event::Token { kind, n_raw_tokens });
|
||||
}
|
||||
|
||||
fn push_event(&mut self, event: Event) {
|
||||
self.events.push(event)
|
||||
}
|
||||
}
|
||||
|
||||
/// See `Parser::start`.
|
||||
pub(crate) struct Marker {
|
||||
pos: u32,
|
||||
bomb: DropBomb,
|
||||
}
|
||||
|
||||
impl Marker {
|
||||
fn new(pos: u32) -> Marker {
|
||||
Marker { pos, bomb: DropBomb::new("Marker must be either completed or abandoned") }
|
||||
}
|
||||
|
||||
/// Finishes the syntax tree node and assigns `kind` to it,
|
||||
/// and creates a `CompletedMarker` for possible future
/// operations like `.precede()` to deal with `forward_parent`.
|
||||
pub(crate) fn complete(mut self, p: &mut Parser, kind: SyntaxKind) -> CompletedMarker {
|
||||
self.bomb.defuse();
|
||||
let idx = self.pos as usize;
|
||||
match p.events[idx] {
|
||||
Event::Start { kind: ref mut slot, .. } => {
|
||||
*slot = kind;
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
p.push_event(Event::Finish);
|
||||
CompletedMarker::new(self.pos, kind)
|
||||
}
|
||||
|
||||
/// Abandons the syntax tree node. All its children
|
||||
/// are attached to its parent instead.
|
||||
pub(crate) fn abandon(mut self, p: &mut Parser) {
|
||||
self.bomb.defuse();
|
||||
let idx = self.pos as usize;
|
||||
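// If the tombstone is the last event, pop it; otherwise leave it in place -
// tombstone `Start` events are ignored when the event stream is converted to a tree.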
if idx == p.events.len() - 1 {
|
||||
match p.events.pop() {
|
||||
Some(Event::Start { kind: TOMBSTONE, forward_parent: None }) => (),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct CompletedMarker(u32, SyntaxKind);
|
||||
|
||||
impl CompletedMarker {
|
||||
fn new(pos: u32, kind: SyntaxKind) -> Self {
|
||||
CompletedMarker(pos, kind)
|
||||
}
|
||||
|
||||
/// This method allows creating a new node which starts
|
||||
/// *before* the current one. That is, parser could start
|
||||
/// node `A`, then complete it, and then after parsing the
|
||||
/// whole `A`, decide that it should have started some node
|
||||
/// `B` before starting `A`. `precede` allows doing exactly
|
||||
/// that. See also docs about `forward_parent` in `Event::Start`.
|
||||
///
|
||||
/// Given the completed events `[START, FINISH]` and the corresponding
/// `CompletedMarker(pos: 0, _)`, `precede` appends a new `START` event,
/// yielding `[START, FINISH, NEWSTART]`, and marks `NEWSTART` as `START`'s
/// parent by storing their relative distance (2 in this case) in `forward_parent`.
|
||||
pub(crate) fn precede(self, p: &mut Parser) -> Marker {
|
||||
let new_pos = p.start();
|
||||
let idx = self.0 as usize;
|
||||
match p.events[idx] {
|
||||
Event::Start { ref mut forward_parent, .. } => {
|
||||
*forward_parent = Some(new_pos.pos - self.0);
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
new_pos
|
||||
}
|
||||
|
||||
pub(crate) fn kind(&self) -> SyntaxKind {
|
||||
self.1
|
||||
}
|
||||
}
|
|
@@ -1,41 +0,0 @@
|
|||
use crate::SyntaxKind;
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub(crate) struct TokenSet(u128);
|
||||
|
||||
impl TokenSet {
|
||||
pub(crate) const fn empty() -> TokenSet {
|
||||
TokenSet(0)
|
||||
}
|
||||
|
||||
pub(crate) const fn singleton(kind: SyntaxKind) -> TokenSet {
|
||||
TokenSet(mask(kind))
|
||||
}
|
||||
|
||||
pub(crate) const fn union(self, other: TokenSet) -> TokenSet {
|
||||
TokenSet(self.0 | other.0)
|
||||
}
|
||||
|
||||
pub(crate) fn contains(&self, kind: SyntaxKind) -> bool {
|
||||
self.0 & mask(kind) != 0
|
||||
}
|
||||
}
|
||||
|
||||
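// One bit per `SyntaxKind`; this relies on there being no more than 128 token kinds.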
const fn mask(kind: SyntaxKind) -> u128 {
|
||||
1u128 << (kind as usize)
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! token_set {
|
||||
($($t:ident),*) => { TokenSet::empty()$(.union(TokenSet::singleton($t)))* };
|
||||
($($t:ident),* ,) => { token_set!($($t),*) };
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn token_set_works_for_tokens() {
|
||||
use crate::SyntaxKind::*;
|
||||
let ts = token_set![EOF, SHEBANG];
|
||||
assert!(ts.contains(EOF));
|
||||
assert!(ts.contains(SHEBANG));
|
||||
assert!(!ts.contains(PLUS));
|
||||
}
|