Merge commit 'aa9bc86125' into sync-from-ra

Author:  Laurențiu Nicola
Date:    2023-06-05 12:04:23 +03:00
Parent:  1570299af4
Commit:  c48062fe2a
Changes: 598 changed files with 57696 additions and 17615 deletions

View file

@ -66,6 +66,10 @@ pub(crate) mod entry {
patterns::pattern_single(p);
}
pub(crate) fn pat_top(p: &mut Parser<'_>) {
patterns::pattern_top(p);
}
pub(crate) fn ty(p: &mut Parser<'_>) {
types::type_(p);
}
@ -218,17 +222,22 @@ fn opt_visibility(p: &mut Parser<'_>, in_tuple_field: bool) -> bool {
// pub(self) struct S;
// pub(super) struct S;
// test_err crate_visibility_empty_recover
// pub() struct S;
// test pub_parens_typepath
// struct B(pub (super::A));
// struct B(pub (crate::A,));
T![crate] | T![self] | T![super] | T![ident] if p.nth(2) != T![:] => {
T![crate] | T![self] | T![super] | T![ident] | T![')'] if p.nth(2) != T![:] => {
// If we are in a tuple struct, then the parens following `pub`
// might be a tuple field, not part of the visibility. So in that
// case we don't want to consume an identifier.
// test pub_tuple_field
// struct MyStruct(pub (u32, u32));
if !(in_tuple_field && matches!(p.nth(1), T![ident])) {
// struct MyStruct(pub (u32));
// struct MyStruct(pub ());
if !(in_tuple_field && matches!(p.nth(1), T![ident] | T![')'])) {
p.bump(T!['(']);
paths::use_path(p);
p.expect(T![')']);
@ -243,7 +252,7 @@ fn opt_visibility(p: &mut Parser<'_>, in_tuple_field: bool) -> bool {
paths::use_path(p);
p.expect(T![')']);
}
_ => (),
_ => {}
}
}
m.complete(p, VISIBILITY);
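
The extra `T![')']` arms above extend the lookahead so that a `)` right after `pub (` is handled like an identifier: in a tuple field the parens stay with the field type, and a stray `pub()` elsewhere recovers with an error instead of derailing the parse. A hedged sketch of the shapes being told apart, in ordinary Rust source (illustrative names, compiles as a standalone crate):

    mod outer {
        // Restricted visibilities: the tokens inside the parens form a path.
        pub(crate) struct A;
        pub(super) struct B;

        // Tuple fields: here the parens after `pub` start the field *type*,
        // so `(u32, u32)` and `()` must not be consumed as a visibility path.
        pub struct C(pub (u32, u32));
        pub struct D(pub ());
    }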

View file

@ -4,8 +4,8 @@ use crate::grammar::attributes::ATTRIBUTE_FIRST;
use super::*;
pub(crate) use self::atom::{block_expr, match_arm_list};
pub(super) use self::atom::{literal, LITERAL_FIRST};
pub(crate) use atom::{block_expr, match_arm_list};
pub(super) use atom::{literal, LITERAL_FIRST};
#[derive(PartialEq, Eq)]
pub(super) enum Semicolon {
@ -188,47 +188,56 @@ struct Restrictions {
prefer_stmt: bool,
}
enum Associativity {
Left,
Right,
}
/// Binding powers of operators for a Pratt parser.
///
/// See <https://matklad.github.io/2020/04/13/simple-but-powerful-pratt-parsing.html>
///
/// Note that Rust doesn't define associativity for some infix operators (e.g. `==` and `..`) and
/// requires parentheses to disambiguate. We just treat them as left associative.
#[rustfmt::skip]
fn current_op(p: &Parser<'_>) -> (u8, SyntaxKind) {
const NOT_AN_OP: (u8, SyntaxKind) = (0, T![@]);
fn current_op(p: &Parser<'_>) -> (u8, SyntaxKind, Associativity) {
use Associativity::*;
const NOT_AN_OP: (u8, SyntaxKind, Associativity) = (0, T![@], Left);
match p.current() {
T![|] if p.at(T![||]) => (3, T![||]),
T![|] if p.at(T![|=]) => (1, T![|=]),
T![|] => (6, T![|]),
T![>] if p.at(T![>>=]) => (1, T![>>=]),
T![>] if p.at(T![>>]) => (9, T![>>]),
T![>] if p.at(T![>=]) => (5, T![>=]),
T![>] => (5, T![>]),
T![|] if p.at(T![||]) => (3, T![||], Left),
T![|] if p.at(T![|=]) => (1, T![|=], Right),
T![|] => (6, T![|], Left),
T![>] if p.at(T![>>=]) => (1, T![>>=], Right),
T![>] if p.at(T![>>]) => (9, T![>>], Left),
T![>] if p.at(T![>=]) => (5, T![>=], Left),
T![>] => (5, T![>], Left),
T![=] if p.at(T![=>]) => NOT_AN_OP,
T![=] if p.at(T![==]) => (5, T![==]),
T![=] => (1, T![=]),
T![<] if p.at(T![<=]) => (5, T![<=]),
T![<] if p.at(T![<<=]) => (1, T![<<=]),
T![<] if p.at(T![<<]) => (9, T![<<]),
T![<] => (5, T![<]),
T![+] if p.at(T![+=]) => (1, T![+=]),
T![+] => (10, T![+]),
T![^] if p.at(T![^=]) => (1, T![^=]),
T![^] => (7, T![^]),
T![%] if p.at(T![%=]) => (1, T![%=]),
T![%] => (11, T![%]),
T![&] if p.at(T![&=]) => (1, T![&=]),
T![=] if p.at(T![==]) => (5, T![==], Left),
T![=] => (1, T![=], Right),
T![<] if p.at(T![<=]) => (5, T![<=], Left),
T![<] if p.at(T![<<=]) => (1, T![<<=], Right),
T![<] if p.at(T![<<]) => (9, T![<<], Left),
T![<] => (5, T![<], Left),
T![+] if p.at(T![+=]) => (1, T![+=], Right),
T![+] => (10, T![+], Left),
T![^] if p.at(T![^=]) => (1, T![^=], Right),
T![^] => (7, T![^], Left),
T![%] if p.at(T![%=]) => (1, T![%=], Right),
T![%] => (11, T![%], Left),
T![&] if p.at(T![&=]) => (1, T![&=], Right),
// If you update this, remember to update `expr_let()` too.
T![&] if p.at(T![&&]) => (4, T![&&]),
T![&] => (8, T![&]),
T![/] if p.at(T![/=]) => (1, T![/=]),
T![/] => (11, T![/]),
T![*] if p.at(T![*=]) => (1, T![*=]),
T![*] => (11, T![*]),
T![.] if p.at(T![..=]) => (2, T![..=]),
T![.] if p.at(T![..]) => (2, T![..]),
T![!] if p.at(T![!=]) => (5, T![!=]),
T![-] if p.at(T![-=]) => (1, T![-=]),
T![-] => (10, T![-]),
T![as] => (12, T![as]),
T![&] if p.at(T![&&]) => (4, T![&&], Left),
T![&] => (8, T![&], Left),
T![/] if p.at(T![/=]) => (1, T![/=], Right),
T![/] => (11, T![/], Left),
T![*] if p.at(T![*=]) => (1, T![*=], Right),
T![*] => (11, T![*], Left),
T![.] if p.at(T![..=]) => (2, T![..=], Left),
T![.] if p.at(T![..]) => (2, T![..], Left),
T![!] if p.at(T![!=]) => (5, T![!=], Left),
T![-] if p.at(T![-=]) => (1, T![-=], Right),
T![-] => (10, T![-], Left),
T![as] => (12, T![as], Left),
_ => NOT_AN_OP
}
@ -273,7 +282,7 @@ fn expr_bp(
loop {
let is_range = p.at(T![..]) || p.at(T![..=]);
let (op_bp, op) = current_op(p);
let (op_bp, op, associativity) = current_op(p);
if op_bp < bp {
break;
}
@ -306,7 +315,11 @@ fn expr_bp(
}
}
expr_bp(p, None, Restrictions { prefer_stmt: false, ..r }, op_bp + 1);
let op_bp = match associativity {
Associativity::Left => op_bp + 1,
Associativity::Right => op_bp,
};
expr_bp(p, None, Restrictions { prefer_stmt: false, ..r }, op_bp);
lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR });
}
Some((lhs, BlockLike::NotBlock))
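
The `Associativity` returned by `current_op` feeds the classic Pratt-parser rule visible above: recursing with `op_bp + 1` makes an operator left-associative (its right-hand side must bind strictly tighter), while recursing with the unchanged `op_bp` makes it right-associative, which is what the assignment-style operators need. A minimal self-contained sketch of the same loop, independent of rust-analyzer's parser types (operator set and names are illustrative):

    #[derive(Clone, Copy)]
    enum Assoc { Left, Right }

    // Binding power and associativity for two demo operators.
    fn op_info(op: char) -> Option<(u8, Assoc)> {
        match op {
            '=' => Some((1, Assoc::Right)), // assignment-like: right-associative
            '+' => Some((10, Assoc::Left)), // addition-like: left-associative
            _ => None,
        }
    }

    // Pratt loop over single-character atoms and operators; returns a fully
    // parenthesized rendering so the associativity is visible in the output.
    fn expr_bp(tokens: &mut std::iter::Peekable<std::str::Chars<'_>>, bp: u8) -> String {
        let mut lhs = tokens.next().expect("expected an atom").to_string();
        while let Some(&op) = tokens.peek() {
            let Some((op_bp, assoc)) = op_info(op) else { break };
            if op_bp < bp {
                break;
            }
            tokens.next(); // consume the operator
            let next_bp = match assoc {
                Assoc::Left => op_bp + 1, // rhs must bind strictly tighter
                Assoc::Right => op_bp,
            };
            let rhs = expr_bp(tokens, next_bp);
            lhs = format!("({lhs} {op} {rhs})");
        }
        lhs
    }

    fn main() {
        assert_eq!(expr_bp(&mut "a+b+c".chars().peekable(), 0), "((a + b) + c)");
        assert_eq!(expr_bp(&mut "a=b=c".chars().peekable(), 0), "(a = (b = c))");
    }
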
@ -417,7 +430,7 @@ fn postfix_expr(
allow_calls = true;
block_like = BlockLike::NotBlock;
}
return (lhs, block_like);
(lhs, block_like)
}
fn postfix_dot_expr<const FLOAT_RECOVERY: bool>(

View file

@ -12,6 +12,8 @@ use super::*;
// let _ = r"d";
// let _ = b"e";
// let _ = br"f";
// let _ = c"g";
// let _ = cr"h";
// }
pub(crate) const LITERAL_FIRST: TokenSet = TokenSet::new(&[
T![true],
@ -22,6 +24,7 @@ pub(crate) const LITERAL_FIRST: TokenSet = TokenSet::new(&[
CHAR,
STRING,
BYTE_STRING,
C_STRING,
]);
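
`C_STRING` covers the C string literal forms exercised by the `c"g"` / `cr"h"` test lines above; in the language itself these literals evaluate to `&CStr`. A small usage sketch in ordinary Rust, assuming a toolchain recent enough to have C string literals available:

    fn main() {
        // A NUL terminator is appended implicitly; escapes work as in normal strings.
        let a: &core::ffi::CStr = c"hello";
        // The raw variant keeps backslashes literally, like `r"..."`.
        let b: &core::ffi::CStr = cr"no \n escapes here";
        assert_eq!(a.to_bytes(), b"hello");
        assert!(b.to_bytes().contains(&b'\\'));
    }
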
pub(crate) fn literal(p: &mut Parser<'_>) -> Option<CompletedMarker> {
@ -181,6 +184,16 @@ fn tuple_expr(p: &mut Parser<'_>) -> CompletedMarker {
let mut saw_comma = false;
let mut saw_expr = false;
// test_err tuple_expr_leading_comma
// fn foo() {
// (,);
// }
if p.eat(T![,]) {
p.error("expected expression");
saw_comma = true;
}
while !p.at(EOF) && !p.at(T![')']) {
saw_expr = true;

View file

@ -28,6 +28,7 @@ const GENERIC_ARG_FIRST: TokenSet = TokenSet::new(&[
BYTE,
STRING,
BYTE_STRING,
C_STRING,
])
.union(types::TYPE_FIRST);
@ -35,7 +36,7 @@ const GENERIC_ARG_FIRST: TokenSet = TokenSet::new(&[
// type T = S<i32>;
fn generic_arg(p: &mut Parser<'_>) -> bool {
match p.current() {
LIFETIME_IDENT => lifetime_arg(p),
LIFETIME_IDENT if !p.nth_at(1, T![+]) => lifetime_arg(p),
T!['{'] | T![true] | T![false] | T![-] => const_arg(p),
k if k.is_literal() => const_arg(p),
// test associated_type_bounds
@ -76,6 +77,29 @@ fn generic_arg(p: &mut Parser<'_>) -> bool {
}
}
}
IDENT if p.nth_at(1, T!['(']) => {
let m = p.start();
name_ref(p);
params::param_list_fn_trait(p);
if p.at(T![:]) && !p.at(T![::]) {
// test associated_return_type_bounds
// fn foo<T: Foo<foo(): Send, bar(i32): Send, baz(i32, i32): Send>>() {}
generic_params::bounds(p);
m.complete(p, ASSOC_TYPE_ARG);
} else {
// test bare_dyn_types_with_paren_as_generic_args
// type A = S<Fn(i32)>;
// type A = S<Fn(i32) + Send>;
// type B = S<Fn(i32) -> i32>;
// type C = S<Fn(i32) -> i32 + Send>;
opt_ret_type(p);
let m = m.complete(p, PATH_SEGMENT).precede(p).complete(p, PATH);
let m = paths::type_path_for_qualifier(p, m);
let m = m.precede(p).complete(p, PATH_TYPE);
let m = types::opt_type_bounds_as_dyn_trait_type(p, m);
m.precede(p).complete(p, TYPE_ARG);
}
}
_ if p.at_ts(types::TYPE_FIRST) => type_arg(p),
_ => return false,
}
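
Two lookahead decisions are packed into this match: a lone lifetime (`S<'a>`) is still a plain lifetime argument, while a lifetime followed by `+` falls through to the trait-object handling added in the types module below (`S<'static + Trait>`); and an identifier followed by `(` becomes either an associated-return-type bound (`foo(): Send`) or a parenthesized `Fn`-style path. A small sketch of the `Fn`-style forms, written with explicit `dyn` so it compiles on a current edition (the inline tests above additionally cover the spellings without `dyn`, which the parser accepts for older editions and error recovery):

    use core::marker::PhantomData;

    // Illustrative stand-in so the generic-argument forms have something to apply to.
    struct S<T: ?Sized>(PhantomData<T>);

    type C = S<dyn Fn(i32) -> i32>;        // parenthesized Fn-style path as a generic argument
    type D = S<dyn Fn(i32) -> i32 + Send>; // ...optionally followed by further bounds
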

View file

@ -19,7 +19,7 @@ use super::*;
// struct S;
pub(super) fn mod_contents(p: &mut Parser<'_>, stop_on_r_curly: bool) {
attributes::inner_attrs(p);
while !p.at(EOF) && !(p.at(T!['}']) && stop_on_r_curly) {
while !(p.at(EOF) || (p.at(T!['}']) && stop_on_r_curly)) {
item_or_macro(p, stop_on_r_curly);
}
}

View file

@ -136,6 +136,7 @@ fn opt_path_type_args(p: &mut Parser<'_>, mode: Mode) {
Mode::Type => {
// test typepathfn_with_coloncolon
// type F = Start::(Middle) -> (Middle)::End;
// type GenericArg = S<Start(Middle)::End>;
if p.at(T![::]) && p.nth_at(2, T!['(']) {
p.bump(T![::]);
}

View file

@ -5,6 +5,7 @@ pub(super) const PATTERN_FIRST: TokenSet =
T![box],
T![ref],
T![mut],
T![const],
T!['('],
T!['['],
T![&],
@ -15,6 +16,10 @@ pub(super) const PATTERN_FIRST: TokenSet =
const PAT_TOP_FIRST: TokenSet = PATTERN_FIRST.union(TokenSet::new(&[T![|]]));
/// Set of possible tokens at the start of a range pattern's end bound.
const RANGE_PAT_END_FIRST: TokenSet =
expressions::LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[T![-], T![const]]));
pub(crate) fn pattern(p: &mut Parser<'_>) {
pattern_r(p, PAT_RECOVERY_SET);
}
@ -105,6 +110,52 @@ fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
return;
}
// test exclusive_range_pat
// fn main() {
// match 42 {
// ..0 => {}
// 1..2 => {}
// }
// }
// test dot_dot_pat
// fn main() {
// let .. = ();
// //
// // Tuples
// //
// let (a, ..) = ();
// let (a, ..,) = ();
// let Tuple(a, ..) = ();
// let Tuple(a, ..,) = ();
// let (.., ..) = ();
// let Tuple(.., ..) = ();
// let (.., a, ..) = ();
// let Tuple(.., a, ..) = ();
// //
// // Slices
// //
// let [..] = ();
// let [head, ..] = ();
// let [head, tail @ ..] = ();
// let [head, .., cons] = ();
// let [head, mid @ .., cons] = ();
// let [head, .., .., cons] = ();
// let [head, .., mid, tail @ ..] = ();
// let [head, .., mid, .., cons] = ();
// }
if p.at(T![..]) {
let m = p.start();
p.bump(T![..]);
if p.at_ts(RANGE_PAT_END_FIRST) {
atom_pat(p, recovery_set);
m.complete(p, RANGE_PAT);
} else {
m.complete(p, REST_PAT);
}
return;
}
if let Some(lhs) = atom_pat(p, recovery_set) {
for range_op in [T![...], T![..=], T![..]] {
if p.at(range_op) {
@ -173,7 +224,6 @@ fn atom_pat(p: &mut Parser<'_>, recovery_set: TokenSet) -> Option<CompletedMarke
_ if paths::is_path_start(p) => path_or_macro_pat(p),
_ if is_literal_pat_start(p) => literal_pat(p),
T![.] if p.at(T![..]) => rest_pat(p),
T![_] => wildcard_pat(p),
T![&] => ref_pat(p),
T!['('] => tuple_pat(p),
@ -334,39 +384,6 @@ fn wildcard_pat(p: &mut Parser<'_>) -> CompletedMarker {
m.complete(p, WILDCARD_PAT)
}
// test dot_dot_pat
// fn main() {
// let .. = ();
// //
// // Tuples
// //
// let (a, ..) = ();
// let (a, ..,) = ();
// let Tuple(a, ..) = ();
// let Tuple(a, ..,) = ();
// let (.., ..) = ();
// let Tuple(.., ..) = ();
// let (.., a, ..) = ();
// let Tuple(.., a, ..) = ();
// //
// // Slices
// //
// let [..] = ();
// let [head, ..] = ();
// let [head, tail @ ..] = ();
// let [head, .., cons] = ();
// let [head, mid @ .., cons] = ();
// let [head, .., .., cons] = ();
// let [head, .., mid, tail @ ..] = ();
// let [head, .., mid, .., cons] = ();
// }
fn rest_pat(p: &mut Parser<'_>) -> CompletedMarker {
assert!(p.at(T![..]));
let m = p.start();
p.bump(T![..]);
m.complete(p, REST_PAT)
}
// test ref_pat
// fn main() {
// let &a = ();
@ -396,6 +413,16 @@ fn tuple_pat(p: &mut Parser<'_>) -> CompletedMarker {
let mut has_comma = false;
let mut has_pat = false;
let mut has_rest = false;
// test_err tuple_pat_leading_comma
// fn foo() {
// let (,);
// }
if p.eat(T![,]) {
p.error("expected pattern");
has_comma = true;
}
while !p.at(EOF) && !p.at(T![')']) {
has_pat = true;
if !p.at_ts(PAT_TOP_FIRST) {
@ -483,6 +510,14 @@ fn box_pat(p: &mut Parser<'_>) -> CompletedMarker {
// fn main() {
// let const { 15 } = ();
// let const { foo(); bar() } = ();
//
// match 42 {
// const { 0 } .. const { 1 } => (),
// .. const { 0 } => (),
// const { 2 } .. => (),
// }
//
// let (const { () },) = ();
// }
fn const_block_pat(p: &mut Parser<'_>) -> CompletedMarker {
assert!(p.at(T![const]));
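
With the branch added in `pattern_single_r`, a leading `..` is classified by one token of lookahead: if the next token is in `RANGE_PAT_END_FIRST` (a literal, a path start, `-`, or `const`), the `..` opens a half-open range pattern, otherwise it is a plain rest pattern. A sketch of the two readings in ordinary Rust (assumes a toolchain where exclusive range patterns such as `..0` are stable):

    fn classify(x: i32, pair: (i32, i32)) -> &'static str {
        match x {
            // `..` followed by a literal: half-open RANGE_PAT.
            ..0 => "below zero",
            0..10 => "small",
            _ => match pair {
                // `..` followed by `)`: REST_PAT inside a tuple pattern.
                (first, ..) if first > 0 => "tuple with rest pattern",
                _ => "fallback",
            },
        }
    }

    fn main() {
        assert_eq!(classify(-1, (0, 0)), "below zero");
        assert_eq!(classify(5, (0, 0)), "small");
        assert_eq!(classify(99, (1, 2)), "tuple with rest pattern");
    }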

View file

@ -15,6 +15,7 @@ pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(TokenSet::new(&[
T![impl],
T![dyn],
T![Self],
LIFETIME_IDENT,
]));
pub(super) const TYPE_RECOVERY_SET: TokenSet = TokenSet::new(&[
@ -49,6 +50,7 @@ fn type_with_bounds_cond(p: &mut Parser<'_>, allow_bounds: bool) {
// Some path types are not allowed to have bounds (no plus)
T![<] => path_type_(p, allow_bounds),
_ if paths::is_path_start(p) => path_or_macro_type_(p, allow_bounds),
LIFETIME_IDENT if p.nth_at(1, T![+]) => bare_dyn_trait_type(p),
_ => {
p.err_recover("expected type", TYPE_RECOVERY_SET);
}
@ -59,7 +61,7 @@ pub(super) fn ascription(p: &mut Parser<'_>) {
assert!(p.at(T![:]));
p.bump(T![:]);
if p.at(T![=]) {
// recover from `let x: = expr;`, `const X: = expr;` and similars
// recover from `let x: = expr;`, `const X: = expr;` and similar
// hopefully no type starts with `=`
p.error("missing type");
return;
@ -275,6 +277,15 @@ fn dyn_trait_type(p: &mut Parser<'_>) {
m.complete(p, DYN_TRAIT_TYPE);
}
// test bare_dyn_types_with_leading_lifetime
// type A = 'static + Trait;
// type B = S<'static + Trait>;
fn bare_dyn_trait_type(p: &mut Parser<'_>) {
let m = p.start();
generic_params::bounds_without_colon(p);
m.complete(p, DYN_TRAIT_TYPE);
}
// test path_type
// type A = Foo;
// type B = ::Foo;
@ -326,13 +337,16 @@ pub(super) fn path_type_(p: &mut Parser<'_>, allow_bounds: bool) {
/// This turns a parsed PATH_TYPE or FOR_TYPE optionally into a DYN_TRAIT_TYPE
/// with a TYPE_BOUND_LIST
fn opt_type_bounds_as_dyn_trait_type(p: &mut Parser<'_>, type_marker: CompletedMarker) {
pub(super) fn opt_type_bounds_as_dyn_trait_type(
p: &mut Parser<'_>,
type_marker: CompletedMarker,
) -> CompletedMarker {
assert!(matches!(
type_marker.kind(),
SyntaxKind::PATH_TYPE | SyntaxKind::FOR_TYPE | SyntaxKind::MACRO_TYPE
));
if !p.at(T![+]) {
return;
return type_marker;
}
// First create a TYPE_BOUND from the completed PATH_TYPE
@ -349,5 +363,5 @@ fn opt_type_bounds_as_dyn_trait_type(p: &mut Parser<'_>, type_marker: CompletedM
let m = generic_params::bounds_without_colon_m(p, m);
// Finally precede everything with DYN_TRAIT_TYPE
m.precede(p).complete(p, DYN_TRAIT_TYPE);
m.precede(p).complete(p, DYN_TRAIT_TYPE)
}

View file

@ -36,7 +36,7 @@ impl<'a> LexedStr<'a> {
};
for token in rustc_lexer::tokenize(&text[conv.offset..]) {
let token_text = &text[conv.offset..][..token.len];
let token_text = &text[conv.offset..][..token.len as usize];
conv.extend_token(&token.kind, token_text);
}
@ -49,8 +49,8 @@ impl<'a> LexedStr<'a> {
return None;
}
let token = rustc_lexer::first_token(text);
if token.len != text.len() {
let token = rustc_lexer::tokenize(text).next()?;
if token.len as usize != text.len() {
return None;
}
@ -175,6 +175,10 @@ impl<'a> Converter<'a> {
rustc_lexer::TokenKind::Ident => {
SyntaxKind::from_keyword(token_text).unwrap_or(IDENT)
}
rustc_lexer::TokenKind::InvalidIdent => {
err = "Ident contains invalid characters";
IDENT
}
rustc_lexer::TokenKind::RawIdent => IDENT,
rustc_lexer::TokenKind::Literal { kind, .. } => {
@ -221,6 +225,7 @@ impl<'a> Converter<'a> {
err = "unknown literal prefix";
IDENT
}
rustc_lexer::TokenKind::Eof => EOF,
}
};
@ -268,35 +273,30 @@ impl<'a> Converter<'a> {
}
BYTE_STRING
}
rustc_lexer::LiteralKind::RawStr { err: raw_str_err, .. } => {
if let Some(raw_str_err) = raw_str_err {
err = match raw_str_err {
rustc_lexer::RawStrError::InvalidStarter { .. } => "Missing `\"` symbol after `#` symbols to begin the raw string literal",
rustc_lexer::RawStrError::NoTerminator { expected, found, .. } => if expected == found {
"Missing trailing `\"` to terminate the raw string literal"
} else {
"Missing trailing `\"` with `#` symbols to terminate the raw string literal"
},
rustc_lexer::RawStrError::TooManyDelimiters { .. } => "Too many `#` symbols: raw strings may be delimited by up to 65535 `#` symbols",
};
};
rustc_lexer::LiteralKind::CStr { terminated } => {
if !terminated {
err = "Missing trailing `\"` symbol to terminate the string literal";
}
C_STRING
}
rustc_lexer::LiteralKind::RawStr { n_hashes } => {
if n_hashes.is_none() {
err = "Invalid raw string literal";
}
STRING
}
rustc_lexer::LiteralKind::RawByteStr { err: raw_str_err, .. } => {
if let Some(raw_str_err) = raw_str_err {
err = match raw_str_err {
rustc_lexer::RawStrError::InvalidStarter { .. } => "Missing `\"` symbol after `#` symbols to begin the raw byte string literal",
rustc_lexer::RawStrError::NoTerminator { expected, found, .. } => if expected == found {
"Missing trailing `\"` to terminate the raw byte string literal"
} else {
"Missing trailing `\"` with `#` symbols to terminate the raw byte string literal"
},
rustc_lexer::RawStrError::TooManyDelimiters { .. } => "Too many `#` symbols: raw byte strings may be delimited by up to 65535 `#` symbols",
};
};
rustc_lexer::LiteralKind::RawByteStr { n_hashes } => {
if n_hashes.is_none() {
err = "Invalid raw string literal";
}
BYTE_STRING
}
rustc_lexer::LiteralKind::RawCStr { n_hashes } => {
if n_hashes.is_none() {
err = "Invalid raw string literal";
}
C_STRING
}
};
let err = if err.is_empty() { None } else { Some(err) };
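
The conversion loop slices by `token.len as usize` above because `rustc_lexer` tokens carry only a kind and a length, so the caller tracks byte offsets itself. A self-contained sketch of that pattern with a toy stand-in lexer (the real code uses `rustc_lexer::tokenize`, whose `Token::len` is a `u32` in the version this diff moves to):

    // Stand-in token type; only the length matters for the offset bookkeeping.
    struct Token { len: u32 }

    // Toy lexer: every whitespace-delimited chunk (including its trailing space)
    // becomes one token, mimicking an iterator-returning `tokenize`.
    fn tokenize(text: &str) -> impl Iterator<Item = Token> + '_ {
        text.split_inclusive(' ').map(|chunk| Token { len: chunk.len() as u32 })
    }

    fn main() {
        let text = "fn main ";
        let mut offset = 0usize;
        for token in tokenize(text) {
            // Slice the source with `len as usize` and advance the offset.
            let token_text = &text[offset..offset + token.len as usize];
            offset += token.len as usize;
            print!("[{token_text}]");
        }
        println!();
    }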

View file

@ -131,6 +131,7 @@ pub enum PrefixEntryPoint {
Block,
Stmt,
Pat,
PatTop,
Ty,
Expr,
Path,
@ -145,6 +146,7 @@ impl PrefixEntryPoint {
PrefixEntryPoint::Block => grammar::entry::prefix::block,
PrefixEntryPoint::Stmt => grammar::entry::prefix::stmt,
PrefixEntryPoint::Pat => grammar::entry::prefix::pat,
PrefixEntryPoint::PatTop => grammar::entry::prefix::pat_top,
PrefixEntryPoint::Ty => grammar::entry::prefix::ty,
PrefixEntryPoint::Expr => grammar::entry::prefix::expr,
PrefixEntryPoint::Path => grammar::entry::prefix::path,

View file

@ -205,7 +205,7 @@ impl<'t> Parser<'t> {
marker.bomb.defuse();
marker = new_marker;
};
self.pos += 1 as usize;
self.pos += 1;
self.push_event(Event::FloatSplitHack { ends_in_dot });
(ends_in_dot, marker)
}

View file

@ -46,10 +46,8 @@ impl<'a> LexedStr<'a> {
// Tag the token as joint if it is a float with a fractional part
// we use this jointness to inform the parser about what token split
// event to emit when we encounter a float literal in a field access
if kind == SyntaxKind::FLOAT_NUMBER {
if !self.text(i).ends_with('.') {
res.was_joint();
}
if kind == SyntaxKind::FLOAT_NUMBER && !self.text(i).ends_with('.') {
res.was_joint();
}
}

View file

@ -117,6 +117,7 @@ pub enum SyntaxKind {
BYTE,
STRING,
BYTE_STRING,
C_STRING,
ERROR,
IDENT,
WHITESPACE,
@ -245,6 +246,7 @@ pub enum SyntaxKind {
GENERIC_PARAM,
LIFETIME_PARAM,
TYPE_PARAM,
RETURN_TYPE_ARG,
CONST_PARAM,
GENERIC_ARG_LIST,
LIFETIME,
@ -378,7 +380,7 @@ impl SyntaxKind {
)
}
pub fn is_literal(self) -> bool {
matches!(self, INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | BYTE_STRING)
matches!(self, INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | BYTE_STRING | C_STRING)
}
pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
let kw = match ident {

View file

@ -33,8 +33,7 @@ fn stmt() {
fn pat() {
check(PrefixEntryPoint::Pat, "x y", "x");
check(PrefixEntryPoint::Pat, "fn f() {}", "fn");
// FIXME: This one is wrong, we should consume only one pattern.
check(PrefixEntryPoint::Pat, ".. ..", ".. ..");
check(PrefixEntryPoint::Pat, ".. ..", "..");
}
#[test]