internal: Improve parser recovery a bunch
parent 5fdf640fb3
commit 1be24e0899

14 changed files with 249 additions and 134 deletions
@@ -68,6 +68,12 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) {
         Err(m) => m,
     };
 
+    if !p.at_ts(EXPR_FIRST) {
+        p.err_and_bump("expected expression, item or let statement");
+        m.abandon(p);
+        return;
+    }
+
     if let Some((cm, blocklike)) = expr_stmt(p, Some(m)) {
         if !(p.at(T!['}']) || (semicolon != Semicolon::Required && p.at(EOF))) {
             // test no_semi_after_block
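
The guard added above is the parser's plain "error and bump" recovery: if the lookahead cannot start an expression, report a diagnostic, skip the offending token so the statement loop keeps making progress, and abandon the node that was started. A minimal self-contained sketch of that shape, using toy stand-ins for the Parser and token types rather than the real rust-analyzer API:

// Toy stand-ins; not the rust-analyzer parser API.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Tok { Ident, Plus, Semi, Eof }

struct Parser { tokens: Vec<Tok>, pos: usize, errors: Vec<String> }

impl Parser {
    fn current(&self) -> Tok {
        self.tokens.get(self.pos).copied().unwrap_or(Tok::Eof)
    }
    // FIRST set of expressions in this toy grammar: just identifiers.
    fn at_expr_first(&self) -> bool {
        matches!(self.current(), Tok::Ident)
    }
    // "err_and_bump": record an error and skip one token so parsing keeps moving.
    fn err_and_bump(&mut self, msg: &str) {
        self.errors.push(format!("{msg}, got {:?}", self.current()));
        if self.current() != Tok::Eof {
            self.pos += 1;
        }
    }
    fn stmt(&mut self) {
        if !self.at_expr_first() {
            // Same shape as the added guard: bail out of the statement early.
            self.err_and_bump("expected expression, item or let statement");
            return;
        }
        // ... parse the expression statement; here we just consume it ...
        self.pos += 1;
        if self.current() == Tok::Semi {
            self.pos += 1;
        }
    }
}

fn main() {
    let mut p = Parser { tokens: vec![Tok::Plus, Tok::Ident, Tok::Semi], pos: 0, errors: vec![] };
    p.stmt(); // `+` cannot start a statement: one diagnostic, token skipped
    p.stmt(); // now at the identifier, which parses normally
    assert_eq!(p.errors.len(), 1);
}

Eating the bad token unconditionally is acceptable at statement level, where almost any following token can begin a fresh statement or item.
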
@@ -227,6 +233,12 @@ fn expr_bp(
         attributes::outer_attrs(p);
         m
     });
+
+    if !p.at_ts(EXPR_FIRST) {
+        p.err_recover("expected expression", atom::EXPR_RECOVERY_SET);
+        m.abandon(p);
+        return None;
+    }
     let mut lhs = match lhs(p, r) {
         Some((lhs, blocklike)) => {
             let lhs = lhs.extend_to(p, m);
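
The matching guard in expr_bp uses err_recover with atom::EXPR_RECOVERY_SET instead, which per the EXPR_RECOVERY_SET change further down now holds the closing delimiters `)` and `]`. The practical difference from err_and_bump, again as a hedged toy sketch rather than the real implementation (the real err_recover also wraps the skipped token in an error node):

// Toy stand-ins; not the rust-analyzer parser API.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Tok { RParen, RBrack, Eof }

struct Parser { tokens: Vec<Tok>, pos: usize, errors: Vec<String> }

impl Parser {
    fn current(&self) -> Tok {
        self.tokens.get(self.pos).copied().unwrap_or(Tok::Eof)
    }
    // "err_recover": always report, but only consume the offending token when it
    // is NOT in the recovery set. Tokens in the set are left in place for the
    // enclosing rule, which knows how to finish its own node with them.
    fn err_recover(&mut self, msg: &str, recovery: &[Tok]) {
        self.errors.push(msg.to_string());
        let cur = self.current();
        if cur != Tok::Eof && !recovery.contains(&cur) {
            self.pos += 1;
        }
    }
}

fn main() {
    let recovery = [Tok::RParen, Tok::RBrack];
    let mut p = Parser { tokens: vec![Tok::RParen], pos: 0, errors: vec![] };
    p.err_recover("expected expression", &recovery);
    // `)` was reported but not eaten, so an enclosing argument list or index
    // expression can still close itself on it.
    assert_eq!(p.current(), Tok::RParen);
    assert_eq!(p.errors.len(), 1);
}
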
@@ -551,6 +563,12 @@ fn cast_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
     m.complete(p, CAST_EXPR)
 }
 
+// test_err arg_list_recovery
+// fn main() {
+//     foo(bar::);
+//     foo(bar:);
+//     foo(bar+);
+// }
 fn arg_list(p: &mut Parser<'_>) {
     assert!(p.at(T!['(']));
     let m = p.start();
@@ -563,8 +581,15 @@ fn arg_list(p: &mut Parser<'_>) {
         if !expr(p) {
             break;
         }
-        if !p.at(T![')']) && !p.expect(T![,]) {
-            break;
-        }
+        if !p.at(T![,]) {
+            if p.at_ts(EXPR_FIRST) {
+                p.error("expected `,`");
+                continue;
+            } else {
+                break;
+            }
+        } else {
+            p.bump(T![,]);
+        }
     }
     p.eat(T![')']);
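
The rewritten loop handles a missing or malformed separator more gracefully: when the token after an argument is not `,` but could start another expression, it reports "expected `,`" and keeps collecting arguments; only otherwise does it stop and let the trailing `)` close the list. A standalone toy version of the same strategy, with simplified types that are not the rust-analyzer API:

// Toy argument-list parser illustrating the recovery strategy in the hunk above.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Tok { Ident, Comma, LParen, RParen, Eof }

struct Parser { tokens: Vec<Tok>, pos: usize, errors: Vec<String>, args: usize }

impl Parser {
    fn current(&self) -> Tok {
        self.tokens.get(self.pos).copied().unwrap_or(Tok::Eof)
    }
    fn bump(&mut self) {
        if self.current() != Tok::Eof { self.pos += 1 }
    }
    fn at_expr_first(&self) -> bool {
        matches!(self.current(), Tok::Ident)
    }
    fn expr(&mut self) -> bool {
        if self.at_expr_first() { self.bump(); self.args += 1; true } else { false }
    }
    fn arg_list(&mut self) {
        assert_eq!(self.current(), Tok::LParen);
        self.bump();
        while self.current() != Tok::RParen && self.current() != Tok::Eof {
            if !self.expr() {
                break;
            }
            if self.current() != Tok::Comma {
                if self.at_expr_first() {
                    // e.g. `foo(a b)`: the comma is missing, but `b` clearly starts
                    // the next argument, so complain and keep going.
                    self.errors.push("expected `,`".to_string());
                    continue;
                } else {
                    break;
                }
            } else {
                self.bump();
            }
        }
        if self.current() == Tok::RParen { self.bump(); }
    }
}

fn main() {
    // `(a b, c)`, with the callee assumed to be already consumed.
    let tokens = vec![Tok::LParen, Tok::Ident, Tok::Ident, Tok::Comma, Tok::Ident, Tok::RParen];
    let mut p = Parser { tokens, pos: 0, errors: vec![], args: 0 };
    p.arg_list();
    assert_eq!(p.args, 3);          // all three arguments were still parsed
    assert_eq!(p.errors.len(), 1);  // with a single "expected `,`" diagnostic
}
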
@@ -40,26 +40,28 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
         T!['{'],
         T!['['],
         T![|],
-        T![move],
-        T![box],
-        T![if],
-        T![while],
-        T![match],
-        T![unsafe],
-        T![return],
-        T![yield],
-        T![do],
-        T![break],
-        T![continue],
-        T![async],
-        T![try],
-        T![const],
-        T![loop],
-        T![for],
+        T![async],
+        T![box],
+        T![break],
+        T![const],
+        T![continue],
+        T![do],
+        T![for],
+        T![if],
+        T![let],
+        T![loop],
+        T![match],
+        T![move],
+        T![return],
+        T![static],
+        T![try],
+        T![unsafe],
+        T![while],
+        T![yield],
         LIFETIME_IDENT,
     ]));
 
-const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[T![let]]);
+pub(super) const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[T![')'], T![']']]);
 
 pub(super) fn atom_expr(
     p: &mut Parser<'_>,
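
Two things change here: ATOM_EXPR_FIRST is rebuilt in alphabetical order and now also lists T![let] and T![static], and EXPR_RECOVERY_SET swaps T![let] for the closing delimiters `)` and `]`, becoming pub(super) so the parent module can use it as atom::EXPR_RECOVERY_SET. TokenSet itself is, roughly, a constant bitset of token kinds with cheap union and membership tests; a simplified self-contained sketch of that idea (an assumed illustration, not the actual parser-crate type):

// A minimal const-friendly token set as a bitset over made-up token kinds.
#[derive(Clone, Copy)]
struct TokenSet(u128);

impl TokenSet {
    const fn new(kinds: &[u8]) -> TokenSet {
        let mut bits = 0u128;
        let mut i = 0;
        while i < kinds.len() {
            bits |= 1u128 << kinds[i];
            i += 1;
        }
        TokenSet(bits)
    }
    const fn union(self, other: TokenSet) -> TokenSet {
        TokenSet(self.0 | other.0)
    }
    const fn contains(&self, kind: u8) -> bool {
        self.0 & (1u128 << kind) != 0
    }
}

// Pretend token kinds (the real ones are SyntaxKind variants).
const IDENT: u8 = 0;
const IF_KW: u8 = 1;
const LOOP_KW: u8 = 2;
const R_PAREN: u8 = 3;

const LITERAL_FIRST: TokenSet = TokenSet::new(&[IDENT]);
const ATOM_EXPR_FIRST: TokenSet = LITERAL_FIRST.union(TokenSet::new(&[IF_KW, LOOP_KW]));
const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[R_PAREN]);

fn main() {
    assert!(ATOM_EXPR_FIRST.contains(IF_KW));
    assert!(!ATOM_EXPR_FIRST.contains(R_PAREN));
    assert!(EXPR_RECOVERY_SET.contains(R_PAREN));
}
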
@@ -157,7 +159,7 @@ pub(super) fn atom_expr(
         T![for] => for_expr(p, None),
 
         _ => {
-            p.err_recover("expected expression", EXPR_RECOVERY_SET);
+            p.err_and_bump("expected expression");
            return None;
         }
     };
@@ -67,6 +67,10 @@ fn path_for_qualifier(
     }
 }
 
+const EXPR_PATH_SEGMENT_RECOVERY_SET: TokenSet =
+    items::ITEM_RECOVERY_SET.union(TokenSet::new(&[T![')'], T![,], T![let]]));
+const TYPE_PATH_SEGMENT_RECOVERY_SET: TokenSet = types::TYPE_RECOVERY_SET;
+
 fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) {
     let m = p.start();
     // test qual_paths
@@ -102,7 +106,12 @@ fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) {
                 m.complete(p, NAME_REF);
             }
             _ => {
-                p.err_recover("expected identifier", items::ITEM_RECOVERY_SET);
+                let recover_set = match mode {
+                    Mode::Use => items::ITEM_RECOVERY_SET,
+                    Mode::Type => TYPE_PATH_SEGMENT_RECOVERY_SET,
+                    Mode::Expr => EXPR_PATH_SEGMENT_RECOVERY_SET,
+                };
+                p.err_recover("expected identifier", recover_set);
                 if empty {
                     // test_err empty_segment
                     // use crate::;
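
Rather than always recovering with the item-level set, path_segment now picks a recovery set matching where the path occurs, so a broken segment in an expression like `foo(bar::)` should stop at the `)` instead of swallowing it. A compact sketch of the mode-to-set selection, with made-up token names standing in for the real ones:

// Toy illustration of context-dependent recovery sets.
#[derive(Clone, Copy)]
enum Mode { Use, Type, Expr }

#[derive(Clone, Copy, PartialEq, Eq)]
enum Tok { RParen, Comma, Gt, LetKw, FnKw }

const ITEM_RECOVERY_SET: &[Tok] = &[Tok::FnKw];
const TYPE_PATH_SEGMENT_RECOVERY_SET: &[Tok] = &[Tok::RParen, Tok::Gt, Tok::Comma];
const EXPR_PATH_SEGMENT_RECOVERY_SET: &[Tok] = &[Tok::FnKw, Tok::RParen, Tok::Comma, Tok::LetKw];

// A bad segment inside `foo(bar::)` should stop at `)`, a bad segment in a type
// position should also stop at `>`, and a bad segment in a `use` tree should
// stop at the next item keyword.
fn recovery_set(mode: Mode) -> &'static [Tok] {
    match mode {
        Mode::Use => ITEM_RECOVERY_SET,
        Mode::Type => TYPE_PATH_SEGMENT_RECOVERY_SET,
        Mode::Expr => EXPR_PATH_SEGMENT_RECOVERY_SET,
    }
}

fn main() {
    assert!(recovery_set(Mode::Expr).contains(&Tok::RParen));
    assert!(recovery_set(Mode::Type).contains(&Tok::Gt));
    assert!(!recovery_set(Mode::Use).contains(&Tok::RParen));
}
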
@@ -17,8 +17,9 @@ pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(TokenSet::new(&[
     T![Self],
 ]));
 
-const TYPE_RECOVERY_SET: TokenSet = TokenSet::new(&[
+pub(super) const TYPE_RECOVERY_SET: TokenSet = TokenSet::new(&[
     T![')'],
+    T![>],
     T![,],
     // test_err struct_field_recover
     // struct S { f pub g: () }
@@ -15,6 +15,7 @@ use crate::{LexedStr, TopEntryPoint};
 #[test]
 fn lex_ok() {
     for case in TestCase::list("lexer/ok") {
+        let _guard = stdx::panic_context::enter(format!("{:?}", case.rs));
         let actual = lex(&case.text);
         expect_file![case.rast].assert_eq(&actual)
     }
@@ -23,6 +24,7 @@ fn lex_ok() {
 #[test]
 fn lex_err() {
     for case in TestCase::list("lexer/err") {
+        let _guard = stdx::panic_context::enter(format!("{:?}", case.rs));
         let actual = lex(&case.text);
         expect_file![case.rast].assert_eq(&actual)
     }
@@ -46,6 +48,7 @@ fn lex(text: &str) -> String {
 #[test]
 fn parse_ok() {
     for case in TestCase::list("parser/ok") {
+        let _guard = stdx::panic_context::enter(format!("{:?}", case.rs));
         let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
         assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display());
         expect_file![case.rast].assert_eq(&actual);
@@ -55,6 +58,7 @@ fn parse_ok() {
 #[test]
 fn parse_inline_ok() {
     for case in TestCase::list("parser/inline/ok") {
+        let _guard = stdx::panic_context::enter(format!("{:?}", case.rs));
         let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
         assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display());
         expect_file![case.rast].assert_eq(&actual);
@@ -64,6 +68,7 @@ fn parse_inline_ok() {
 #[test]
 fn parse_err() {
     for case in TestCase::list("parser/err") {
+        let _guard = stdx::panic_context::enter(format!("{:?}", case.rs));
         let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
         assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display());
         expect_file![case.rast].assert_eq(&actual)
@@ -73,6 +78,7 @@ fn parse_err() {
 #[test]
 fn parse_inline_err() {
     for case in TestCase::list("parser/inline/err") {
+        let _guard = stdx::panic_context::enter(format!("{:?}", case.rs));
         let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
         assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display());
         expect_file![case.rast].assert_eq(&actual)
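
Each test loop now enters a panic context naming the fixture file, so a panic while processing one case reports which .rs file was being parsed. stdx::panic_context is rust-analyzer's own helper; a rough standalone approximation of such a guard, built on a thread-local stack plus a panic hook (an assumed sketch, not the stdx implementation):

// While a guard returned by `enter` is alive, panics also print its context string.
use std::cell::RefCell;
use std::panic;

thread_local! {
    static CONTEXT: RefCell<Vec<String>> = RefCell::new(Vec::new());
}

struct PanicContext;

fn enter(context: String) -> PanicContext {
    CONTEXT.with(|c| c.borrow_mut().push(context));
    PanicContext
}

impl Drop for PanicContext {
    fn drop(&mut self) {
        CONTEXT.with(|c| {
            c.borrow_mut().pop();
        });
    }
}

fn install_hook() {
    let default_hook = panic::take_hook();
    panic::set_hook(Box::new(move |info| {
        // Print the context frames before delegating to the default hook.
        CONTEXT.with(|c| {
            for frame in c.borrow().iter() {
                eprintln!("> {frame}");
            }
        });
        default_hook(info);
    }));
}

fn main() {
    install_hook();
    let _guard = enter(format!("{:?}", "lexer/ok/some_case.rs"));
    // A panic anywhere in this scope would now be prefixed with the case name.
}
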
@@ -65,7 +65,7 @@ fn macro_stmt() {
             MACRO_STMTS
               ERROR
                 SHEBANG "#!/usr/bin/rust"
-            error 0: expected expression
+            error 0: expected expression, item or let statement
         "##]],
     );
     check(