mirror of https://github.com/rust-lang/rust-analyzer.git
synced 2025-09-26 20:09:19 +00:00

parent b3ef934ccb
commit 25242fe93f

395 changed files with 14569 additions and 5755 deletions
@@ -51,7 +51,7 @@ pub(crate) mod entry {
     use super::*;
 
     pub(crate) fn vis(p: &mut Parser<'_>) {
-        let _ = opt_visibility(p, false);
+        opt_visibility(p, false);
     }
 
     pub(crate) fn block(p: &mut Parser<'_>) {
@@ -70,10 +70,10 @@ pub(crate) mod entry {
         types::type_(p);
     }
     pub(crate) fn expr(p: &mut Parser<'_>) {
-        let _ = expressions::expr(p);
+        expressions::expr(p);
     }
     pub(crate) fn path(p: &mut Parser<'_>) {
-        let _ = paths::type_path(p);
+        paths::type_path(p);
     }
     pub(crate) fn item(p: &mut Parser<'_>) {
         items::item_or_macro(p, true);
@@ -288,7 +288,7 @@ fn expr_bp(
 }
 
 const LHS_FIRST: TokenSet =
-    atom::ATOM_EXPR_FIRST.union(TokenSet::new(&[T![&], T![*], T![!], T![.], T![-]]));
+    atom::ATOM_EXPR_FIRST.union(TokenSet::new(&[T![&], T![*], T![!], T![.], T![-], T![_]]));
 
 fn lhs(p: &mut Parser<'_>, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
     let m;
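Adding `T![_]` to `LHS_FIRST` lets a bare `_` start an expression, which is what the `|| _ = 0` closure-body test further down in this commit exercises. A minimal sketch of the surface syntax this enables (plain stable Rust, nothing rust-analyzer-specific):

fn main() {
    let mut pair = (0, 0);
    _ = 0;                 // `_` as a bare assignment target
    (_, pair.1) = (1, 2);  // `_` inside a destructuring assignment
    println!("{pair:?}");  // (0, 2)
}
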
@@ -48,6 +48,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
         T![unsafe],
         T![return],
         T![yield],
+        T![do],
         T![break],
         T![continue],
         T![async],
@@ -93,6 +94,7 @@ pub(super) fn atom_expr(
         T![match] => match_expr(p),
         T![return] => return_expr(p),
         T![yield] => yield_expr(p),
+        T![do] if p.nth_at_contextual_kw(1, T![yeet]) => yeet_expr(p),
         T![continue] => continue_expr(p),
         T![break] => break_expr(p, r),
@@ -278,6 +280,8 @@ fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker {
         // fn main() { || -> i32 { 92 }(); }
         block_expr(p);
     } else if p.at_ts(EXPR_FIRST) {
+        // test closure_body_underscore_assignment
+        // fn main() { || _ = 0; }
         expr(p);
     } else {
         p.error("expected expression");
@@ -531,6 +535,7 @@ fn return_expr(p: &mut Parser<'_>) -> CompletedMarker {
     }
     m.complete(p, RETURN_EXPR)
 }
 
 // test yield_expr
 // fn foo() {
 //     yield;
@@ -546,6 +551,23 @@ fn yield_expr(p: &mut Parser<'_>) -> CompletedMarker {
     m.complete(p, YIELD_EXPR)
 }
 
+// test yeet_expr
+// fn foo() {
+//     do yeet;
+//     do yeet 1
+// }
+fn yeet_expr(p: &mut Parser<'_>) -> CompletedMarker {
+    assert!(p.at(T![do]));
+    assert!(p.nth_at_contextual_kw(1, T![yeet]));
+    let m = p.start();
+    p.bump(T![do]);
+    p.bump_remap(T![yeet]);
+    if p.at_ts(EXPR_FIRST) {
+        expr(p);
+    }
+    m.complete(p, YEET_EXPR)
+}
+
 // test continue_expr
 // fn foo() {
 //     loop {
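`yeet` is only a contextual keyword: the lexer hands the parser a plain identifier, so the `do` branch peeks one token ahead with `nth_at_contextual_kw` and then `bump_remap`s the identifier into a real `yeet` token. The syntax being parsed is the nightly `do yeet` operator; a minimal sketch of a program exercising it (assumes a nightly toolchain, and that `Result`'s `FromResidual` impl accepts the yeeted value as on current nightlies):

#![feature(yeet_expr)]

// `do yeet <expr>` is the explicit early-exit operator: in a
// Result-returning function it produces the Err path.
fn parse_digit(c: char) -> Result<u32, String> {
    match c.to_digit(10) {
        Some(d) => Ok(d),
        None => do yeet format!("not a digit: {c:?}"),
    }
}

fn main() {
    assert_eq!(parse_digit('7'), Ok(7));
    assert!(parse_digit('x').is_err());
}
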
@@ -83,11 +83,12 @@ fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) {
             }
             p.expect(T![>]);
         } else {
-            let mut empty = true;
-            if first {
+            let empty = if first {
                 p.eat(T![::]);
-                empty = false;
-            }
+                false
+            } else {
+                true
+            };
             match p.current() {
                 IDENT => {
                     name_ref(p);
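The refactor trades a mutable flag that was conditionally overwritten for a single expression-shaped `if`, so `empty` can stay immutable. The same move in isolation (hypothetical names, just to show the shape):

fn demo(first: bool, mut eat_leading_sep: impl FnMut()) -> bool {
    // Bind once instead of `let mut empty = true; if first { ...; empty = false; }`.
    // The `first` arm also has a side effect (consuming a leading `::`),
    // which is why the whole thing can't simply collapse to `!first`.
    let empty = if first {
        eat_leading_sep();
        false
    } else {
        true
    };
    empty
}
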
@@ -62,39 +62,50 @@ fn pattern_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
 }
 
 fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
-    if let Some(lhs) = atom_pat(p, recovery_set) {
-        // test range_pat
-        // fn main() {
-        //     match 92 {
-        //         0 ... 100 => (),
-        //         101 ..= 200 => (),
-        //         200 .. 301 => (),
-        //         302 .. => (),
-        //     }
-        //
-        //     match Some(10 as u8) {
-        //         Some(0) | None => (),
-        //         Some(1..) => ()
-        //     }
-        //
-        //     match () {
-        //         S { a: 0 } => (),
-        //         S { a: 1.. } => (),
-        //     }
-        //
-        //     match () {
-        //         [0] => (),
-        //         [1..] => (),
-        //     }
-        //
-        //     match (10 as u8, 5 as u8) {
-        //         (0, _) => (),
-        //         (1.., _) => ()
-        //     }
-        // }
+    // test range_pat
+    // fn main() {
+    //     match 92 {
+    //         0 ... 100 => (),
+    //         101 ..= 200 => (),
+    //         200 .. 301 => (),
+    //         302 .. => (),
+    //         ..= 303 => (),
+    //     }
+    //
+    //     match Some(10 as u8) {
+    //         Some(0) | None => (),
+    //         Some(1..) => (),
+    //         Some(..=2) => (),
+    //     }
+    //
+    //     match () {
+    //         S { a: 0 } => (),
+    //         S { a: 1.. } => (),
+    //         S { a: ..=2 } => (),
+    //     }
+    //
+    //     match () {
+    //         [0] => (),
+    //         [1..] => (),
+    //         [..=2] => (),
+    //     }
+    //
+    //     match (10 as u8, 5 as u8) {
+    //         (0, _) => (),
+    //         (1.., _) => (),
+    //         (..=2, _) => (),
+    //     }
+    // }
+
+    // FIXME: support half_open_range_patterns (`..=2`),
+    // exclusive_range_pattern (`..5`) with missing lhs
+    if p.at(T![..=]) {
+        let m = p.start();
+        p.bump(T![..=]);
+        atom_pat(p, recovery_set);
+        m.complete(p, RANGE_PAT);
+        return;
+    }
+
+    if let Some(lhs) = atom_pat(p, recovery_set) {
         for range_op in [T![...], T![..=], T![..]] {
             if p.at(range_op) {
                 let m = lhs.precede(p);
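The new early branch recognizes patterns that begin with `..=`, i.e. ranges with no lower bound, wrapping them in a `RANGE_PAT` node before the usual atom-pattern path runs. What this parses, in plain Rust (`X..` and `..=X` patterns are stable since Rust 1.66; exclusive `..X` with a missing lhs is the FIXME in the hunk above):

fn classify(n: u8) -> &'static str {
    match n {
        ..=9 => "one digit",      // half-open: no lower bound
        10..=99 => "two digits",
        100.. => "three digits",  // half-open: no upper bound
    }
}

fn main() {
    assert_eq!(classify(5), "one digit");
    assert_eq!(classify(255), "three digits");
}
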
@@ -115,11 +126,21 @@ fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
                 //        ^
                 // `[0..]`
                 //     ^
-                if matches!(p.current(), T![=] | T![,] | T![:] | T![')'] | T!['}'] | T![']']) {
+                // `0 .. if`
+                //       ^
+                if matches!(
+                    p.current(),
+                    T![=] | T![,] | T![:] | T![')'] | T!['}'] | T![']'] | T![if]
+                ) {
                     // test half_open_range_pat
                     // fn f() {
                     //     let 0 .. = 1u32;
                     //     let 0..: _ = 1u32;
+                    //
+                    //     match 42 {
+                    //         0 .. if true => (),
+                    //         _ => (),
+                    //     }
                     // }
                 } else {
                     atom_pat(p, recovery_set);
@@ -57,7 +57,7 @@ impl<'a> LexedStr<'a> {
         let mut conv = Converter::new(text);
         conv.extend_token(&token.kind, text);
         match &*conv.res.kind {
-            [kind] => Some((*kind, conv.res.error.pop().map(|it| it.msg.clone()))),
+            [kind] => Some((*kind, conv.res.error.pop().map(|it| it.msg))),
             _ => None,
         }
     }
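The `.clone()` was redundant: `Vec::pop` returns the element by value, so `msg` can be moved out of it instead of copied. The same point in miniature (type and field names are illustrative, not the crate's API):

struct LexError { msg: String }

fn last_error_msg(mut errors: Vec<LexError>) -> Option<String> {
    // `pop()` hands back an owned LexError, so `it.msg` moves out; no clone.
    errors.pop().map(|it| it.msg)
}
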
@@ -54,7 +54,7 @@ impl Output {
     }
 
     pub(crate) fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
-        let e = ((kind as u16 as u32) << 16) | ((n_tokens as u32) << 8) | (0 << 4) | 1;
+        let e = ((kind as u16 as u32) << 16) | ((n_tokens as u32) << 8) | 1;
        self.event.push(e)
    }
 
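`(0 << 4)` is always zero, so removing it changes nothing in the packed word. Reading the remaining shifts, the event packs the syntax kind into the high 16 bits, the raw-token count into bits 8..16, and a tag in the low bit; a sketch of the corresponding decoder (function and field interpretation are inferred from the shifts, not the crate's API):

fn unpack_token_event(e: u32) -> (u16, u8) {
    debug_assert_eq!(e & 1, 1);    // low bit tags this word as a token event
    let kind = (e >> 16) as u16;   // SyntaxKind, stored as its raw u16
    let n_tokens = (e >> 8) as u8; // how many raw tokens it covers
    (kind, n_tokens)
}
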
@@ -148,11 +148,16 @@ impl<'t> Parser<'t> {
         kinds.contains(self.current())
     }
 
-    /// Checks if the current token is contextual keyword with text `t`.
+    /// Checks if the current token is contextual keyword `kw`.
     pub(crate) fn at_contextual_kw(&self, kw: SyntaxKind) -> bool {
         self.inp.contextual_kind(self.pos) == kw
     }
 
+    /// Checks if the nth token is contextual keyword `kw`.
+    pub(crate) fn nth_at_contextual_kw(&self, n: usize, kw: SyntaxKind) -> bool {
+        self.inp.contextual_kind(self.pos + n) == kw
+    }
+
     /// Starts a new node in the syntax tree. All nodes and tokens
     /// consumed between the `start` and the corresponding `Marker::complete`
     /// belong to the same node.
@@ -162,7 +167,7 @@ impl<'t> Parser<'t> {
         Marker::new(pos)
     }
 
-    /// Consume the next token if `kind` matches.
+    /// Consume the next token. Panics if the parser isn't currently at `kind`.
     pub(crate) fn bump(&mut self, kind: SyntaxKind) {
         assert!(self.eat(kind));
     }
@@ -205,7 +210,7 @@ impl<'t> Parser<'t> {
         if self.eat(kind) {
             return true;
         }
-        self.error(format!("expected {:?}", kind));
+        self.error(format!("expected {kind:?}"));
         false
     }
 
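This hunk, and the many similar ones below, are the same mechanical modernization: Rust 1.58's inline format arguments let the format string capture variables by name. The two forms produce identical output:

fn main() {
    let kind = "IDENT";
    // positional argument vs. captured identifier: same result
    assert_eq!(format!("expected {:?}", kind), format!("expected {kind:?}"));
}
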
@@ -237,6 +242,7 @@ impl<'t> Parser<'t> {
 
     fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) {
         self.pos += n_raw_tokens as usize;
+        self.steps.set(0);
         self.push_event(Event::Token { kind, n_raw_tokens });
     }
 
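A plausible reading of the new line, not stated in the diff itself: `steps` counts parser operations since the last consumed token as a stuck-parser guard, and resetting it inside `do_bump` means only genuine lack of progress can trip the limit. A sketch of that mechanism (the field name matches the diff; the limit and message are illustrative assumptions):

use std::cell::Cell;

struct StuckGuard {
    steps: Cell<u32>,
}

impl StuckGuard {
    const LIMIT: u32 = 10_000_000; // illustrative value only

    fn on_step(&self) {
        let n = self.steps.get() + 1;
        assert!(n < Self::LIMIT, "the parser seems stuck");
        self.steps.set(n);
    }

    fn on_token_consumed(&self) {
        self.steps.set(0); // progress made: restart the budget, as in do_bump
    }
}
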
@@ -80,8 +80,8 @@ impl<'a> LexedStr<'a> {
             State::PendingEnter | State::Normal => unreachable!(),
         }
 
-        let is_eof = builder.pos == builder.lexed.len();
-        is_eof
+        // is_eof?
+        builder.pos == builder.lexed.len()
     }
 }
 
File diff suppressed because one or more lines are too long
@@ -37,8 +37,8 @@ fn lex(text: &str) -> String {
         let text = lexed.text(i);
         let error = lexed.error(i);
 
-        let error = error.map(|err| format!(" error: {}", err)).unwrap_or_default();
-        writeln!(res, "{:?} {:?}{}", kind, text, error).unwrap();
+        let error = error.map(|err| format!(" error: {err}")).unwrap_or_default();
+        writeln!(res, "{kind:?} {text:?}{error}").unwrap();
     }
     res
 }
@@ -47,7 +47,7 @@ fn lex(text: &str) -> String {
 fn parse_ok() {
     for case in TestCase::list("parser/ok") {
         let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
-        assert!(!errors, "errors in an OK file {}:\n{}", case.rs.display(), actual);
+        assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display());
         expect_file![case.rast].assert_eq(&actual);
     }
 }
@@ -56,7 +56,7 @@ fn parse_ok() {
 fn parse_inline_ok() {
     for case in TestCase::list("parser/inline/ok") {
         let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
-        assert!(!errors, "errors in an OK file {}:\n{}", case.rs.display(), actual);
+        assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display());
         expect_file![case.rast].assert_eq(&actual);
     }
 }
@@ -65,7 +65,7 @@ fn parse_inline_ok() {
 fn parse_err() {
     for case in TestCase::list("parser/err") {
         let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
-        assert!(errors, "no errors in an ERR file {}:\n{}", case.rs.display(), actual);
+        assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display());
         expect_file![case.rast].assert_eq(&actual)
     }
 }
@@ -74,7 +74,7 @@ fn parse_err() {
 fn parse_inline_err() {
     for case in TestCase::list("parser/inline/err") {
         let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
-        assert!(errors, "no errors in an ERR file {}:\n{}", case.rs.display(), actual);
+        assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display());
         expect_file![case.rast].assert_eq(&actual)
     }
 }
@@ -93,14 +93,12 @@ fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) {
             crate::StrStep::Token { kind, text } => {
                 assert!(depth > 0);
                 len += text.len();
-                write!(buf, "{}", indent).unwrap();
-                write!(buf, "{:?} {:?}\n", kind, text).unwrap();
+                writeln!(buf, "{indent}{kind:?} {text:?}").unwrap();
             }
             crate::StrStep::Enter { kind } => {
                 assert!(depth > 0 || len == 0);
                 depth += 1;
-                write!(buf, "{}", indent).unwrap();
-                write!(buf, "{:?}\n", kind).unwrap();
+                writeln!(buf, "{indent}{kind:?}").unwrap();
                 indent.push_str("  ");
             }
             crate::StrStep::Exit => {
@@ -111,7 +109,7 @@ fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) {
             }
             crate::StrStep::Error { msg, pos } => {
                 assert!(depth > 0);
-                errors.push(format!("error {}: {}\n", pos, msg))
+                errors.push(format!("error {pos}: {msg}\n"))
             }
         });
         assert_eq!(
@@ -124,7 +122,7 @@ fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) {
 
     for (token, msg) in lexed.errors() {
         let pos = lexed.text_start(token);
-        errors.push(format!("error {}: {}\n", pos, msg));
+        errors.push(format!("error {pos}: {msg}\n"));
     }
 
     let has_errors = !errors.is_empty();
@@ -149,7 +147,7 @@ impl TestCase {
 
         let mut res = Vec::new();
         let read_dir = fs::read_dir(&dir)
-            .unwrap_or_else(|err| panic!("can't `read_dir` {}: {}", dir.display(), err));
+            .unwrap_or_else(|err| panic!("can't `read_dir` {}: {err}", dir.display()));
         for file in read_dir {
            let file = file.unwrap();
            let path = file.path();
@@ -23,7 +23,7 @@ fn sourcegen_parser_tests() {
     // ok is never actually read, but it needs to be specified to create a Test in existing_tests
     let existing = existing_tests(&tests_dir, true);
     for t in existing.keys().filter(|&t| !tests.contains_key(t)) {
-        panic!("Test is deleted: {}", t);
+        panic!("Test is deleted: {t}");
     }
 
     let mut new_idx = existing.len() + 1;
@@ -31,7 +31,7 @@ fn sourcegen_parser_tests() {
         let path = match existing.get(name) {
             Some((path, _test)) => path.clone(),
             None => {
-                let file_name = format!("{:04}_{}.rs", new_idx, name);
+                let file_name = format!("{new_idx:04}_{name}.rs");
                 new_idx += 1;
                 tests_dir.join(file_name)
             }
@@ -116,7 +116,7 @@ fn existing_tests(dir: &Path, ok: bool) -> HashMap<String, (PathBuf, Test)> {
         let text = fs::read_to_string(&path).unwrap();
         let test = Test { name: name.clone(), text, ok };
         if let Some(old) = res.insert(name, (path, test)) {
-            println!("Duplicate test: {:?}", old);
+            println!("Duplicate test: {old:?}");
        }
    }
    res