Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-29 13:25:09 +00:00
Add always disabled gen parse support

parent 983c9c122e
commit 7011094685

32 changed files with 376 additions and 55 deletions
@@ -12,8 +12,13 @@ pub enum Edition {
 }
 
 impl Edition {
+    /// The current latest stable edition, note this is usually not the right choice in code.
     pub const CURRENT: Edition = Edition::Edition2021;
     pub const DEFAULT: Edition = Edition::Edition2015;
+
+    pub fn at_least_2024(self) -> bool {
+        self >= Edition::Edition2024
+    }
 }
 
 #[derive(Debug)]
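The new at_least_2024 helper works because Edition is an ordered enum: later editions compare greater than earlier ones. A minimal stand-alone sketch of the idea (hypothetical simplified definition, not the crate's actual derives):

    #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    enum Edition {
        Edition2015,
        Edition2018,
        Edition2021,
        Edition2024,
    }

    impl Edition {
        // Later variants compare greater because the derived
        // ordering follows declaration order.
        fn at_least_2024(self) -> bool {
            self >= Edition::Edition2024
        }
    }

    fn main() {
        assert!(!Edition::Edition2015.at_least_2024());
        assert!(!Edition::Edition2021.at_least_2024());
        assert!(Edition::Edition2024.at_least_2024());
    }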
@@ -51,6 +51,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
         T![const],
         T![continue],
         T![do],
+        T![gen],
         T![for],
         T![if],
         T![let],
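ATOM_EXPR_FIRST is the FIRST set for atom expressions: the tokens that may begin one. Adding T![gen] lets the parser consider gen as the start of a block expression. Sets like this are typically constant bitsets; a runnable sketch with hypothetical numeric token ids (the parser's real TokenSet is keyed by SyntaxKind):

    #[derive(Clone, Copy)]
    struct TokenSet(u128);

    impl TokenSet {
        // Build the set at compile time; each token id sets one bit.
        const fn new(kinds: &[u8]) -> TokenSet {
            let mut bits = 0u128;
            let mut i = 0;
            while i < kinds.len() {
                bits |= 1u128 << kinds[i];
                i += 1;
            }
            TokenSet(bits)
        }
        const fn contains(self, kind: u8) -> bool {
            self.0 & (1u128 << kind) != 0
        }
    }

    const GEN: u8 = 7; // hypothetical id for the `gen` token
    const ATOM_EXPR_FIRST: TokenSet = TokenSet::new(&[1, 2, 3, GEN]);

    fn main() {
        assert!(ATOM_EXPR_FIRST.contains(GEN));
        assert!(!ATOM_EXPR_FIRST.contains(42));
    }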
@@ -138,15 +139,37 @@ pub(super) fn atom_expr(
         // fn f() { const { } }
         // fn f() { async { } }
         // fn f() { async move { } }
-        T![const] | T![unsafe] | T![async] if la == T!['{'] => {
+        T![const] | T![unsafe] | T![async] | T![gen] if la == T!['{'] => {
             let m = p.start();
             p.bump_any();
             stmt_list(p);
             m.complete(p, BLOCK_EXPR)
         }
-        T![async] if la == T![move] && p.nth(2) == T!['{'] => {
+        // test_err gen_blocks
+        // pub fn main() {
+        //     gen { yield ""; };
+        //     async gen { yield ""; };
+        //     gen move { yield ""; };
+        //     async gen move { yield ""; };
+        // }
+        T![async] if la == T![gen] && p.nth(2) == T!['{'] => {
+            let m = p.start();
+            p.bump(T![async]);
+            p.eat(T![gen]);
+            stmt_list(p);
+            m.complete(p, BLOCK_EXPR)
+        }
+        T![async] | T![gen] if la == T![move] && p.nth(2) == T!['{'] => {
             let m = p.start();
-            p.bump(T![async]);
+            p.bump_any();
             p.bump(T![move]);
             stmt_list(p);
             m.complete(p, BLOCK_EXPR)
         }
+        T![async] if la == T![gen] && p.nth(2) == T![move] && p.nth(3) == T!['{'] => {
+            let m = p.start();
+            p.bump(T![async]);
+            p.bump(T![gen]);
+            p.bump(T![move]);
+            stmt_list(p);
+            m.complete(p, BLOCK_EXPR)
+        }
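The test_err gen_blocks comment pins down what the new arms recognize: bare gen, async gen, gen move, and async gen move block expressions, each parsed as a BLOCK_EXPR after the modifier keywords are consumed. For orientation, this is roughly the surface syntax the feature enables on nightly rustc (assuming the unstable gen_blocks feature and edition 2024; the feature is unstable and details may change):

    #![feature(gen_blocks)] // nightly-only

    fn main() {
        // A `gen` block evaluates to an iterator over the yielded values.
        let mut it = gen {
            yield 1;
            yield 2;
        };
        assert_eq!(it.next(), Some(1));
        assert_eq!(it.next(), Some(2));
        assert_eq!(it.next(), None);
    }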
@@ -355,6 +378,7 @@ fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker {
     p.eat(T![const]);
     p.eat(T![static]);
     p.eat(T![async]);
+    p.eat(T![gen]);
     p.eat(T![move]);
 
     if !p.at(T![|]) {
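closure_expr handles the new keyword by trying to eat each possible modifier once, in a fixed order, so gen slots in between async and move. A stand-alone sketch of why a chain of eat calls accepts exactly that order (toy parser, not the real Parser API):

    struct P {
        toks: Vec<&'static str>,
        pos: usize,
    }

    impl P {
        // Consume the keyword only if it is the current token.
        fn eat(&mut self, kw: &str) -> bool {
            if self.toks.get(self.pos) == Some(&kw) {
                self.pos += 1;
                true
            } else {
                false
            }
        }
    }

    fn main() {
        // `async gen move |x| x` consumes the three modifiers in order.
        let mut p = P { toks: vec!["async", "gen", "move", "|"], pos: 0 };
        for kw in ["const", "static", "async", "gen", "move"] {
            p.eat(kw);
        }
        assert_eq!(p.toks[p.pos], "|"); // parser is now at the closure head
    }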
@@ -112,11 +112,22 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
 
     // test_err async_without_semicolon
     // fn foo() { let _ = async {} }
-    if p.at(T![async]) && !matches!(p.nth(1), T!['{'] | T![move] | T![|]) {
+    if p.at(T![async])
+        && (!matches!(p.nth(1), T!['{'] | T![gen] | T![move] | T![|])
+            || matches!((p.nth(1), p.nth(2)), (T![gen], T![fn])))
+    {
         p.eat(T![async]);
         has_mods = true;
     }
 
+    // test_err gen_fn
+    // gen fn gen_fn() {}
+    // async gen fn async_gen_fn() {}
+    if p.at(T![gen]) && p.nth(1) == T![fn] {
+        p.eat(T![gen]);
+        has_mods = true;
+    }
+
     // test_err unsafe_block_in_mod
     // fn foo(){} unsafe { } fn bar(){}
     if p.at(T![unsafe]) && p.nth(1) != T!['{'] {
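The reworked condition still treats async as an item modifier unless it starts a block or closure (async {, async move {, async gen {, async |), and the extra (T![gen], T![fn]) pair re-admits async gen fn. A runnable sketch of the same decision over hypothetical tokens (not the real parser API):

    #[derive(Clone, Copy, PartialEq)]
    enum Tok { Async, Gen, Fn, Move, LBrace, Pipe, Other }

    fn async_is_item_modifier(nth: impl Fn(usize) -> Tok) -> bool {
        nth(0) == Tok::Async
            && (!matches!(nth(1), Tok::LBrace | Tok::Gen | Tok::Move | Tok::Pipe)
                || matches!((nth(1), nth(2)), (Tok::Gen, Tok::Fn)))
    }

    fn at(toks: Vec<Tok>) -> impl Fn(usize) -> Tok {
        move |i| toks.get(i).copied().unwrap_or(Tok::Other)
    }

    fn main() {
        // `async gen fn f() {}`: `async` is an item modifier.
        assert!(async_is_item_modifier(at(vec![Tok::Async, Tok::Gen, Tok::Fn])));
        // `async gen { ... }`: a block expression, not an item.
        assert!(!async_is_item_modifier(at(vec![Tok::Async, Tok::Gen, Tok::LBrace])));
        // `async fn f() {}`: still a plain async item.
        assert!(async_is_item_modifier(at(vec![Tok::Async, Tok::Fn, Tok::Other])));
    }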
@@ -13,6 +13,7 @@ use std::ops;
 use rustc_lexer::unescape::{EscapeError, Mode};
 
 use crate::{
+    Edition,
     SyntaxKind::{self, *},
     T,
 };
@@ -30,9 +31,9 @@ struct LexError {
 }
 
 impl<'a> LexedStr<'a> {
-    pub fn new(text: &'a str) -> LexedStr<'a> {
+    pub fn new(edition: Edition, text: &'a str) -> LexedStr<'a> {
         let _p = tracing::info_span!("LexedStr::new").entered();
-        let mut conv = Converter::new(text);
+        let mut conv = Converter::new(edition, text);
         if let Some(shebang_len) = rustc_lexer::strip_shebang(text) {
             conv.res.push(SHEBANG, conv.offset);
             conv.offset = shebang_len;
@@ -47,7 +48,7 @@ impl<'a> LexedStr<'a> {
         conv.finalize_with_eof()
     }
 
-    pub fn single_token(text: &'a str) -> Option<(SyntaxKind, Option<String>)> {
+    pub fn single_token(edition: Edition, text: &'a str) -> Option<(SyntaxKind, Option<String>)> {
         if text.is_empty() {
             return None;
         }
@@ -57,7 +58,7 @@ impl<'a> LexedStr<'a> {
             return None;
         }
 
-        let mut conv = Converter::new(text);
+        let mut conv = Converter::new(edition, text);
         conv.extend_token(&token.kind, text);
         match &*conv.res.kind {
             [kind] => Some((*kind, conv.res.error.pop().map(|it| it.msg))),
@@ -129,13 +130,15 @@ impl<'a> LexedStr<'a> {
 struct Converter<'a> {
     res: LexedStr<'a>,
     offset: usize,
+    edition: Edition,
 }
 
 impl<'a> Converter<'a> {
-    fn new(text: &'a str) -> Self {
+    fn new(edition: Edition, text: &'a str) -> Self {
         Self {
             res: LexedStr { text, kind: Vec::new(), start: Vec::new(), error: Vec::new() },
             offset: 0,
+            edition,
         }
     }
 
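Taken together, the lexer changes are plain constructor injection: callers now pass the edition to LexedStr::new, which forwards it to Converter::new, and the converter stores it so identifier classification can consult self.edition later. No global state is involved, so files belonging to crates with different editions can be lexed independently.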
@@ -175,6 +178,11 @@ impl<'a> Converter<'a> {
             rustc_lexer::TokenKind::Whitespace => WHITESPACE,
 
             rustc_lexer::TokenKind::Ident if token_text == "_" => UNDERSCORE,
+            rustc_lexer::TokenKind::Ident
+                if token_text == "gen" && !self.edition.at_least_2024() =>
+            {
+                IDENT
+            }
             rustc_lexer::TokenKind::Ident => {
                 SyntaxKind::from_keyword(token_text).unwrap_or(IDENT)
             }
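This is the heart of the edition gate: on editions before 2024, gen never reaches SyntaxKind::from_keyword and so stays an ordinary identifier, leaving existing code that uses gen as a name untouched. A minimal stand-alone sketch of the classification (hypothetical names and return values; the real code returns a SyntaxKind):

    fn classify_ident(text: &str, at_least_2024: bool) -> &'static str {
        match text {
            // `gen` stays a plain identifier before edition 2024.
            "gen" if !at_least_2024 => "IDENT",
            "gen" => "GEN_KW",
            "fn" => "FN_KW",
            _ => "IDENT", // stand-in for from_keyword(...).unwrap_or(IDENT)
        }
    }

    fn main() {
        assert_eq!(classify_ident("gen", false), "IDENT");
        assert_eq!(classify_ident("gen", true), "GEN_KW");
        assert_eq!(classify_ident("fn", false), "FN_KW");
    }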
File diff suppressed because one or more lines are too long
@@ -9,7 +9,7 @@ use std::{
 
 use expect_test::expect_file;
 
-use crate::{LexedStr, TopEntryPoint};
+use crate::{Edition, LexedStr, TopEntryPoint};
 
 #[test]
 fn lex_ok() {
@@ -30,7 +30,7 @@ fn lex_err() {
 }
 
 fn lex(text: &str) -> String {
-    let lexed = LexedStr::new(text);
+    let lexed = LexedStr::new(Edition::CURRENT, text);
 
     let mut res = String::new();
     for i in 0..lexed.len() {
@@ -85,9 +85,9 @@ fn parse_inline_err() {
 }
 
 fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) {
-    let lexed = LexedStr::new(text);
+    let lexed = LexedStr::new(Edition::CURRENT, text);
     let input = lexed.to_input();
-    let output = entry.parse(&input, crate::Edition::CURRENT);
+    let output = entry.parse(&input, Edition::CURRENT);
 
     let mut buf = String::new();
     let mut errors = Vec::new();
|
@ -1,4 +1,4 @@
|
|||
use crate::{LexedStr, PrefixEntryPoint, Step};
|
||||
use crate::{Edition, LexedStr, PrefixEntryPoint, Step};
|
||||
|
||||
#[test]
|
||||
fn vis() {
|
||||
|
@@ -82,11 +82,11 @@ fn meta_item() {
 
 #[track_caller]
 fn check(entry: PrefixEntryPoint, input: &str, prefix: &str) {
-    let lexed = LexedStr::new(input);
+    let lexed = LexedStr::new(Edition::CURRENT, input);
     let input = lexed.to_input();
 
     let mut n_tokens = 0;
-    for step in entry.parse(&input, crate::Edition::CURRENT).iter() {
+    for step in entry.parse(&input, Edition::CURRENT).iter() {
         match step {
             Step::Token { n_input_tokens, .. } => n_tokens += n_input_tokens as usize,
             Step::FloatSplit { .. } => n_tokens += 1,
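Note that the test harness pins both lexing and parsing to Edition::CURRENT, which this same diff documents as Edition2021. Since the lexer only produces a gen keyword from edition 2024 onward, the new grammar paths stay unreachable under the default test setup, which appears to be what the commit title means by "always disabled".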