mirror of
https://github.com/rust-lang/rust-analyzer.git
synced 2025-09-13 22:06:56 +00:00
Merge commit '55d9a533b3' into sync-from-ra
This commit is contained in:
commit
c8c635a3f8
237 changed files with 6273 additions and 3337 deletions
55
crates/parser/src/edition.rs
Normal file
55
crates/parser/src/edition.rs
Normal file
|
@ -0,0 +1,55 @@
|
|||
//! The edition of the Rust language used in a crate.
|
||||
// Ideally this would be defined in the span crate, but the dependency chain is all over the place
|
||||
// wrt to span, parser and syntax.
|
||||
use std::fmt;
|
||||
|
||||
/// The edition of the Rust language used in a crate.
///
/// NOTE(review): the declaration order of the variants is significant — the
/// derived `PartialOrd`/`Ord` implementations compare by declaration order,
/// so editions compare chronologically (2015 < 2018 < 2021 < 2024). New
/// editions must be appended at the end to preserve that ordering.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Edition {
    /// The 2015 edition.
    Edition2015,
    /// The 2018 edition.
    Edition2018,
    /// The 2021 edition.
    Edition2021,
    /// The 2024 edition.
    Edition2024,
}
|
||||
|
||||
impl Edition {
|
||||
pub const CURRENT: Edition = Edition::Edition2021;
|
||||
pub const DEFAULT: Edition = Edition::Edition2015;
|
||||
}
|
||||
|
||||
/// Error returned when a string does not name a known Rust edition.
#[derive(Debug)]
pub struct ParseEditionError {
    // The string that failed to parse, kept for the error message.
    invalid_input: String,
}

impl fmt::Display for ParseEditionError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let ParseEditionError { invalid_input } = self;
        // `{:?}` deliberately quotes/escapes the offending input.
        write!(f, "invalid edition: {:?}", invalid_input)
    }
}

impl std::error::Error for ParseEditionError {}
|
||||
|
||||
impl std::str::FromStr for Edition {
|
||||
type Err = ParseEditionError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let res = match s {
|
||||
"2015" => Edition::Edition2015,
|
||||
"2018" => Edition::Edition2018,
|
||||
"2021" => Edition::Edition2021,
|
||||
"2024" => Edition::Edition2024,
|
||||
_ => return Err(ParseEditionError { invalid_input: s.to_owned() }),
|
||||
};
|
||||
Ok(res)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Edition {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(match self {
|
||||
Edition::Edition2015 => "2015",
|
||||
Edition::Edition2018 => "2018",
|
||||
Edition::Edition2021 => "2021",
|
||||
Edition::Edition2024 => "2024",
|
||||
})
|
||||
}
|
||||
}
|
|
@ -26,6 +26,7 @@ extern crate ra_ap_rustc_lexer as rustc_lexer;
|
|||
#[cfg(feature = "in-rust-tree")]
|
||||
extern crate rustc_lexer;
|
||||
|
||||
mod edition;
|
||||
mod event;
|
||||
mod grammar;
|
||||
mod input;
|
||||
|
@ -42,6 +43,7 @@ mod tests;
|
|||
pub(crate) use token_set::TokenSet;
|
||||
|
||||
pub use crate::{
|
||||
edition::Edition,
|
||||
input::Input,
|
||||
lexed_str::LexedStr,
|
||||
output::{Output, Step},
|
||||
|
@ -86,7 +88,7 @@ pub enum TopEntryPoint {
|
|||
}
|
||||
|
||||
impl TopEntryPoint {
|
||||
pub fn parse(&self, input: &Input) -> Output {
|
||||
pub fn parse(&self, input: &Input, edition: Edition) -> Output {
|
||||
let _p = tracing::span!(tracing::Level::INFO, "TopEntryPoint::parse", ?self).entered();
|
||||
let entry_point: fn(&'_ mut parser::Parser<'_>) = match self {
|
||||
TopEntryPoint::SourceFile => grammar::entry::top::source_file,
|
||||
|
@ -98,7 +100,7 @@ impl TopEntryPoint {
|
|||
TopEntryPoint::MetaItem => grammar::entry::top::meta_item,
|
||||
TopEntryPoint::MacroEagerInput => grammar::entry::top::eager_macro_input,
|
||||
};
|
||||
let mut p = parser::Parser::new(input);
|
||||
let mut p = parser::Parser::new(input, edition);
|
||||
entry_point(&mut p);
|
||||
let events = p.finish();
|
||||
let res = event::process(events);
|
||||
|
@ -150,7 +152,7 @@ pub enum PrefixEntryPoint {
|
|||
}
|
||||
|
||||
impl PrefixEntryPoint {
|
||||
pub fn parse(&self, input: &Input) -> Output {
|
||||
pub fn parse(&self, input: &Input, edition: Edition) -> Output {
|
||||
let entry_point: fn(&'_ mut parser::Parser<'_>) = match self {
|
||||
PrefixEntryPoint::Vis => grammar::entry::prefix::vis,
|
||||
PrefixEntryPoint::Block => grammar::entry::prefix::block,
|
||||
|
@ -163,7 +165,7 @@ impl PrefixEntryPoint {
|
|||
PrefixEntryPoint::Item => grammar::entry::prefix::item,
|
||||
PrefixEntryPoint::MetaItem => grammar::entry::prefix::meta_item,
|
||||
};
|
||||
let mut p = parser::Parser::new(input);
|
||||
let mut p = parser::Parser::new(input, edition);
|
||||
entry_point(&mut p);
|
||||
let events = p.finish();
|
||||
event::process(events)
|
||||
|
@ -187,9 +189,9 @@ impl Reparser {
|
|||
///
|
||||
/// Tokens must start with `{`, end with `}` and form a valid brace
|
||||
/// sequence.
|
||||
pub fn parse(self, tokens: &Input) -> Output {
|
||||
pub fn parse(self, tokens: &Input, edition: Edition) -> Output {
|
||||
let Reparser(r) = self;
|
||||
let mut p = parser::Parser::new(tokens);
|
||||
let mut p = parser::Parser::new(tokens, edition);
|
||||
r(&mut p);
|
||||
let events = p.finish();
|
||||
event::process(events)
|
||||
|
|
|
@ -8,6 +8,7 @@ use limit::Limit;
|
|||
use crate::{
|
||||
event::Event,
|
||||
input::Input,
|
||||
Edition,
|
||||
SyntaxKind::{self, EOF, ERROR, TOMBSTONE},
|
||||
TokenSet, T,
|
||||
};
|
||||
|
@ -26,13 +27,14 @@ pub(crate) struct Parser<'t> {
|
|||
pos: usize,
|
||||
events: Vec<Event>,
|
||||
steps: Cell<u32>,
|
||||
_edition: Edition,
|
||||
}
|
||||
|
||||
static PARSER_STEP_LIMIT: Limit = Limit::new(15_000_000);
|
||||
|
||||
impl<'t> Parser<'t> {
|
||||
pub(super) fn new(inp: &'t Input) -> Parser<'t> {
|
||||
Parser { inp, pos: 0, events: Vec::new(), steps: Cell::new(0) }
|
||||
pub(super) fn new(inp: &'t Input, edition: Edition) -> Parser<'t> {
|
||||
Parser { inp, pos: 0, events: Vec::new(), steps: Cell::new(0), _edition: edition }
|
||||
}
|
||||
|
||||
pub(crate) fn finish(self) -> Vec<Event> {
|
||||
|
|
File diff suppressed because one or more lines are too long
|
@ -88,7 +88,7 @@ fn parse_inline_err() {
|
|||
fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) {
|
||||
let lexed = LexedStr::new(text);
|
||||
let input = lexed.to_input();
|
||||
let output = entry.parse(&input);
|
||||
let output = entry.parse(&input, crate::Edition::CURRENT);
|
||||
|
||||
let mut buf = String::new();
|
||||
let mut errors = Vec::new();
|
||||
|
|
|
@ -86,7 +86,7 @@ fn check(entry: PrefixEntryPoint, input: &str, prefix: &str) {
|
|||
let input = lexed.to_input();
|
||||
|
||||
let mut n_tokens = 0;
|
||||
for step in entry.parse(&input).iter() {
|
||||
for step in entry.parse(&input, crate::Edition::CURRENT).iter() {
|
||||
match step {
|
||||
Step::Token { n_input_tokens, .. } => n_tokens += n_input_tokens as usize,
|
||||
Step::FloatSplit { .. } => n_tokens += 1,
|
||||
|
|
|
@ -4,34 +4,48 @@ use crate::SyntaxKind;
|
|||
|
||||
/// A bit-set of `SyntaxKind`s
|
||||
#[derive(Clone, Copy)]
|
||||
pub(crate) struct TokenSet(u128);
|
||||
pub(crate) struct TokenSet([u64; 3]);
|
||||
|
||||
/// `TokenSet`s should only include token `SyntaxKind`s, so the discriminant of any passed/included
|
||||
/// `SyntaxKind` must *not* be greater than that of the last token `SyntaxKind`.
|
||||
/// See #17037.
|
||||
const LAST_TOKEN_KIND_DISCRIMINANT: usize = SyntaxKind::SHEBANG as usize;
|
||||
|
||||
impl TokenSet {
|
||||
pub(crate) const EMPTY: TokenSet = TokenSet(0);
|
||||
pub(crate) const EMPTY: TokenSet = TokenSet([0; 3]);
|
||||
|
||||
pub(crate) const fn new(kinds: &[SyntaxKind]) -> TokenSet {
|
||||
let mut res = 0u128;
|
||||
let mut res = [0; 3];
|
||||
let mut i = 0;
|
||||
while i < kinds.len() {
|
||||
res |= mask(kinds[i]);
|
||||
let discriminant = kinds[i] as usize;
|
||||
debug_assert!(
|
||||
discriminant <= LAST_TOKEN_KIND_DISCRIMINANT,
|
||||
"Expected a token `SyntaxKind`"
|
||||
);
|
||||
let idx = discriminant / 64;
|
||||
res[idx] |= 1 << (discriminant % 64);
|
||||
i += 1;
|
||||
}
|
||||
TokenSet(res)
|
||||
}
|
||||
|
||||
pub(crate) const fn union(self, other: TokenSet) -> TokenSet {
|
||||
TokenSet(self.0 | other.0)
|
||||
TokenSet([self.0[0] | other.0[0], self.0[1] | other.0[1], self.0[2] | other.0[2]])
|
||||
}
|
||||
|
||||
pub(crate) const fn contains(&self, kind: SyntaxKind) -> bool {
|
||||
self.0 & mask(kind) != 0
|
||||
let discriminant = kind as usize;
|
||||
debug_assert!(
|
||||
discriminant <= LAST_TOKEN_KIND_DISCRIMINANT,
|
||||
"Expected a token `SyntaxKind`"
|
||||
);
|
||||
let idx = discriminant / 64;
|
||||
let mask = 1 << (discriminant % 64);
|
||||
self.0[idx] & mask != 0
|
||||
}
|
||||
}
|
||||
|
||||
const fn mask(kind: SyntaxKind) -> u128 {
|
||||
1u128 << (kind as usize)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn token_set_works_for_tokens() {
|
||||
use crate::SyntaxKind::*;
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue