Merge branch 'master' of github.com:rust-analyzer/rust-analyzer

Commit 0b40876b99: 255 changed files with 11,248 additions and 11,172 deletions
@@ -16,9 +16,7 @@ use crate::{
 };

 pub use self::{
-    expr_extensions::{
-        ArrayExprKind, BinOp, BlockModifier, ElseBranch, LiteralKind, PrefixOp, RangeOp,
-    },
+    expr_extensions::{ArrayExprKind, BinOp, Effect, ElseBranch, LiteralKind, PrefixOp, RangeOp},
     extensions::{
         AttrKind, FieldKind, NameOrNameRef, PathSegmentKind, SelfParamKind, SlicePatComponents,
         StructKind, TypeBoundKind, VisibilityKind,
@@ -28,7 +28,7 @@ impl ast::BinExpr {

 impl ast::FnDef {
     #[must_use]
-    pub fn with_body(&self, body: ast::Block) -> ast::FnDef {
+    pub fn with_body(&self, body: ast::BlockExpr) -> ast::FnDef {
         let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new();
         let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.body() {
             old_body.syntax().clone().into()
@@ -16,7 +16,7 @@ impl ast::Expr {
             | ast::Expr::WhileExpr(_)
             | ast::Expr::BlockExpr(_)
             | ast::Expr::MatchExpr(_)
-            | ast::Expr::TryExpr(_) => true,
+            | ast::Expr::EffectExpr(_) => true,
             _ => false,
         }
     }
@@ -359,22 +359,34 @@ impl ast::Literal {
     }
 }

-pub enum BlockModifier {
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum Effect {
     Async(SyntaxToken),
     Unsafe(SyntaxToken),
+    Try(SyntaxToken),
+    // Very much not an effect, but we stuff it into this node anyway
+    Label(ast::Label),
 }

+impl ast::EffectExpr {
+    pub fn effect(&self) -> Effect {
+        if let Some(token) = self.async_token() {
+            return Effect::Async(token);
+        }
+        if let Some(token) = self.unsafe_token() {
+            return Effect::Unsafe(token);
+        }
+        if let Some(token) = self.try_token() {
+            return Effect::Try(token);
+        }
+        if let Some(label) = self.label() {
+            return Effect::Label(label);
+        }
+        unreachable!("ast::EffectExpr without Effect")
+    }
+}
+
 impl ast::BlockExpr {
-    pub fn modifier(&self) -> Option<BlockModifier> {
-        if let Some(token) = self.async_token() {
-            return Some(BlockModifier::Async(token));
-        }
-        if let Some(token) = self.unsafe_token() {
-            return Some(BlockModifier::Unsafe(token));
-        }
-        None
-    }
-
     /// false if the block is an intrinsic part of the syntax and can't be
     /// replaced with arbitrary expression.
     ///
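The new `Effect` enum gives downstream code one place to ask what kind of wrapper an `EffectExpr` is. A minimal sketch of a consumer, assuming the `ra_syntax` crate and the `ast::{EffectExpr, Effect}` re-exports shown in the first hunk (the helper function itself is hypothetical, not part of this change):

use ra_syntax::ast::{self, Effect};

// Hypothetical helper: classify a block-like wrapper by the Effect
// reported from EffectExpr::effect().
fn describe(effect_expr: &ast::EffectExpr) -> &'static str {
    match effect_expr.effect() {
        Effect::Async(_token) => "async block",
        Effect::Unsafe(_token) => "unsafe block",
        Effect::Try(_token) => "try block",
        // As the enum comment notes, a label is not really an effect,
        // but labeled blocks are parsed into the same node.
        Effect::Label(_label) => "labeled block",
    }
}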
@@ -383,15 +395,12 @@ impl ast::BlockExpr {
     /// const FOO: () = { stand_alone };
     /// ```
     pub fn is_standalone(&self) -> bool {
-        if self.modifier().is_some() {
-            return false;
-        }
         let parent = match self.syntax().parent() {
             Some(it) => it,
             None => return true,
         };
         match parent.kind() {
-            FN_DEF | IF_EXPR | WHILE_EXPR | LOOP_EXPR => false,
+            FN_DEF | IF_EXPR | WHILE_EXPR | LOOP_EXPR | EFFECT_EXPR => false,
             _ => true,
         }
     }
@@ -407,7 +407,7 @@ impl ast::Visibility {
         } else if self.super_token().is_some() {
             VisibilityKind::PubSuper
         } else if self.self_token().is_some() {
-            VisibilityKind::PubSuper
+            VisibilityKind::PubSelf
         } else {
             VisibilityKind::Pub
         }
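This one-line change matters because `pub(self)` and `pub(super)` previously mapped to the same kind. A hedged sketch of code that relies on the distinction, assuming the enclosing method shown above is `ast::Visibility::kind()` and that `VisibilityKind` is the enum re-exported earlier (the helper is illustrative only):

use ra_syntax::ast::{self, VisibilityKind};

// With the fix, pub(self) is reported as PubSelf instead of PubSuper,
// so a consumer can treat it as module-private again.
fn is_module_private(vis: &ast::Visibility) -> bool {
    match vis.kind() {
        VisibilityKind::PubSelf => true,
        _ => false,
    }
}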
@@ -475,6 +475,19 @@ impl LoopExpr {
     pub fn loop_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![loop]) }
 }

+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct EffectExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for EffectExpr {}
+impl EffectExpr {
+    pub fn label(&self) -> Option<Label> { support::child(&self.syntax) }
+    pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) }
+    pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+    pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+    pub fn block_expr(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
+}
+
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ForExpr {
     pub(crate) syntax: SyntaxNode,
@@ -541,11 +554,12 @@ pub struct BlockExpr {
     pub(crate) syntax: SyntaxNode,
 }
 impl ast::AttrsOwner for BlockExpr {}
+impl ast::ModuleItemOwner for BlockExpr {}
 impl BlockExpr {
-    pub fn label(&self) -> Option<Label> { support::child(&self.syntax) }
-    pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
-    pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
-    pub fn block(&self) -> Option<Block> { support::child(&self.syntax) }
+    pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+    pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
 }

 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
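With `Block` folded into `BlockExpr`, the statements and the optional tail expression are read straight off the block expression. A small sketch using only the generated accessors above, assuming the crate is `ra_syntax`:

use ra_syntax::ast;

// Counts the statements in a block and reports whether it ends in a tail
// expression, using BlockExpr::statements() and BlockExpr::expr() directly
// instead of going through the removed Block node.
fn block_shape(block: &ast::BlockExpr) -> (usize, bool) {
    (block.statements().count(), block.expr().is_some())
}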
@@ -617,8 +631,8 @@ pub struct TryExpr {
 }
 impl ast::AttrsOwner for TryExpr {}
 impl TryExpr {
-    pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) }
     pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) }
 }

 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -1111,19 +1125,6 @@ impl Condition {
     pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
 }

-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Block {
-    pub(crate) syntax: SyntaxNode,
-}
-impl ast::AttrsOwner for Block {}
-impl ast::ModuleItemOwner for Block {}
-impl Block {
-    pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
-    pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) }
-    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
-    pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
-}
-
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ParamList {
     pub(crate) syntax: SyntaxNode,
@@ -1241,6 +1242,8 @@ pub struct PathSegment {
 impl PathSegment {
     pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
     pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) }
+    pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
+    pub fn super_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![super]) }
     pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
     pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
     pub fn type_arg_list(&self) -> Option<TypeArgList> { support::child(&self.syntax) }
@@ -1465,6 +1468,7 @@ pub enum Expr {
     FieldExpr(FieldExpr),
     AwaitExpr(AwaitExpr),
     TryExpr(TryExpr),
+    EffectExpr(EffectExpr),
     CastExpr(CastExpr),
     RefExpr(RefExpr),
     PrefixExpr(PrefixExpr),
@@ -1947,6 +1951,17 @@ impl AstNode for LoopExpr {
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl AstNode for EffectExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == EFFECT_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
 impl AstNode for ForExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_EXPR }
     fn cast(syntax: SyntaxNode) -> Option<Self> {
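The `AstNode` impl is what lets generic tree-walking code pick out the new node. A minimal sketch of the usual cast pattern, assuming `ra_syntax::{ast, AstNode, SyntaxNode}`; the function is illustrative and not part of the change:

use ra_syntax::{ast, AstNode, SyntaxNode};

// Collects every EffectExpr in a subtree, relying on the can_cast/cast
// pair added above.
fn collect_effect_exprs(node: &SyntaxNode) -> Vec<ast::EffectExpr> {
    node.descendants().filter_map(ast::EffectExpr::cast).collect()
}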
@@ -2629,17 +2644,6 @@ impl AstNode for Condition {
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
-impl AstNode for Block {
-    fn can_cast(kind: SyntaxKind) -> bool { kind == BLOCK }
-    fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
-    }
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
 impl AstNode for ParamList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM_LIST }
     fn cast(syntax: SyntaxNode) -> Option<Self> {
@@ -3288,6 +3292,9 @@ impl From<AwaitExpr> for Expr {
 impl From<TryExpr> for Expr {
     fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) }
 }
+impl From<EffectExpr> for Expr {
+    fn from(node: EffectExpr) -> Expr { Expr::EffectExpr(node) }
+}
 impl From<CastExpr> for Expr {
     fn from(node: CastExpr) -> Expr { Expr::CastExpr(node) }
 }
@@ -3318,8 +3325,10 @@ impl AstNode for Expr {
             TUPLE_EXPR | ARRAY_EXPR | PAREN_EXPR | PATH_EXPR | LAMBDA_EXPR | IF_EXPR
             | LOOP_EXPR | FOR_EXPR | WHILE_EXPR | CONTINUE_EXPR | BREAK_EXPR | LABEL
             | BLOCK_EXPR | RETURN_EXPR | MATCH_EXPR | RECORD_LIT | CALL_EXPR | INDEX_EXPR
-            | METHOD_CALL_EXPR | FIELD_EXPR | AWAIT_EXPR | TRY_EXPR | CAST_EXPR | REF_EXPR
-            | PREFIX_EXPR | RANGE_EXPR | BIN_EXPR | LITERAL | MACRO_CALL | BOX_EXPR => true,
+            | METHOD_CALL_EXPR | FIELD_EXPR | AWAIT_EXPR | TRY_EXPR | EFFECT_EXPR | CAST_EXPR
+            | REF_EXPR | PREFIX_EXPR | RANGE_EXPR | BIN_EXPR | LITERAL | MACRO_CALL | BOX_EXPR => {
+                true
+            }
             _ => false,
         }
     }
@@ -3347,6 +3356,7 @@ impl AstNode for Expr {
             FIELD_EXPR => Expr::FieldExpr(FieldExpr { syntax }),
             AWAIT_EXPR => Expr::AwaitExpr(AwaitExpr { syntax }),
             TRY_EXPR => Expr::TryExpr(TryExpr { syntax }),
+            EFFECT_EXPR => Expr::EffectExpr(EffectExpr { syntax }),
             CAST_EXPR => Expr::CastExpr(CastExpr { syntax }),
             REF_EXPR => Expr::RefExpr(RefExpr { syntax }),
             PREFIX_EXPR => Expr::PrefixExpr(PrefixExpr { syntax }),
@@ -3383,6 +3393,7 @@ impl AstNode for Expr {
             Expr::FieldExpr(it) => &it.syntax,
             Expr::AwaitExpr(it) => &it.syntax,
             Expr::TryExpr(it) => &it.syntax,
+            Expr::EffectExpr(it) => &it.syntax,
             Expr::CastExpr(it) => &it.syntax,
             Expr::RefExpr(it) => &it.syntax,
             Expr::PrefixExpr(it) => &it.syntax,
@@ -3863,6 +3874,11 @@ impl std::fmt::Display for LoopExpr {
         std::fmt::Display::fmt(self.syntax(), f)
     }
 }
+impl std::fmt::Display for EffectExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
 impl std::fmt::Display for ForExpr {
     fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
         std::fmt::Display::fmt(self.syntax(), f)
@@ -4173,11 +4189,6 @@ impl std::fmt::Display for Condition {
         std::fmt::Display::fmt(self.syntax(), f)
     }
 }
-impl std::fmt::Display for Block {
-    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
-        std::fmt::Display::fmt(self.syntax(), f)
-    }
-}
 impl std::fmt::Display for ParamList {
     fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
         std::fmt::Display::fmt(self.syntax(), f)
@@ -82,14 +82,6 @@ pub fn block_expr(
     ast_from_text(&format!("fn f() {}", buf))
 }

-pub fn block_from_expr(e: ast::Expr) -> ast::Block {
-    return from_text(&format!("{{ {} }}", e));
-
-    fn from_text(text: &str) -> ast::Block {
-        ast_from_text(&format!("fn f() {}", text))
-    }
-}
-
 pub fn expr_unit() -> ast::Expr {
     expr_from_text("()")
 }
@@ -237,8 +237,7 @@ fn api_walkthrough() {

     // Let's get the `1 + 1` expression!
     let body: ast::BlockExpr = func.body().unwrap();
-    let block = body.block().unwrap();
-    let expr: ast::Expr = block.expr().unwrap();
+    let expr: ast::Expr = body.expr().unwrap();

     // Enums are used to group related ast nodes together, and can be used for
     // matching. However, because there are no public fields, it's possible to
@@ -274,8 +273,8 @@ fn api_walkthrough() {
     assert_eq!(text.to_string(), "1 + 1");

     // There's a bunch of traversal methods on `SyntaxNode`:
-    assert_eq!(expr_syntax.parent().as_ref(), Some(block.syntax()));
-    assert_eq!(block.syntax().first_child_or_token().map(|it| it.kind()), Some(T!['{']));
+    assert_eq!(expr_syntax.parent().as_ref(), Some(body.syntax()));
+    assert_eq!(body.syntax().first_child_or_token().map(|it| it.kind()), Some(T!['{']));
     assert_eq!(
         expr_syntax.next_sibling_or_token().map(|it| it.kind()),
         Some(SyntaxKind::WHITESPACE)
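After this change the walkthrough no longer needs an intermediate `Block` node: the tail expression hangs directly off the function's `BlockExpr`. A condensed sketch of that flow, assuming `SourceFile::parse` and `ast::FnDef` as used elsewhere in `api_walkthrough`:

use ra_syntax::{ast, AstNode, SourceFile};

// Parses some text, finds the first fn, and returns its tail expression.
// BlockExpr::expr() now yields the tail expression without going through
// a separate Block node.
fn tail_expr_of_first_fn(text: &str) -> Option<ast::Expr> {
    let file = SourceFile::parse(text).tree();
    let func = file.syntax().descendants().find_map(ast::FnDef::cast)?;
    func.body()?.expr()
}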
@@ -180,7 +180,7 @@ fn rustc_token_kind_to_syntax_kind(
     return (syntax_kind, None);

     fn match_literal_kind(kind: &rustc_lexer::LiteralKind) -> (SyntaxKind, Option<&'static str>) {
-        use rustc_lexer::LiteralKind as LK;
+        use rustc_lexer::{LexRawStrError, LiteralKind as LK};

         #[rustfmt::skip]
         let syntax_kind = match *kind {
@@ -215,21 +215,28 @@ fn rustc_token_kind_to_syntax_kind(
                 return (BYTE_STRING, Some("Missing trailing `\"` symbol to terminate the byte string literal"))
             }

-            LK::RawStr { started: true, terminated: true, .. } => RAW_STRING,
-            LK::RawStr { started: true, terminated: false, .. } => {
-                return (RAW_STRING, Some("Missing trailing `\"` with `#` symbols to terminate the raw string literal"))
-            }
-            LK::RawStr { started: false, .. } => {
-                return (RAW_STRING, Some("Missing `\"` symbol after `#` symbols to begin the raw string literal"))
-            }
-
-            LK::RawByteStr { started: true, terminated: true, .. } => RAW_BYTE_STRING,
-            LK::RawByteStr { started: true, terminated: false, .. } => {
-                return (RAW_BYTE_STRING, Some("Missing trailing `\"` with `#` symbols to terminate the raw byte string literal"))
-            }
-            LK::RawByteStr { started: false, .. } => {
-                return (RAW_BYTE_STRING, Some("Missing `\"` symbol after `#` symbols to begin the raw byte string literal"))
-            }
+            LK::RawStr(str) => match str.validate() {
+                Ok(_) => RAW_STRING,
+                Err(LexRawStrError::InvalidStarter) => return (RAW_STRING, Some("Missing `\"` symbol after `#` symbols to begin the raw string literal")),
+                Err(LexRawStrError::NoTerminator { expected, found, .. }) => if expected == found {
+                    return (RAW_STRING, Some("Missing trailing `\"` to terminate the raw string literal"))
+                } else {
+                    return (RAW_STRING, Some("Missing trailing `\"` with `#` symbols to terminate the raw string literal"))
+
+                },
+                Err(LexRawStrError::TooManyDelimiters { .. }) => return (RAW_STRING, Some("Too many `#` symbols: raw strings may be delimited by up to 65535 `#` symbols")),
+            },
+            LK::RawByteStr(str) => match str.validate() {
+                Ok(_) => RAW_BYTE_STRING,
+                Err(LexRawStrError::InvalidStarter) => return (RAW_BYTE_STRING, Some("Missing `\"` symbol after `#` symbols to begin the raw byte string literal")),
+                Err(LexRawStrError::NoTerminator { expected, found, .. }) => if expected == found {
+                    return (RAW_BYTE_STRING, Some("Missing trailing `\"` to terminate the raw byte string literal"))
+                } else {
+                    return (RAW_BYTE_STRING, Some("Missing trailing `\"` with `#` symbols to terminate the raw byte string literal"))
+
+                },
+                Err(LexRawStrError::TooManyDelimiters { .. }) => return (RAW_BYTE_STRING, Some("Too many `#` symbols: raw byte strings may be delimited by up to 65535 `#` symbols")),
+            },
         };

         (syntax_kind, None)
@@ -96,7 +96,7 @@ pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> {
                 ast::RecordField(it) => validate_numeric_name(it.name_ref(), &mut errors),
                 ast::Visibility(it) => validate_visibility(it, &mut errors),
                 ast::RangeExpr(it) => validate_range_expr(it, &mut errors),
-                ast::PathSegment(it) => validate_crate_keyword_in_path_segment(it, &mut errors),
+                ast::PathSegment(it) => validate_path_keywords(it, &mut errors),
                 _ => (),
             }
         }
@@ -224,59 +224,82 @@ fn validate_range_expr(expr: ast::RangeExpr, errors: &mut Vec<SyntaxError>) {
     }
 }

-fn validate_crate_keyword_in_path_segment(
-    segment: ast::PathSegment,
-    errors: &mut Vec<SyntaxError>,
-) {
-    const ERR_MSG: &str = "The `crate` keyword is only allowed as the first segment of a path";
-
-    let crate_token = match segment.crate_token() {
-        None => return,
-        Some(it) => it,
-    };
-
-    // Disallow both ::crate and foo::crate
-    let mut path = segment.parent_path();
-    if segment.coloncolon_token().is_some() || path.qualifier().is_some() {
-        errors.push(SyntaxError::new(ERR_MSG, crate_token.text_range()));
-        return;
-    }
-
-    // For expressions and types, validation is complete, but we still have
-    // to handle invalid UseItems like this:
-    //
-    // use foo:{crate::bar::baz};
-    //
-    // To handle this we must inspect the parent `UseItem`s and `UseTree`s
-    // but right now we're looking deep inside the nested `Path` nodes because
-    // `Path`s are left-associative:
-    //
-    // ((crate)::bar)::baz)
-    //   ^ current value of path
-    //
-    // So we need to climb to the top
-    while let Some(parent) = path.parent_path() {
-        path = parent;
-    }
-
-    // Now that we've found the whole path we need to see if there's a prefix
-    // somewhere in the UseTree hierarchy. This check is arbitrarily deep
-    // because rust allows arbitrary nesting like so:
-    //
-    // use {foo::{{{{crate::bar::baz}}}}};
-    for node in path.syntax().ancestors().skip(1) {
-        match_ast! {
-            match node {
-                ast::UseTree(it) => if let Some(tree_path) = it.path() {
-                    // Even a top-level path exists within a `UseTree` so we must explicitly
-                    // allow our path but disallow anything else
-                    if tree_path != path {
-                        errors.push(SyntaxError::new(ERR_MSG, crate_token.text_range()));
-                    }
-                },
-                ast::UseTreeList(_it) => continue,
-                _ => return,
-            }
-        };
-    }
-}
+fn validate_path_keywords(segment: ast::PathSegment, errors: &mut Vec<SyntaxError>) {
+    use ast::PathSegmentKind;
+
+    let path = segment.parent_path();
+    let is_path_start = segment.coloncolon_token().is_none() && path.qualifier().is_none();
+
+    if let Some(token) = segment.self_token() {
+        if !is_path_start {
+            errors.push(SyntaxError::new(
+                "The `self` keyword is only allowed as the first segment of a path",
+                token.text_range(),
+            ));
+        }
+    } else if let Some(token) = segment.crate_token() {
+        if !is_path_start || use_prefix(path).is_some() {
+            errors.push(SyntaxError::new(
+                "The `crate` keyword is only allowed as the first segment of a path",
+                token.text_range(),
+            ));
+        }
+    } else if let Some(token) = segment.super_token() {
+        if !all_supers(&path) {
+            errors.push(SyntaxError::new(
+                "The `super` keyword may only be preceded by other `super`s",
+                token.text_range(),
+            ));
+            return;
+        }
+
+        let mut curr_path = path;
+        while let Some(prefix) = use_prefix(curr_path) {
+            if !all_supers(&prefix) {
+                errors.push(SyntaxError::new(
+                    "The `super` keyword may only be preceded by other `super`s",
+                    token.text_range(),
+                ));
+                return;
+            }
+            curr_path = prefix;
+        }
+    }
+
+    fn use_prefix(mut path: ast::Path) -> Option<ast::Path> {
+        for node in path.syntax().ancestors().skip(1) {
+            match_ast! {
+                match node {
+                    ast::UseTree(it) => if let Some(tree_path) = it.path() {
+                        // Even a top-level path exists within a `UseTree` so we must explicitly
+                        // allow our path but disallow anything else
+                        if tree_path != path {
+                            return Some(tree_path);
+                        }
+                    },
+                    ast::UseTreeList(_it) => continue,
+                    ast::Path(parent) => path = parent,
+                    _ => return None,
+                }
+            };
+        }
+        return None;
+    }
+
+    fn all_supers(path: &ast::Path) -> bool {
+        let segment = match path.segment() {
+            Some(it) => it,
+            None => return false,
+        };
+
+        if segment.kind() != Some(PathSegmentKind::SuperKw) {
+            return false;
+        }
+
+        if let Some(ref subpath) = path.qualifier() {
+            return all_supers(subpath);
+        }
+
+        return true;
+    }
+}
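The new `validate_path_keywords` extends the old `crate`-only check to `self` and `super`. A hedged sketch of the cases it accepts and flags, driven through the public parse API (this assumes `SourceFile::parse` and `Parse::errors()` from the same crate; the diagnostics themselves are the messages shown above):

use ra_syntax::SourceFile;

// Illustrative only: true if parsing plus validation produced no errors.
fn parses_cleanly(text: &str) -> bool {
    SourceFile::parse(text).errors().is_empty()
}

fn main() {
    assert!(parses_cleanly("use crate::foo;"));        // `crate` first: ok
    assert!(!parses_cleanly("use foo::crate::bar;"));  // `crate` mid-path: flagged
    assert!(parses_cleanly("use super::super::foo;")); // chain of `super`s: ok
    assert!(!parses_cleanly("use foo::super::bar;"));  // `super` after a name: flagged
}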
@@ -6,19 +6,17 @@ use crate::{
     SyntaxKind::*,
 };

-pub(crate) fn validate_block_expr(expr: ast::BlockExpr, errors: &mut Vec<SyntaxError>) {
-    if let Some(parent) = expr.syntax().parent() {
+pub(crate) fn validate_block_expr(block: ast::BlockExpr, errors: &mut Vec<SyntaxError>) {
+    if let Some(parent) = block.syntax().parent() {
         match parent.kind() {
-            FN_DEF | EXPR_STMT | BLOCK => return,
+            FN_DEF | EXPR_STMT | BLOCK_EXPR => return,
             _ => {}
         }
     }
-    if let Some(block) = expr.block() {
-        errors.extend(block.attrs().map(|attr| {
-            SyntaxError::new(
-                "A block in this position cannot accept inner attributes",
-                attr.syntax().text_range(),
-            )
-        }))
-    }
+    errors.extend(block.attrs().map(|attr| {
+        SyntaxError::new(
+            "A block in this position cannot accept inner attributes",
+            attr.syntax().text_range(),
+        )
+    }))
 }