Add raw idents to lexer and parser
Commit 1cd6d6539a (parent 0b942cbcb0)
26 changed files with 124 additions and 40 deletions
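For background (not part of the diff below): a raw identifier in Rust is an identifier written with an `r#` prefix, which lets a name that collides with a keyword be used as an ordinary identifier. A minimal illustration, with made-up names:

// `match` is a keyword, but `r#match` is a legal identifier.
fn r#match(needle: &str, haystack: &str) -> bool {
    haystack.contains(needle)
}

fn main() {
    // The r# prefix is also required at the use site.
    assert!(r#match("foo", "foobar"));
}

The changes below add a dedicated RAW_IDENT token to the lexer and teach the parser to accept it wherever a plain IDENT was previously required.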
@@ -142,7 +142,7 @@ impl Attr {
     pub fn as_atom(&self) -> Option<SmolStr> {
         let tt = self.value()?;
         let (_bra, attr, _ket) = tt.syntax().children().collect_tuple()?;
-        if attr.kind() == IDENT {
+        if attr.kind().is_ident() {
             Some(attr.leaf_text().unwrap().clone())
         } else {
             None

@@ -153,7 +153,7 @@ impl Attr {
         let tt = self.value()?;
         let (_bra, attr, args, _ket) = tt.syntax().children().collect_tuple()?;
         let args = TokenTree::cast(args)?;
-        if attr.kind() == IDENT {
+        if attr.kind().is_ident() {
             Some((attr.leaf_text().unwrap().clone(), args))
         } else {
             None
@@ -102,6 +102,7 @@ Grammar(
     tokens: [
         "ERROR",
         "IDENT",
+        "RAW_IDENT",
         "UNDERSCORE",
         "WHITESPACE",
         "INT_NUMBER",

@@ -116,6 +117,10 @@ Grammar(
         "COMMENT",
         "SHEBANG",
     ],
+    ident_tokens: [
+        "IDENT",
+        "RAW_IDENT",
+    ],
     nodes: [
         "SOURCE_FILE",
@@ -140,7 +140,7 @@ fn opt_fn_ret_type(p: &mut Parser) -> bool {
 }
 
 fn name_r(p: &mut Parser, recovery: TokenSet) {
-    if p.at(IDENT) {
+    if p.current().is_ident() {
         let m = p.start();
         p.bump();
         m.complete(p, NAME);

@@ -154,7 +154,7 @@ fn name(p: &mut Parser) {
 }
 
 fn name_ref(p: &mut Parser) {
-    if p.at(IDENT) {
+    if p.current().is_ident() {
         let m = p.start();
         p.bump();
         m.complete(p, NAME_REF);
@@ -281,7 +281,7 @@ fn postfix_expr(
         // }
         L_PAREN if allow_calls => call_expr(p, lhs),
         L_BRACK if allow_calls => index_expr(p, lhs),
-        DOT if p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON) => {
+        DOT if p.nth(1).is_ident() && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON) => {
             method_call_expr(p, lhs)
         }
         DOT => field_expr(p, lhs),

@@ -332,7 +332,7 @@ fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
 // y.bar::<T>(1, 2,);
 // }
 fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
-    assert!(p.at(DOT) && p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON));
+    assert!(p.at(DOT) && p.nth(1).is_ident() && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON));
     let m = lhs.precede(p);
     p.bump();
     name_ref(p);

@@ -352,7 +352,7 @@ fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
     assert!(p.at(DOT));
     let m = lhs.precede(p);
     p.bump();
-    if p.at(IDENT) {
+    if p.current().is_ident() {
         name_ref(p)
     } else if p.at(INT_NUMBER) {
         p.bump()

@@ -443,7 +443,7 @@ pub(crate) fn named_field_list(p: &mut Parser) {
     p.bump();
     while !p.at(EOF) && !p.at(R_CURLY) {
         match p.current() {
-            IDENT => {
+            IDENT | RAW_IDENT => {
                 let m = p.start();
                 name_ref(p);
                 if p.eat(COLON) {
@@ -48,6 +48,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet = LITERAL_FIRST.union(token_set![
     UNSAFE_KW,
     RETURN_KW,
     IDENT,
+    RAW_IDENT,
     SELF_KW,
     SUPER_KW,
     CRATE_KW,
@@ -99,11 +99,11 @@ pub(super) fn maybe_item(p: &mut Parser, flavor: ItemFlavor) -> MaybeItem {
         has_mods = true;
         abi(p);
     }
-    if p.at(IDENT) && p.at_contextual_kw("auto") && p.nth(1) == TRAIT_KW {
+    if p.current().is_ident() && p.at_contextual_kw("auto") && p.nth(1) == TRAIT_KW {
         p.bump_remap(AUTO_KW);
         has_mods = true;
     }
-    if p.at(IDENT) && p.at_contextual_kw("default") && p.nth(1) == IMPL_KW {
+    if p.current().is_ident() && p.at_contextual_kw("default") && p.nth(1) == IMPL_KW {
         p.bump_remap(DEFAULT_KW);
         has_mods = true;
     }

@@ -202,7 +202,7 @@ fn items_without_modifiers(p: &mut Parser) -> Option<SyntaxKind> {
             }
             STRUCT_DEF
         }
-        IDENT if p.at_contextual_kw("union") && p.nth(1) == IDENT => {
+        IDENT | RAW_IDENT if p.at_contextual_kw("union") && p.nth(1).is_ident() => {
            // test union_items
            // union Foo {}
            // union Foo {

@@ -220,7 +220,7 @@ fn items_without_modifiers(p: &mut Parser) -> Option<SyntaxKind> {
             use_item::use_item(p);
             USE_ITEM
         }
-        CONST_KW if (la == IDENT || la == MUT_KW) => {
+        CONST_KW if (la.is_ident() || la == MUT_KW) => {
             consts::const_def(p);
             CONST_DEF
         }

@@ -351,7 +351,7 @@ fn macro_call(p: &mut Parser) -> BlockLike {
 
 pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike {
     p.expect(EXCL);
-    p.eat(IDENT);
+    p.eat_one(&[IDENT, RAW_IDENT]);
     match p.current() {
         L_CURLY => {
             token_tree(p);
@@ -70,7 +70,7 @@ pub(crate) fn enum_variant_list(p: &mut Parser) {
         }
         let var = p.start();
         attributes::outer_attributes(p);
-        if p.at(IDENT) {
+        if p.current().is_ident() {
             name(p);
             match p.current() {
                 L_CURLY => named_field_def_list(p),

@@ -120,7 +120,7 @@ pub(crate) fn named_field_def_list(p: &mut Parser) {
         // }
         attributes::outer_attributes(p);
         opt_visibility(p);
-        if p.at(IDENT) {
+        if p.current().is_ident() {
             name(p);
             p.expect(COLON);
             types::type_(p);
@@ -112,7 +112,7 @@ fn choose_type_params_over_qpath(p: &Parser) -> bool {
     if p.nth(1) == POUND || p.nth(1) == R_ANGLE {
         return true;
     }
-    (p.nth(1) == LIFETIME || p.nth(1) == IDENT)
+    (p.nth(1) == LIFETIME || p.nth(1).is_ident())
         && (p.nth(2) == R_ANGLE || p.nth(2) == COMMA || p.nth(2) == COLON || p.nth(2) == EQ)
 }
 
@@ -84,9 +84,9 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) {
             // trait Foo {
             // fn bar(_: u64);
             // }
-            if (la0 == IDENT || la0 == UNDERSCORE) && la1 == COLON
-                || la0 == AMP && la1 == IDENT && la2 == COLON
-                || la0 == AMP && la1 == MUT_KW && la2 == IDENT && la3 == COLON
+            if (la0.is_ident() || la0 == UNDERSCORE) && la1 == COLON
+                || la0 == AMP && la1.is_ident() && la2 == COLON
+                || la0 == AMP && la1 == MUT_KW && la2.is_ident() && la3 == COLON
             {
                 patterns::pattern(p);
                 types::ascription(p);
@@ -1,11 +1,11 @@
 use super::*;
 
 pub(super) const PATH_FIRST: TokenSet =
-    token_set![IDENT, SELF_KW, SUPER_KW, CRATE_KW, COLONCOLON, L_ANGLE];
+    token_set![IDENT, RAW_IDENT, SELF_KW, SUPER_KW, CRATE_KW, COLONCOLON, L_ANGLE];
 
 pub(super) fn is_path_start(p: &Parser) -> bool {
     match p.current() {
-        IDENT | SELF_KW | SUPER_KW | CRATE_KW | COLONCOLON => true,
+        IDENT | RAW_IDENT | SELF_KW | SUPER_KW | CRATE_KW | COLONCOLON => true,
         _ => false,
     }
 }

@@ -70,7 +70,7 @@ fn path_segment(p: &mut Parser, mode: Mode, first: bool) {
         p.eat(COLONCOLON);
     }
     match p.current() {
-        IDENT => {
+        IDENT | RAW_IDENT => {
             name_ref(p);
             opt_path_type_args(p, mode);
         }
@@ -37,7 +37,7 @@ fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> {
     let la1 = p.nth(1);
     if la0 == REF_KW
         || la0 == MUT_KW
-        || (la0 == IDENT && !(la1 == COLONCOLON || la1 == L_PAREN || la1 == L_CURLY))
+        || (la0.is_ident() && !(la1 == COLONCOLON || la1 == L_PAREN || la1 == L_CURLY))
     {
         return Some(bind_pat(p, true));
     }

@@ -128,7 +128,7 @@ fn field_pat_list(p: &mut Parser) {
     while !p.at(EOF) && !p.at(R_CURLY) {
         match p.current() {
             DOTDOT => p.bump(),
-            IDENT if p.nth(1) == COLON => field_pat(p),
+            IDENT | RAW_IDENT if p.nth(1) == COLON => field_pat(p),
             L_CURLY => error_block(p, "expected ident"),
             _ => {
                 bind_pat(p, false);

@@ -143,7 +143,7 @@ fn field_pat_list(p: &mut Parser) {
 }
 
 fn field_pat(p: &mut Parser) {
-    assert!(p.at(IDENT));
+    assert!(p.current().is_ident());
     assert!(p.nth(1) == COLON);
 
     let m = p.start();
@@ -34,7 +34,7 @@ fn type_arg(p: &mut Parser) {
             p.bump();
             m.complete(p, LIFETIME_ARG);
         }
-        IDENT if p.nth(1) == EQ => {
+        IDENT | RAW_IDENT if p.nth(1) == EQ => {
             name_ref(p);
             p.bump();
             types::type_(p);
@@ -15,7 +15,7 @@ fn type_param_list(p: &mut Parser) {
     while !p.at(EOF) && !p.at(R_ANGLE) {
         match p.current() {
             LIFETIME => lifetime_param(p),
-            IDENT => type_param(p),
+            IDENT | RAW_IDENT => type_param(p),
             _ => p.err_and_bump("expected type parameter"),
         }
         if !p.at(R_ANGLE) && !p.expect(COMMA) {

@@ -37,7 +37,7 @@ fn lifetime_param(p: &mut Parser) {
 }
 
 fn type_param(p: &mut Parser) {
-    assert!(p.at(IDENT));
+    assert!(p.current().is_ident());
     let m = p.start();
     name(p);
     if p.at(COLON) {
@@ -190,19 +190,24 @@ fn next_token_inner(c: char, ptr: &mut Ptr) -> SyntaxKind {
 }
 
 fn scan_ident(c: char, ptr: &mut Ptr) -> SyntaxKind {
-    let is_single_letter = match ptr.current() {
-        None => true,
-        Some(c) if !is_ident_continue(c) => true,
+    let is_raw = match (c, ptr.current()) {
+        ('r', Some('#')) => {
+            ptr.bump();
+            true
+        }
+        ('_', Some(c)) if !is_ident_continue(c) => return UNDERSCORE,
         _ => false,
     };
-    if is_single_letter {
-        return if c == '_' { UNDERSCORE } else { IDENT };
-    }
     ptr.bump_while(is_ident_continue);
-    if let Some(kind) = SyntaxKind::from_keyword(ptr.current_token_text()) {
+    if is_raw {
+        RAW_IDENT
+    } else if let Some(kind) = SyntaxKind::from_keyword(ptr.current_token_text()) {
         return kind;
+    } else {
+        IDENT
     }
-    IDENT
 }
 
 fn scan_literal_suffix(ptr: &mut Ptr) {
@@ -5,7 +5,8 @@ use crate::lexer::ptr::Ptr;
 pub(crate) fn is_string_literal_start(c: char, c1: Option<char>, c2: Option<char>) -> bool {
     match (c, c1, c2) {
         ('r', Some('"'), _)
-        | ('r', Some('#'), _)
+        | ('r', Some('#'), Some('"'))
+        | ('r', Some('#'), Some('#'))
         | ('b', Some('"'), _)
         | ('b', Some('\''), _)
         | ('b', Some('r'), Some('"'))
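The hunk above narrows the raw-string check: `r#` only starts a string literal when the next character is `"` or another `#`, so an input like `r#foo` now falls through to `scan_ident` and is lexed as a raw identifier (see the 0016_raw_ident lexer test below). A standalone sketch of that three-character lookahead, using a made-up function name rather than the crate's API:

// Hypothetical helper, covering only the `r` arms of the real match.
fn starts_raw_string(c: char, c1: Option<char>, c2: Option<char>) -> bool {
    match (c, c1, c2) {
        ('r', Some('"'), _) => true,                                  // r"..."
        ('r', Some('#'), Some('"')) | ('r', Some('#'), Some('#')) => true, // r#"..."#, r##"..."##
        _ => false,
    }
}

fn main() {
    assert!(starts_raw_string('r', Some('"'), Some('x')));  // r"x"
    assert!(starts_raw_string('r', Some('#'), Some('"')));  // r#"..."#
    assert!(!starts_raw_string('r', Some('#'), Some('f'))); // r#foo: raw identifier
}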
@@ -100,6 +100,14 @@ impl<'t> Parser<'t> {
         true
     }
 
+    /// Consume the next token matching one of the `kinds`
+    pub(crate) fn eat_one<'k, K>(&mut self, kinds: K) -> bool
+    where
+        K: IntoIterator<Item = &'k SyntaxKind> + 'k,
+    {
+        kinds.into_iter().map(|k| self.eat(*k)).any(|eaten| eaten)
+    }
+
     /// Consume the next token if it is `kind` or emit an error
     /// otherwise.
     pub(crate) fn expect(&mut self, kind: SyntaxKind) -> bool {
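The new `eat_one` helper, used earlier in `macro_call_after_excl` as `p.eat_one(&[IDENT, RAW_IDENT])`, tries each kind in order and consumes at most one token because `any` short-circuits on the first match. A self-contained sketch of that behaviour against a toy parser; the `Toy` and `Kind` names are made up for illustration:

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Kind { Ident, RawIdent, Excl }

struct Toy { tokens: Vec<Kind>, pos: usize }

impl Toy {
    // Consume the current token only if it has the given kind.
    fn eat(&mut self, kind: Kind) -> bool {
        if self.tokens.get(self.pos) == Some(&kind) {
            self.pos += 1;
            true
        } else {
            false
        }
    }
    // Mirror of eat_one: `any` stops at the first kind that matches.
    fn eat_one(&mut self, kinds: &[Kind]) -> bool {
        kinds.iter().any(|&k| self.eat(k))
    }
}

fn main() {
    let mut p = Toy { tokens: vec![Kind::RawIdent, Kind::Excl], pos: 0 };
    assert!(p.eat_one(&[Kind::Ident, Kind::RawIdent])); // consumes the RawIdent
    assert_eq!(p.pos, 1);
    assert!(!p.eat_one(&[Kind::Ident, Kind::RawIdent])); // Excl matches neither kind
    assert_eq!(p.pos, 1);
}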
@@ -25,7 +25,7 @@ fn reparse_leaf<'node>(
 ) -> Option<(&'node SyntaxNode, GreenNode, Vec<SyntaxError>)> {
     let node = algo::find_covering_node(node, edit.delete);
     match node.kind() {
-        WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => {
+        WHITESPACE | COMMENT | IDENT | RAW_IDENT | STRING | RAW_STRING => {
             let text = get_text_after_edit(node, &edit);
             let tokens = tokenize(&text);
             let token = match tokens[..] {

@@ -33,7 +33,7 @@ fn reparse_leaf<'node>(
         _ => return None,
     };
 
-    if token.kind == IDENT && is_contextual_kw(&text) {
+    if token.kind.is_ident() && is_contextual_kw(&text) {
         return None;
     }
 
@@ -105,6 +105,7 @@ pub enum SyntaxKind {
     UNION_KW,
     ERROR,
     IDENT,
+    RAW_IDENT,
     UNDERSCORE,
     WHITESPACE,
     INT_NUMBER,

@@ -368,6 +369,7 @@ impl SyntaxKind {
             UNION_KW => &SyntaxInfo { name: "UNION_KW" },
             ERROR => &SyntaxInfo { name: "ERROR" },
             IDENT => &SyntaxInfo { name: "IDENT" },
+            RAW_IDENT => &SyntaxInfo { name: "RAW_IDENT" },
             UNDERSCORE => &SyntaxInfo { name: "UNDERSCORE" },
             WHITESPACE => &SyntaxInfo { name: "WHITESPACE" },
             INT_NUMBER => &SyntaxInfo { name: "INT_NUMBER" },

@@ -563,4 +565,12 @@ impl SyntaxKind {
         };
         Some(tok)
     }
+
+    pub(crate) fn is_ident(&self) -> bool {
+        match self {
+            | IDENT
+            | RAW_IDENT => true,
+            _ => false,
+        }
+    }
 }
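This `is_ident` predicate is what lets the parser call sites above switch from `kind == IDENT` to `kind.is_ident()`; the concrete method here is produced from the `ident_tokens` list added to the grammar earlier, via the template in the next hunk. A simplified standalone version, using a hypothetical trimmed-down SyntaxKind, to show the intended behaviour:

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum SyntaxKind { Ident, RawIdent, Underscore }

impl SyntaxKind {
    // Both the plain and the raw flavour count as an identifier.
    fn is_ident(&self) -> bool {
        match self {
            SyntaxKind::Ident | SyntaxKind::RawIdent => true,
            _ => false,
        }
    }
}

fn main() {
    assert!(SyntaxKind::Ident.is_ident());
    assert!(SyntaxKind::RawIdent.is_ident());
    assert!(!SyntaxKind::Underscore.is_ident());
}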
@@ -74,4 +74,13 @@ impl SyntaxKind {
         };
         Some(tok)
     }
+
+    pub(crate) fn is_ident(&self) -> bool {
+        match self {
+            {%- for kind in ident_tokens %}
+            | {{kind}}
+            {%- endfor %} => true,
+            _ => false,
+        }
+    }
 }
@@ -207,7 +207,7 @@ impl<'a> Iterator for SyntaxNodeChildren<'a> {
 fn has_short_text(kind: SyntaxKind) -> bool {
     use crate::SyntaxKind::*;
     match kind {
-        IDENT | LIFETIME | INT_NUMBER | FLOAT_NUMBER => true,
+        IDENT | RAW_IDENT | LIFETIME | INT_NUMBER | FLOAT_NUMBER => true,
         _ => false,
     }
 }
crates/ra_syntax/tests/data/lexer/0016_raw_ident.rs (new file, 1 line)
@@ -0,0 +1 @@
+r#raw_ident

crates/ra_syntax/tests/data/lexer/0016_raw_ident.txt (new file, 2 lines)
@@ -0,0 +1,2 @@
+RAW_IDENT 11 "r#raw_ident"
+WHITESPACE 1 "\n"

@@ -0,0 +1,2 @@
+fn r#foo() {
+}

crates/ra_syntax/tests/data/parser/ok/0039_raw_fn_item.txt (new file, 15 lines)
@@ -0,0 +1,15 @@
+SOURCE_FILE@[0; 15)
+  FN_DEF@[0; 14)
+    FN_KW@[0; 2)
+    WHITESPACE@[2; 3)
+    NAME@[3; 8)
+      RAW_IDENT@[3; 8) "r#foo"
+    PARAM_LIST@[8; 10)
+      L_PAREN@[8; 9)
+      R_PAREN@[9; 10)
+    WHITESPACE@[10; 11)
+    BLOCK@[11; 14)
+      L_CURLY@[11; 12)
+      WHITESPACE@[12; 13)
+      R_CURLY@[13; 14)
+  WHITESPACE@[14; 15)

@@ -0,0 +1,3 @@
+struct S {
+    r#foo: u32
+}

@@ -0,0 +1,22 @@
+SOURCE_FILE@[0; 27)
+  STRUCT_DEF@[0; 27)
+    STRUCT_KW@[0; 6)
+    WHITESPACE@[6; 7)
+    NAME@[7; 8)
+      IDENT@[7; 8) "S"
+    WHITESPACE@[8; 9)
+    NAMED_FIELD_DEF_LIST@[9; 27)
+      L_CURLY@[9; 10)
+      WHITESPACE@[10; 15)
+      NAMED_FIELD_DEF@[15; 25)
+        NAME@[15; 20)
+          RAW_IDENT@[15; 20) "r#foo"
+        COLON@[20; 21)
+        WHITESPACE@[21; 22)
+        PATH_TYPE@[22; 25)
+          PATH@[22; 25)
+            PATH_SEGMENT@[22; 25)
+              NAME_REF@[22; 25)
+                IDENT@[22; 25) "u32"
+      WHITESPACE@[25; 26)
+      R_CURLY@[26; 27)