mirror of https://github.com/rust-lang/rust-analyzer.git
synced 2025-09-28 12:54:58 +00:00

Add classify_literal and undo expose next_token

This commit is contained in:
parent 1ab78d6056
commit 1ea0238e53

5 changed files with 15 additions and 8 deletions
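In short, the commit stops re-exporting the raw next_token lexer entry point from ra_syntax and instead exposes classify_literal, which only succeeds when the whole input is a single literal token. A minimal sketch of how a caller could use the new API through the ra_syntax re-export shown in the diff below; the INT_NUMBER kind in the example is an assumption for illustration, not part of this commit:

use ra_syntax::{classify_literal, SyntaxKind};

fn main() {
    // classify_literal returns Some(token) only when the entire input
    // lexes as one literal token; token.kind is its SyntaxKind.
    assert_eq!(
        classify_literal("92").map(|token| token.kind),
        Some(SyntaxKind::INT_NUMBER) // assumed variant name, for illustration
    );
    // An identifier is not a literal, so classification fails.
    assert!(classify_literal("foo").is_none());
}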
@@ -343,16 +343,14 @@ SOURCE_FILE@[0; 40)
         if let tt::TokenTree::Subtree(subtree) = tt {
             return &subtree;
         }
-        assert!(false, "It is not a subtree");
-        unreachable!();
+        unreachable!("It is not a subtree");
     }

     fn to_literal(tt: &tt::TokenTree) -> &tt::Literal {
         if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = tt {
             return lit;
         }
-        assert!(false, "It is not a literal");
-        unreachable!();
+        unreachable!("It is not a literal");
     }

     let rules = create_rules(
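The test helpers above return references, so the old assert!(false, ...) evaluates to () and had to be followed by a bare unreachable!() just to satisfy the return type; a single unreachable!("...") carries the same message and diverges on its own. A self-contained sketch of the idiom with a hypothetical Shape type, not taken from the commit:

#[allow(dead_code)]
enum Shape {
    Circle(f64),
    Square(f64),
}

fn circle_radius(shape: &Shape) -> &f64 {
    if let Shape::Circle(radius) = shape {
        return radius;
    }
    // `unreachable!` has the never type, so it satisfies the `&f64` return
    // type while still panicking with a descriptive message.
    unreachable!("It is not a circle");
}

fn main() {
    assert_eq!(*circle_radius(&Shape::Circle(1.5)), 1.5);
}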
@@ -1,7 +1,7 @@
 use ra_parser::{TokenSource, TreeSink, ParseError};
 use ra_syntax::{
     AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement,
-    ast, SyntaxKind::*, TextUnit, next_token
+    ast, SyntaxKind::*, TextUnit, classify_literal
 };

 /// Maps `tt::TokenId` to the relative range of the original token.
@@ -189,7 +189,7 @@ impl TtTokenSource {
     {
         let tok = match token {
             tt::Leaf::Literal(l) => TtToken {
-                kind: next_token(&l.text).kind,
+                kind: classify_literal(&l.text).unwrap().kind,
                 is_joint_to_next: false,
                 text: l.text.clone(),
             },
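The .unwrap() here presumably relies on the invariant that the text carried by a tt::Leaf::Literal always lexes as exactly one literal token, so classify_literal cannot return None at this call site. As a sketch only, a non-panicking variant of the same lookup could look like the following; the helper name is hypothetical and this is not what the commit does:

use ra_syntax::{classify_literal, SmolStr, SyntaxKind};

// Hypothetical helper: report the literal's SyntaxKind, or None if the
// stored text is not a single literal token, instead of unwrapping.
fn literal_token_kind(text: &SmolStr) -> Option<SyntaxKind> {
    classify_literal(text).map(|token| token.kind)
}

fn main() {
    let text = SmolStr::new("\"hi\"");
    assert!(literal_token_kind(&text).is_some());
}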
@@ -40,7 +40,7 @@ pub use crate::{
     syntax_text::SyntaxText,
     syntax_node::{Direction, SyntaxNode, WalkEvent, TreeArc, SyntaxTreeBuilder, SyntaxElement, SyntaxToken},
     ptr::{SyntaxNodePtr, AstPtr},
-    parsing::{tokenize, next_token, Token},
+    parsing::{tokenize, classify_literal, Token},
 };

 use ra_text_edit::AtomTextEdit;
@@ -11,7 +11,7 @@ use crate::{
     syntax_node::GreenNode,
 };

-pub use self::lexer::{tokenize, next_token, Token};
+pub use self::lexer::{tokenize, classify_literal, Token};

 pub(crate) use self::reparsing::incremental_reparse;

@@ -214,3 +214,12 @@ fn scan_literal_suffix(ptr: &mut Ptr) {
     }
     ptr.bump_while(is_ident_continue);
 }
+
+pub fn classify_literal(text: &str) -> Option<Token> {
+    let tkn = next_token(text);
+    if !tkn.kind.is_literal() || tkn.len.to_usize() != text.len() {
+        return None;
+    }
+
+    Some(tkn)
+}
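The kind check rejects non-literal tokens, and the length check rejects input where the first token does not span the whole string, which is what makes the function safe to call on the exact text stored for a literal. A small usage sketch against the re-exported function, with illustrative inputs:

use ra_syntax::classify_literal;

fn main() {
    // A complete string literal spans the whole input: classified.
    assert!(classify_literal("\"hello\"").is_some());
    // An identifier lexes, but its kind is not a literal: rejected.
    assert!(classify_literal("foo").is_none());
    // The first token is a literal but does not cover the whole input,
    // so the length check rejects it.
    assert!(classify_literal("1 + 1").is_none());
}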