mirror of https://github.com/rust-lang/rust-analyzer.git
synced 2025-09-28 04:44:57 +00:00

Fix bug and add expr, pat, ty matchers

This commit is contained in:
parent f66300ccd1
commit 6646d49f23

10 changed files with 307 additions and 33 deletions
@@ -189,6 +189,14 @@ impl_froms!(TokenTree: Leaf, Subtree);
         rules.expand(&invocation_tt).unwrap()
     }
 
+    pub(crate) fn expand_to_syntax(
+        rules: &MacroRules,
+        invocation: &str,
+    ) -> ra_syntax::TreeArc<ast::SourceFile> {
+        let expanded = expand(rules, invocation);
+        token_tree_to_ast_item_list(&expanded)
+    }
+
     pub(crate) fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) {
         let expanded = expand(rules, invocation);
         assert_eq!(expanded.to_string(), expansion);
@@ -485,4 +493,93 @@ SOURCE_FILE@[0; 40)
         );
         assert_expansion(&rules, "foo! { foo }", "fn foo () {let a = foo :: bar ;}");
     }
+
+    #[test]
+    fn test_expr() {
+        let rules = create_rules(
+            r#"
+        macro_rules! foo {
+            ($ i:expr) => {
+                fn bar() { $ i; }
+            }
+        }
+"#,
+        );
+
+        assert_expansion(
+            &rules,
+            "foo! { 2 + 2 * baz(3).quux() }",
+            "fn bar () {2 + 2 * baz (3) . quux () ;}",
+        );
+    }
+
+    #[test]
+    fn test_expr_order() {
+        let rules = create_rules(
+            r#"
+        macro_rules! foo {
+            ($ i:expr) => {
+                fn bar() { $ i * 2; }
+            }
+        }
+"#,
+        );
+
+        assert_eq!(
+            expand_to_syntax(&rules, "foo! { 1 + 1 }").syntax().debug_dump().trim(),
+            r#"SOURCE_FILE@[0; 15)
+  FN_DEF@[0; 15)
+    FN_KW@[0; 2) "fn"
+    NAME@[2; 5)
+      IDENT@[2; 5) "bar"
+    PARAM_LIST@[5; 7)
+      L_PAREN@[5; 6) "("
+      R_PAREN@[6; 7) ")"
+    BLOCK@[7; 15)
+      L_CURLY@[7; 8) "{"
+      EXPR_STMT@[8; 14)
+        BIN_EXPR@[8; 13)
+          BIN_EXPR@[8; 11)
+            LITERAL@[8; 9)
+              INT_NUMBER@[8; 9) "1"
+            PLUS@[9; 10) "+"
+            LITERAL@[10; 11)
+              INT_NUMBER@[10; 11) "1"
+          STAR@[11; 12) "*"
+          LITERAL@[12; 13)
+            INT_NUMBER@[12; 13) "2"
+        SEMI@[13; 14) ";"
+      R_CURLY@[14; 15) "}""#,
+        );
+    }
+
+    #[test]
+    fn test_ty() {
+        let rules = create_rules(
+            r#"
+        macro_rules! foo {
+            ($ i:ty) => (
+                fn bar() -> $ i { unimplemented!() }
+            )
+        }
+"#,
+        );
+        assert_expansion(
+            &rules,
+            "foo! { Baz<u8> }",
+            "fn bar () -> Baz < u8 > {unimplemented ! ()}",
+        );
+    }
+
+    #[test]
+    fn test_pat_() {
+        let rules = create_rules(
+            r#"
+        macro_rules! foo {
+            ($ i:pat) => { fn foo() { let $ i; } }
+        }
+"#,
+        );
+        assert_expansion(&rules, "foo! { (a, b) }", "fn foo () {let (a , b) ;}");
+    }
 }
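For readers unfamiliar with fragment matchers, `test_expr_order` is the interesting case: a captured `expr` fragment must stay atomic inside the expansion. A minimal sketch using plain rustc `macro_rules!` (independent of this test suite) shows the behaviour these tests expect the expander to reproduce:

    macro_rules! foo {
        ($i:expr) => {
            // `$i` is an `expr` fragment, so it expands as one unit,
            // not as loose tokens.
            fn bar() -> i32 { $i * 2 }
        };
    }

    foo! { 1 + 1 }

    fn main() {
        // `(1 + 1) * 2`, not `1 + 1 * 2`: the fragment keeps its own
        // precedence, which is exactly what the BIN_EXPR nesting asserts.
        assert_eq!(bar(), 4);
    }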
@@ -144,6 +144,19 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
                 input.eat_path().ok_or(ExpandError::UnexpectedToken)?.clone();
             res.inner.insert(text.clone(), Binding::Simple(path.into()));
         }
+        "expr" => {
+            let expr =
+                input.eat_expr().ok_or(ExpandError::UnexpectedToken)?.clone();
+            res.inner.insert(text.clone(), Binding::Simple(expr.into()));
+        }
+        "ty" => {
+            let ty = input.eat_ty().ok_or(ExpandError::UnexpectedToken)?.clone();
+            res.inner.insert(text.clone(), Binding::Simple(ty.into()));
+        }
+        "pat" => {
+            let pat = input.eat_pat().ok_or(ExpandError::UnexpectedToken)?.clone();
+            res.inner.insert(text.clone(), Binding::Simple(pat.into()));
+        }
         _ => return Err(ExpandError::UnexpectedToken),
     }
 }
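Each new arm follows the same shape as the existing "path" arm: try to eat a whole fragment from the token stream, fail the match with UnexpectedToken otherwise, and record the result under the metavariable's name. A self-contained model of that dispatch (the names here are illustrative, not the crate's API):

    use std::collections::HashMap;

    // Stand-in for the TtCursor eat_* methods: the real cursor re-runs the
    // parser per fragment kind; this toy version just takes the next token.
    fn eat_fragment(kind: &str, input: &mut Vec<String>) -> Option<String> {
        match kind {
            "path" | "expr" | "ty" | "pat" => input.pop(),
            _ => None,
        }
    }

    fn bind(
        kind: &str,
        name: &str,
        input: &mut Vec<String>,
        bindings: &mut HashMap<String, String>,
    ) -> Result<(), &'static str> {
        // Mirror of `ok_or(ExpandError::UnexpectedToken)?` above.
        let fragment = eat_fragment(kind, input).ok_or("UnexpectedToken")?;
        bindings.insert(name.to_string(), fragment);
        Ok(())
    }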
@@ -30,6 +30,18 @@ impl<'a> Parser<'a> {
         self.parse(ra_parser::parse_path)
     }
 
+    pub fn parse_expr(self) -> Option<tt::TokenTree> {
+        self.parse(ra_parser::parse_expr)
+    }
+
+    pub fn parse_ty(self) -> Option<tt::TokenTree> {
+        self.parse(ra_parser::parse_ty)
+    }
+
+    pub fn parse_pat(self) -> Option<tt::TokenTree> {
+        self.parse(ra_parser::parse_pat)
+    }
+
     fn parse<F>(self, f: F) -> Option<tt::TokenTree>
     where
         F: FnOnce(&dyn TokenSource, &mut dyn TreeSink),
@@ -109,6 +109,8 @@ impl<'a> SubTreeWalker<'a> {
         self.cursor = match self.ts.get(0) {
             DelimToken::Token(token) => match token {
                 tt::TokenTree::Subtree(subtree) => {
+                    let ts = TokenSeq::from(subtree);
+                    self.stack.push((ts, 0));
                     WalkCursor::Token(0, convert_delim(subtree.delimiter, false))
                 }
                 tt::TokenTree::Leaf(leaf) => {
@@ -254,7 +256,7 @@ impl<'a> WalkerOwner<'a> {
                 }
             }
         } else if walker.stack.len() == 1 {
-            if let DelimToken::Delim(_, is_end) = walker.ts.get(*u) {
+            if let DelimToken::Delim(_, is_end) = walker.top().get(*u) {
                 if !is_end {
                     let (_, last_idx) = &walker.stack[0];
                     if let DelimToken::Token(token) = walker.ts.get(*last_idx) {
@@ -310,10 +312,16 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> {
         }
     }
     fn is_token_joint_to_next(&self, pos: usize) -> bool {
-        self.walker.get(pos).unwrap().is_joint_to_next
+        match self.walker.get(pos) {
+            Some(t) => t.is_joint_to_next,
+            _ => false,
+        }
     }
     fn is_keyword(&self, pos: usize, kw: &str) -> bool {
-        self.walker.get(pos).unwrap().text == *kw
+        match self.walker.get(pos) {
+            Some(t) => t.text == *kw,
+            _ => false,
+        }
     }
 }
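The old `.unwrap()` calls presumably panicked when the parser probed a position past the end of the subtree, which the new grammar entry points can now do; answering `false` for out-of-range positions is the conservative choice. An equivalent, more compact formulation (a style sketch with a hypothetical helper, not necessarily what the crate prefers):

    // Option::map_or collapses the match: an absent position yields false.
    fn is_keyword_at(text: Option<&str>, kw: &str) -> bool {
        text.map_or(false, |t| t == kw)
    }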
@@ -84,6 +84,21 @@ impl<'a> TtCursor<'a> {
         parser.parse_path()
     }
 
+    pub(crate) fn eat_expr(&mut self) -> Option<tt::TokenTree> {
+        let parser = Parser::new(&mut self.pos, self.subtree);
+        parser.parse_expr()
+    }
+
+    pub(crate) fn eat_ty(&mut self) -> Option<tt::TokenTree> {
+        let parser = Parser::new(&mut self.pos, self.subtree);
+        parser.parse_ty()
+    }
+
+    pub(crate) fn eat_pat(&mut self) -> Option<tt::TokenTree> {
+        let parser = Parser::new(&mut self.pos, self.subtree);
+        parser.parse_pat()
+    }
+
     pub(crate) fn expect_char(&mut self, char: char) -> Result<(), ParseError> {
         if self.at_char(char) {
             self.bump();
@@ -53,6 +53,18 @@ pub(crate) fn path(p: &mut Parser) {
     paths::type_path(p);
 }
 
+pub(crate) fn expr(p: &mut Parser) {
+    expressions::expr(p);
+}
+
+pub(crate) fn type_(p: &mut Parser) {
+    types::type_(p)
+}
+
+pub(crate) fn pattern(p: &mut Parser) {
+    patterns::pattern(p)
+}
+
 pub(crate) fn reparser(
     node: SyntaxKind,
     first_child: Option<SyntaxKind>,
@@ -8,17 +8,20 @@ const EXPR_FIRST: TokenSet = LHS_FIRST;
 
 pub(super) fn expr(p: &mut Parser) -> BlockLike {
     let r = Restrictions { forbid_structs: false, prefer_stmt: false };
-    expr_bp(p, r, 1).1
+    let mut dollar_lvl = 0;
+    expr_bp(p, r, 1, &mut dollar_lvl).1
 }
 
 pub(super) fn expr_stmt(p: &mut Parser) -> (Option<CompletedMarker>, BlockLike) {
     let r = Restrictions { forbid_structs: false, prefer_stmt: true };
-    expr_bp(p, r, 1)
+    let mut dollar_lvl = 0;
+    expr_bp(p, r, 1, &mut dollar_lvl)
 }
 
 fn expr_no_struct(p: &mut Parser) {
     let r = Restrictions { forbid_structs: true, prefer_stmt: false };
-    expr_bp(p, r, 1);
+    let mut dollar_lvl = 0;
+    expr_bp(p, r, 1, &mut dollar_lvl);
 }
 
 // test block
@@ -206,8 +209,23 @@ fn current_op(p: &Parser) -> (u8, Op) {
 }
 
 // Parses expression with binding power of at least bp.
-fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>, BlockLike) {
-    let mut lhs = match lhs(p, r) {
+fn expr_bp(
+    p: &mut Parser,
+    r: Restrictions,
+    mut bp: u8,
+    dollar_lvl: &mut usize,
+) -> (Option<CompletedMarker>, BlockLike) {
+    // `newly_dollar_open` flags that a dollar was opened just before the lhs
+    // and may close right after it, e.g. `$1$ + a`.
+    // We use this flag to skip handling that case.
+    let mut newly_dollar_open = false;
+
+    if p.at_l_dollar() {
+        *dollar_lvl += p.eat_l_dollars();
+        newly_dollar_open = true;
+    }
+
+    let mut lhs = match lhs(p, r, dollar_lvl) {
         Some((lhs, blocklike)) => {
             // test stmt_bin_expr_ambiguity
             // fn foo() {
@@ -223,6 +241,15 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>,
     };
 
     loop {
+        if *dollar_lvl > 0 && p.at_r_dollar() {
+            *dollar_lvl -= p.eat_r_dollars(*dollar_lvl);
+            if !newly_dollar_open {
+                // We "pump" bp to give it the highest priority.
+                bp = 255;
+            }
+            newly_dollar_open = false;
+        }
+
         let is_range = p.current() == DOTDOT || p.current() == DOTDOTEQ;
         let (op_bp, op) = current_op(p);
         if op_bp < bp {
@@ -235,7 +262,8 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>,
                 p.bump_compound(kind, n);
             }
         }
-        expr_bp(p, r, op_bp + 1);
+
+        expr_bp(p, r, op_bp + 1, dollar_lvl);
         lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR });
     }
     (Some(lhs), BlockLike::NotBlock)
@@ -244,7 +272,11 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>,
 const LHS_FIRST: TokenSet =
     atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOTDOT, DOTDOTEQ, MINUS]);
 
-fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
+fn lhs(
+    p: &mut Parser,
+    r: Restrictions,
+    dollar_lvl: &mut usize,
+) -> Option<(CompletedMarker, BlockLike)> {
     let m;
     let kind = match p.current() {
         // test ref_expr
@@ -275,7 +307,7 @@ fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)>
             m = p.start();
             p.bump();
             if p.at_ts(EXPR_FIRST) {
-                expr_bp(p, r, 2);
+                expr_bp(p, r, 2, dollar_lvl);
             }
             return Some((m.complete(p, RANGE_EXPR), BlockLike::NotBlock));
         }
@@ -287,7 +319,7 @@ fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)>
             ));
         }
     };
-    expr_bp(p, r, 255);
+    expr_bp(p, r, 255, dollar_lvl);
     Some((m.complete(p, kind), BlockLike::NotBlock))
 }
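The dollar machinery above is the heart of the fix. As the parser changes elsewhere in this commit suggest, a fragment bound by the macro matcher arrives at the expression parser bracketed by invisible L_DOLLAR/R_DOLLAR tokens; `dollar_lvl` counts how many such brackets are open, and closing one "pumps" `bp` to 255 so the bracketed expression binds as a single operand. A worked trace under that reading, for the `test_expr_order` case (`$i * 2` with `$i = 1 + 1`):

    Token stream seen by the parser (L$ / R$ are the invisible delimiters):

        L$  1  +  1  R$  *  2

    1. outer expr_bp: eats L$ (dollar_lvl = 1, newly_dollar_open), lhs = `1`
    2. sees `+`, recurses into expr_bp(op_bp + 1) for the right-hand side
    3. recursive call: lhs = `1`; at_r_dollar() holds, so it eats R$
       (dollar_lvl = 0) and, since the dollar was not opened in this call,
       pumps bp to 255; `*` can no longer bind, so it returns just `1`
    4. outer call completes `1 + 1`, then sees `*` (op_bp >= 1) and parses
       `* 2` around it, yielding BIN_EXPR((1 + 1) * 2), exactly the shape
       the test_expr_order dump asserts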
@@ -5,7 +5,7 @@ pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST
     .union(token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE, MINUS]);
 
 pub(super) fn pattern(p: &mut Parser) {
-    pattern_r(p, PAT_RECOVERY_SET)
+    pattern_r(p, PAT_RECOVERY_SET);
 }
 
 /// Parses a pattern list separated by pipes `|`
@@ -53,20 +53,39 @@ pub trait TreeSink {
     fn error(&mut self, error: ParseError);
 }
 
-/// Parse given tokens into the given sink as a Rust file.
-pub fn parse(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
+fn parse_from_tokens<F>(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink, f: F)
+where
+    F: FnOnce(&mut parser::Parser),
+{
     let mut p = parser::Parser::new(token_source);
-    grammar::root(&mut p);
+    f(&mut p);
     let events = p.finish();
     event::process(tree_sink, events);
 }
 
+/// Parse given tokens into the given sink as a Rust file.
+pub fn parse(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
+    parse_from_tokens(token_source, tree_sink, grammar::root);
+}
+
 /// Parse given tokens into the given sink as a path.
 pub fn parse_path(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
-    let mut p = parser::Parser::new(token_source);
-    grammar::path(&mut p);
-    let events = p.finish();
-    event::process(tree_sink, events);
+    parse_from_tokens(token_source, tree_sink, grammar::path);
 }
 
+/// Parse given tokens into the given sink as an expression.
+pub fn parse_expr(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
+    parse_from_tokens(token_source, tree_sink, grammar::expr);
+}
+
+/// Parse given tokens into the given sink as a type.
+pub fn parse_ty(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
+    parse_from_tokens(token_source, tree_sink, grammar::type_);
+}
+
+/// Parse given tokens into the given sink as a pattern.
+pub fn parse_pat(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
+    parse_from_tokens(token_source, tree_sink, grammar::pattern);
+}
+
 /// A parsing function for a specific braced-block.
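This is the usual "extract the shared driver, pass the grammar entry point as a closure" refactor: every public `parse_*` becomes a one-liner, and adding the next fragment kind means one grammar function plus one wrapper. A stripped-down model of the shape (toy types, not the crate's):

    struct P(Vec<&'static str>); // stands in for parser::Parser and its events

    fn parse_from<F: FnOnce(&mut P)>(f: F) -> Vec<&'static str> {
        let mut p = P(Vec::new()); // Parser::new(token_source)
        f(&mut p);                 // grammar entry point: root, path, expr, ...
        p.0                        // p.finish() + event::process(...)
    }

    fn main() {
        // Each entry point is just a different closure over the same driver.
        let out = parse_from(|p| p.0.push("expr"));
        assert_eq!(out, ["expr"]);
    }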
@@ -45,8 +45,9 @@ impl<'t> Parser<'t> {
     ///
     /// Useful for parsing things like `>>`.
     pub(crate) fn current2(&self) -> Option<(SyntaxKind, SyntaxKind)> {
-        let c1 = self.token_source.token_kind(self.token_pos);
-        let c2 = self.token_source.token_kind(self.token_pos + 1);
+        let c1 = self.nth(0);
+        let c2 = self.nth(1);
+
         if self.token_source.is_token_joint_to_next(self.token_pos) {
             Some((c1, c2))
         } else {
@@ -59,9 +60,9 @@ impl<'t> Parser<'t> {
     ///
     /// Useful for parsing things like `=>>`.
     pub(crate) fn current3(&self) -> Option<(SyntaxKind, SyntaxKind, SyntaxKind)> {
-        let c1 = self.token_source.token_kind(self.token_pos);
-        let c2 = self.token_source.token_kind(self.token_pos + 1);
-        let c3 = self.token_source.token_kind(self.token_pos + 2);
+        let c1 = self.nth(0);
+        let c2 = self.nth(1);
+        let c3 = self.nth(2);
         if self.token_source.is_token_joint_to_next(self.token_pos)
             && self.token_source.is_token_joint_to_next(self.token_pos + 1)
         {
@@ -77,7 +78,23 @@ impl<'t> Parser<'t> {
         let steps = self.steps.get();
         assert!(steps <= 10_000_000, "the parser seems stuck");
         self.steps.set(steps + 1);
-        self.token_source.token_kind(self.token_pos + n)
+
+        // This is because L_DOLLAR/R_DOLLAR tokens may appear between the
+        // nth tokens; the following code skips through them.
+        let mut non_dollars_count = 0;
+        let mut i = 0;
+
+        loop {
+            let kind = self.token_source.token_kind(self.token_pos + i);
+            i += 1;
+
+            match kind {
+                EOF => return EOF,
+                SyntaxKind::L_DOLLAR | SyntaxKind::R_DOLLAR => {}
+                _ if non_dollars_count == n => return kind,
+                _ => non_dollars_count += 1,
+            }
+        }
+    }
 
     /// Checks if the current token is `kind`.
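With this change, lookahead positions count only "real" tokens, so the invisible dollar delimiters never shift the grammar's decisions. A standalone sketch of the rule (toy token stream and hypothetical names, not the crate's types):

    fn nth_skipping_dollars<'a>(tokens: &[&'a str], n: usize) -> &'a str {
        tokens
            .iter()
            .filter(|t| **t != "L$" && **t != "R$")
            .nth(n)
            .copied()
            .unwrap_or("EOF") // mirrors the `EOF => return EOF` arm
    }

    fn main() {
        let toks = ["L$", "1", "+", "1", "R$", "*", "2"];
        // nth(3) sees `*`, exactly as if the dollars were not there.
        assert_eq!(nth_skipping_dollars(&toks, 3), "*");
    }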
@@ -99,8 +116,6 @@ impl<'t> Parser<'t> {
     /// consumed between the `start` and the corresponding `Marker::complete`
     /// belong to the same node.
     pub(crate) fn start(&mut self) -> Marker {
-        self.eat_dollars();
-
         let pos = self.events.len() as u32;
         self.push_event(Event::tombstone());
         Marker::new(pos)
@@ -185,7 +200,6 @@
-        self.eat_dollars();
         self.token_pos += usize::from(n_raw_tokens);
         self.push_event(Event::Token { kind, n_raw_tokens });
         self.eat_dollars();
     }
 
     fn push_event(&mut self, event: Event) {
@@ -193,12 +207,64 @@
     }
 
     fn eat_dollars(&mut self) {
-        while self.nth(0) == SyntaxKind::L_DOLLAR || self.nth(0) == SyntaxKind::R_DOLLAR {
-            let kind = self.nth(0);
-            self.token_pos += 1;
-            self.push_event(Event::Token { kind, n_raw_tokens: 1 });
+        loop {
+            match self.token_source.token_kind(self.token_pos) {
+                k @ SyntaxKind::L_DOLLAR | k @ SyntaxKind::R_DOLLAR => {
+                    self.token_pos += 1;
+                    self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
+                }
+                _ => {
+                    return;
+                }
+            }
         }
     }
 
+    pub(crate) fn eat_l_dollars(&mut self) -> usize {
+        let mut ate_count = 0;
+        loop {
+            match self.token_source.token_kind(self.token_pos) {
+                k @ SyntaxKind::L_DOLLAR => {
+                    self.token_pos += 1;
+                    self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
+                    ate_count += 1;
+                }
+                _ => {
+                    return ate_count;
+                }
+            }
+        }
+    }
+
+    pub(crate) fn eat_r_dollars(&mut self, max_count: usize) -> usize {
+        let mut ate_count = 0;
+        loop {
+            match self.token_source.token_kind(self.token_pos) {
+                k @ SyntaxKind::R_DOLLAR => {
+                    self.token_pos += 1;
+                    self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
+                    ate_count += 1;
+
+                    if max_count >= ate_count {
+                        return ate_count;
+                    }
+                }
+                _ => {
+                    return ate_count;
+                }
+            }
+        }
+    }
+
+    pub(crate) fn at_l_dollar(&self) -> bool {
+        let kind = self.token_source.token_kind(self.token_pos);
+        (kind == SyntaxKind::L_DOLLAR)
+    }
+
+    pub(crate) fn at_r_dollar(&self) -> bool {
+        let kind = self.token_source.token_kind(self.token_pos);
+        (kind == SyntaxKind::R_DOLLAR)
+    }
 }
 
 /// See `Parser::start`.
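One behavioural subtlety worth noting: as written, `eat_r_dollars` returns as soon as `max_count >= ate_count`, that is, after the first R_DOLLAR whenever `max_count >= 1`, so each call closes at most one dollar level and the loop in `expr_bp` drains the rest. A toy model of that counting (an assumed reading, with toy tokens):

    fn eat_r_dollars_model(tokens: &[&str], max_count: usize) -> usize {
        let mut ate_count = 0;
        for t in tokens {
            match *t {
                "R$" => {
                    ate_count += 1;
                    if max_count >= ate_count {
                        return ate_count; // same guard as the code above
                    }
                }
                _ => return ate_count,
            }
        }
        ate_count
    }

    fn main() {
        // Three R_DOLLARs available, max_count = 3: one is eaten per call.
        assert_eq!(eat_r_dollars_model(&["R$", "R$", "R$"], 3), 1);
    }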