Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-09-28 12:54:58 +00:00)
Combine all token sources into one and refactor
commit a7254201df
parent 7f1e93a3c9

5 changed files with 94 additions and 86 deletions
crates/ra_mbe/src/lib.rs

@@ -21,6 +21,7 @@ mod mbe_expander;
 mod syntax_bridge;
 mod tt_cursor;
 mod subtree_source;
+mod subtree_parser;

 use ra_syntax::SmolStr;

crates/ra_mbe/src/subtree_parser.rs (new file, 59 lines)

@@ -0,0 +1,59 @@
+use crate::subtree_source::SubtreeTokenSource;
+
+use ra_parser::{TokenSource, TreeSink};
+use ra_syntax::{SyntaxKind};
+
+struct OffsetTokenSink {
+    token_pos: usize,
+}
+
+impl TreeSink for OffsetTokenSink {
+    fn token(&mut self, _kind: SyntaxKind, n_tokens: u8) {
+        self.token_pos += n_tokens as usize;
+    }
+    fn start_node(&mut self, _kind: SyntaxKind) {}
+    fn finish_node(&mut self) {}
+    fn error(&mut self, _error: ra_parser::ParseError) {}
+}
+
+pub(crate) struct Parser<'a> {
+    subtree: &'a tt::Subtree,
+    pos: &'a mut usize,
+}
+
+impl<'a> Parser<'a> {
+    pub fn new(pos: &'a mut usize, subtree: &'a tt::Subtree) -> Parser<'a> {
+        Parser { pos, subtree }
+    }
+
+    pub fn parse_path(self) -> Option<tt::TokenTree> {
+        self.parse(ra_parser::parse_path)
+    }
+
+    fn parse<F>(self, f: F) -> Option<tt::TokenTree>
+    where
+        F: FnOnce(&dyn TokenSource, &mut dyn TreeSink),
+    {
+        let mut src = SubtreeTokenSource::new(self.subtree);
+        src.advance(*self.pos, true);
+        let mut sink = OffsetTokenSink { token_pos: 0 };
+
+        f(&src, &mut sink);
+
+        self.finish(sink.token_pos, &mut src)
+    }
+
+    fn finish(self, parsed_token: usize, src: &mut SubtreeTokenSource) -> Option<tt::TokenTree> {
+        let res = src.bump_n(parsed_token, self.pos);
+        let res: Vec<_> = res.into_iter().cloned().collect();
+
+        match res.len() {
+            0 => None,
+            1 => Some(res[0].clone()),
+            _ => Some(tt::TokenTree::Subtree(tt::Subtree {
+                delimiter: tt::Delimiter::None,
+                token_trees: res,
+            })),
+        }
+    }
+}
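The new Parser reuses a full ra_parser entry point over a token-tree subtree, but it discards the resulting tree: OffsetTokenSink only counts how many tokens the parse reported, and finish then bumps the shared SubtreeTokenSource (and the caller's position) by exactly that amount. Below is a minimal, self-contained sketch of that counting-sink idea; Sink, CountingSink, and parse_two are illustrative stand-ins, not rust-analyzer APIs.

```rust
// Stand-in for ra_parser::TreeSink: only the token callback matters here.
trait Sink {
    fn token(&mut self, n_tokens: u8);
}

// Stand-in for OffsetTokenSink: record how far the parse got, build nothing.
struct CountingSink {
    token_pos: usize,
}

impl Sink for CountingSink {
    fn token(&mut self, n_tokens: u8) {
        self.token_pos += n_tokens as usize;
    }
}

// Stand-in for a parse entry point such as ra_parser::parse_path:
// it reports every token it consumes to the sink.
fn parse_two(sink: &mut dyn Sink) {
    sink.token(1);
    sink.token(1);
}

fn main() {
    let tokens = ["std", "::", "mem"];
    let mut pos = 0;

    let mut sink = CountingSink { token_pos: 0 };
    parse_two(&mut sink);

    // The caller advances its own cursor by the number of consumed tokens,
    // which is what Parser::finish does via SubtreeTokenSource::bump_n.
    pos += sink.token_pos;
    assert_eq!(&tokens[..pos], &["std", "::"][..]);
}
```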
crates/ra_mbe/src/subtree_source.rs

@@ -9,12 +9,12 @@ struct TtToken {
     pub n_tokens: usize,
 }

-/// SubtreeSourceQuerier let outside to query internal tokens as string
-pub(crate) struct SubtreeSourceQuerier<'a> {
+/// Querier let outside to query internal tokens as string
+pub(crate) struct Querier<'a> {
     src: &'a SubtreeTokenSource<'a>,
 }

-impl<'a> SubtreeSourceQuerier<'a> {
+impl<'a> Querier<'a> {
     pub(crate) fn token(&self, uidx: usize) -> (SyntaxKind, &SmolStr) {
         let tkn = &self.src.tokens[uidx];
         (tkn.kind, &tkn.text)

@@ -32,7 +32,8 @@ impl<'a> SubtreeTokenSource<'a> {
         SubtreeTokenSource { tokens: TtTokenBuilder::build(subtree), tt_pos: 0, subtree }
     }

-    pub fn advance(&mut self, curr: usize, skip_first_delimiter: bool) {
+    // Advance token source and skip the first delimiter
+    pub fn advance(&mut self, n_token: usize, skip_first_delimiter: bool) {
         if skip_first_delimiter {
             self.tt_pos += 1;
         }

@@ -47,32 +48,20 @@ impl<'a> SubtreeTokenSource<'a> {
         // Such that we cannot simpliy advance the cursor
         // We have to bump it one by one
         let mut pos = 0;
-        while pos < curr {
+        while pos < n_token {
             pos += self.bump(&self.subtree.token_trees[pos]);
         }
     }

-    pub fn querier(&self) -> SubtreeSourceQuerier {
-        SubtreeSourceQuerier { src: self }
-    }
-
-    fn count(&self, tt: &tt::TokenTree) -> usize {
-        assert!(!self.tokens.is_empty());
-        TtTokenBuilder::count_tt_tokens(tt, None)
-    }
-
-    pub(crate) fn bump(&mut self, tt: &tt::TokenTree) -> usize {
-        let cur = &self.tokens[self.tt_pos];
-        let n_tokens = cur.n_tokens;
-        self.tt_pos += self.count(tt);
-        n_tokens
+    pub fn querier(&self) -> Querier {
+        Querier { src: self }
     }

     pub(crate) fn bump_n(
         &mut self,
-        n_tokens: usize,
-        mut token_pos: usize,
-    ) -> (usize, Vec<&tt::TokenTree>) {
+        n_tt_tokens: usize,
+        token_pos: &mut usize,
+    ) -> Vec<&tt::TokenTree> {
         let mut res = vec![];
         // Matching `TtToken` cursor to `tt::TokenTree` cursor
         // It is because TtToken is not One to One mapping to tt::Token

@@ -83,17 +72,28 @@ impl<'a> SubtreeTokenSource<'a> {
         //
         // Such that we cannot simpliy advance the cursor
         // We have to bump it one by one
-        let next_pos = self.tt_pos + n_tokens;
-        let old_token_pos = token_pos;
+        let next_pos = self.tt_pos + n_tt_tokens;

         while self.tt_pos < next_pos {
-            let current = &self.subtree.token_trees[token_pos];
+            let current = &self.subtree.token_trees[*token_pos];
             let n = self.bump(current);
-            res.extend((0..n).map(|i| &self.subtree.token_trees[token_pos + i]));
-            token_pos += n;
+            res.extend((0..n).map(|i| &self.subtree.token_trees[*token_pos + i]));
+            *token_pos += n;
         }

-        (token_pos - old_token_pos, res)
+        res
+    }
+
+    fn count(&self, tt: &tt::TokenTree) -> usize {
+        assert!(!self.tokens.is_empty());
+        TtTokenBuilder::count_tt_tokens(tt, None)
+    }
+
+    fn bump(&mut self, tt: &tt::TokenTree) -> usize {
+        let cur = &self.tokens[self.tt_pos];
+        let n_tokens = cur.n_tokens;
+        self.tt_pos += self.count(tt);
+        n_tokens
     }
 }
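bump_n changes its calling convention here: the old version returned how far the tt::TokenTree cursor had advanced together with the consumed trees, and the caller had to apply the advance itself (see the removed TtCursor::eat_parse_result in tt_cursor.rs below), while the new version advances the caller's position in place through a &mut usize and returns only the trees. A minimal, self-contained illustration of that before/after shape, using a simplified stand-in Source type rather than the real SubtreeTokenSource:

```rust
// Hypothetical simplified source; the real code walks tt::TokenTree nodes.
struct Source {
    items: Vec<char>,
}

impl Source {
    // Old shape: return the advance and let the caller update its own cursor.
    fn take_old(&self, n: usize, pos: usize) -> (usize, Vec<char>) {
        (n, self.items[pos..pos + n].to_vec())
    }

    // New shape (what bump_n now does): advance the caller's cursor in place.
    fn take_new(&self, n: usize, pos: &mut usize) -> Vec<char> {
        let taken = self.items[*pos..*pos + n].to_vec();
        *pos += n;
        taken
    }
}

fn main() {
    let src = Source { items: vec!['a', 'b', 'c'] };

    // Old convention: two steps, and the second one is easy to forget.
    let mut pos = 0;
    let (adv, taken) = src.take_old(2, pos);
    pos += adv;
    assert_eq!((pos, taken), (2, vec!['a', 'b']));

    // New convention: the position is updated as a side effect of the call.
    let mut pos = 0;
    let taken = src.take_new(2, &mut pos);
    assert_eq!((pos, taken), (2, vec!['a', 'b']));
}
```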
crates/ra_mbe/src/syntax_bridge.rs

@@ -4,7 +4,7 @@ use ra_syntax::{
     ast, SyntaxKind::*, TextUnit
 };

-use crate::subtree_source::{SubtreeTokenSource, SubtreeSourceQuerier};
+use crate::subtree_source::{SubtreeTokenSource, Querier};

 /// Maps `tt::TokenId` to the relative range of the original token.
 #[derive(Default)]

@@ -107,14 +107,14 @@ fn convert_tt(

 struct TtTreeSink<'a> {
     buf: String,
-    src_querier: SubtreeSourceQuerier<'a>,
+    src_querier: Querier<'a>,
     text_pos: TextUnit,
     token_pos: usize,
     inner: SyntaxTreeBuilder,
 }

 impl<'a> TtTreeSink<'a> {
-    fn new(src_querier: SubtreeSourceQuerier<'a>) -> TtTreeSink {
+    fn new(src_querier: Querier<'a>) -> TtTreeSink {
         TtTreeSink {
             buf: String::new(),
             src_querier,
crates/ra_mbe/src/tt_cursor.rs

@@ -1,25 +1,5 @@
 use crate::ParseError;
-use crate::subtree_source::SubtreeTokenSource;
+use crate::subtree_parser::Parser;

-use ra_parser::{TokenSource, TreeSink};
-
-use ra_syntax::{
-    SyntaxKind
-};
-
-struct SubtreeTokenSink {
-    token_pos: usize,
-}
-
-impl TreeSink for SubtreeTokenSink {
-    fn token(&mut self, _kind: SyntaxKind, n_tokens: u8) {
-        self.token_pos += n_tokens as usize;
-    }
-
-    fn start_node(&mut self, _kind: SyntaxKind) {}
-    fn finish_node(&mut self) {}
-    fn error(&mut self, _error: ra_parser::ParseError) {}
-}
-
 #[derive(Clone)]
 pub(crate) struct TtCursor<'a> {

@@ -99,41 +79,9 @@ impl<'a> TtCursor<'a> {
         })
     }

-    fn eat_parse_result(
-        &mut self,
-        parsed_token: usize,
-        src: &mut SubtreeTokenSource,
-    ) -> Option<tt::TokenTree> {
-        let (adv, res) = src.bump_n(parsed_token, self.pos);
-        self.pos += adv;
-
-        let res: Vec<_> = res.into_iter().cloned().collect();
-
-        match res.len() {
-            0 => None,
-            1 => Some(res[0].clone()),
-            _ => Some(tt::TokenTree::Subtree(tt::Subtree {
-                delimiter: tt::Delimiter::None,
-                token_trees: res,
-            })),
-        }
-    }
-
-    fn eat_parse<F>(&mut self, f: F) -> Option<tt::TokenTree>
-    where
-        F: FnOnce(&dyn TokenSource, &mut dyn TreeSink),
-    {
-        let mut src = SubtreeTokenSource::new(self.subtree);
-        src.advance(self.pos, true);
-        let mut sink = SubtreeTokenSink { token_pos: 0 };
-
-        f(&src, &mut sink);
-
-        self.eat_parse_result(sink.token_pos, &mut src)
-    }
-
     pub(crate) fn eat_path(&mut self) -> Option<tt::TokenTree> {
-        self.eat_parse(ra_parser::parse_path)
+        let parser = Parser::new(&mut self.pos, self.subtree);
+        parser.parse_path()
     }

     pub(crate) fn expect_char(&mut self, char: char) -> Result<(), ParseError> {
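With its own TreeSink glue gone, eat_path now just hands the new subtree_parser::Parser a mutable borrow of the cursor position, so the position is already advanced by the time the helper is consumed. A small, self-contained sketch of that borrowing pattern, with hypothetical Cursor and Helper types standing in for TtCursor and Parser:

```rust
// Hypothetical stand-ins: Cursor plays the role of TtCursor, Helper the role
// of subtree_parser::Parser. Not rust-analyzer code.
struct Cursor {
    pos: usize,
    items: Vec<u32>,
}

struct Helper<'a> {
    pos: &'a mut usize,
    items: &'a [u32],
}

impl<'a> Helper<'a> {
    fn new(pos: &'a mut usize, items: &'a [u32]) -> Helper<'a> {
        Helper { pos, items }
    }

    // Consumes self, mirroring Parser::parse_path(self): the mutable borrow of
    // the cursor position ends here, with the position already advanced.
    fn eat_one(self) -> Option<u32> {
        let item = self.items.get(*self.pos).copied();
        if item.is_some() {
            *self.pos += 1;
        }
        item
    }
}

impl Cursor {
    // Mirrors the new TtCursor::eat_path: build the helper, delegate, done.
    fn eat(&mut self) -> Option<u32> {
        Helper::new(&mut self.pos, &self.items).eat_one()
    }
}

fn main() {
    let mut cursor = Cursor { pos: 0, items: vec![10, 20] };
    assert_eq!(cursor.eat(), Some(10));
    assert_eq!(cursor.pos, 1); // the helper advanced the cursor's own position
}
```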