Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-28 12:54:58 +00:00

Commit 412ac63ff5 ("docs"), parent 2fa2805887
11 changed files with 143 additions and 98 deletions
@@ -1,4 +1,7 @@
use std::mem;

use ra_parser::{TreeSink, ParseError};
use rowan::GreenNodeBuilder;

use crate::{
    SmolStr, SyntaxError, SyntaxErrorKind, TextUnit, TextRange,
@@ -7,19 +10,32 @@ use crate::{
    syntax_node::{GreenNode, RaTypes},
};

use rowan::GreenNodeBuilder;

/// Bridges the parser with our specific syntax tree representation.
///
/// `TreeBuilder` also handles attachment of trivia (whitespace) to nodes.
pub(crate) struct TreeBuilder<'a> {
    text: &'a str,
    tokens: &'a [Token],
    text_pos: TextUnit,
    token_pos: usize,
    state: State,
    errors: Vec<SyntaxError>,
    inner: GreenNodeBuilder<RaTypes>,
}

enum State {
    PendingStart,
    Normal,
    PendingFinish,
}

impl<'a> TreeSink for TreeBuilder<'a> {
    fn leaf(&mut self, kind: SyntaxKind, n_tokens: u8) {
        match mem::replace(&mut self.state, State::Normal) {
            State::PendingStart => unreachable!(),
            State::PendingFinish => self.inner.finish_internal(),
            State::Normal => (),
        }
        self.eat_trivias();
        let n_tokens = n_tokens as usize;
        let len = self.tokens[self.token_pos..self.token_pos + n_tokens]
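The `State` enum turns the builder into a small state machine: every `TreeSink` event first settles whatever the previous event left pending, using `mem::replace` to swap in the new state and get the old one back in a single step. A minimal sketch of that one-shot transition, with illustrative names rather than the crate's real types:

use std::mem;

#[derive(Debug, PartialEq)]
enum State {
    PendingStart,
    Normal,
    PendingFinish,
}

struct Builder {
    state: State,
    finished_nodes: usize,
}

impl Builder {
    // Every event begins by resolving the previous event's pending work.
    fn on_event(&mut self) {
        match mem::replace(&mut self.state, State::Normal) {
            // A finish was deferred so trailing trivia could still be
            // attached to the node; close it now.
            State::PendingFinish => self.finished_nodes += 1,
            State::PendingStart | State::Normal => (),
        }
    }
}

fn main() {
    let mut b = Builder { state: State::PendingFinish, finished_nodes: 0 };
    b.on_event();
    assert_eq!((b.finished_nodes, b.state), (1, State::Normal));
}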
@@ -29,11 +45,18 @@ impl<'a> TreeSink for TreeBuilder<'a> {
        self.do_leaf(kind, len, n_tokens);
    }

-    fn start_branch(&mut self, kind: SyntaxKind, root: bool) {
-        if root {
-            self.inner.start_internal(kind);
-            return;
+    fn start_branch(&mut self, kind: SyntaxKind) {
+        match mem::replace(&mut self.state, State::Normal) {
+            State::PendingStart => {
+                self.inner.start_internal(kind);
+                // No need to attach trivias to previous node: there is no
+                // previous node.
+                return;
+            }
+            State::PendingFinish => self.inner.finish_internal(),
+            State::Normal => (),
+        }

        let n_trivias =
            self.tokens[self.token_pos..].iter().take_while(|it| it.kind.is_trivia()).count();
        let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias];
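`start_branch` has to decide how many of the trivia tokens sitting in front of the new node actually belong to the previous one (for example a trailing comment). The first step is the `take_while` count shown above; here is a self-contained sketch of that step, with stand-in `Token`/`Kind` types (the real ones live in the lexer module):

#[derive(Clone, Copy, PartialEq, Debug)]
enum Kind {
    Whitespace,
    Comment,
    Ident,
}

impl Kind {
    fn is_trivia(self) -> bool {
        matches!(self, Kind::Whitespace | Kind::Comment)
    }
}

struct Token {
    kind: Kind,
}

fn main() {
    // Token stream: [whitespace, comment, ident, whitespace].
    let tokens = vec![
        Token { kind: Kind::Whitespace },
        Token { kind: Kind::Comment },
        Token { kind: Kind::Ident },
        Token { kind: Kind::Whitespace },
    ];
    let token_pos = 0;
    // Count the run of trivia in front of the next real token, mirroring
    // the `take_while(..).count()` call in `start_branch`.
    let n_trivias =
        tokens[token_pos..].iter().take_while(|it| it.kind.is_trivia()).count();
    let leading_trivias = &tokens[token_pos..token_pos + n_trivias];
    assert_eq!(n_trivias, 2);
    assert_eq!(leading_trivias.len(), 2);
}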
@@ -54,11 +77,12 @@ impl<'a> TreeSink for TreeBuilder<'a> {
        self.eat_n_trivias(n_attached_trivias);
    }

-    fn finish_branch(&mut self, root: bool) {
-        if root {
-            self.eat_trivias()
+    fn finish_branch(&mut self) {
+        match mem::replace(&mut self.state, State::PendingFinish) {
+            State::PendingStart => unreachable!(),
+            State::PendingFinish => self.inner.finish_internal(),
+            State::Normal => (),
        }
-        self.inner.finish_internal();
    }

    fn error(&mut self, error: ParseError) {
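With the state machine in place, `start_branch` and `finish_branch` no longer need the `root: bool` flag: only the very first branch can observe `PendingStart`, so the root case is detected from the state alone. To picture how a parser drives such a sink, here is a toy event-sink trait and a recorder implementation; the trait shape is a simplification for illustration, not ra_parser's actual `TreeSink` signature:

// Simplified stand-in for a TreeSink-style trait; the real one also carries
// SyntaxKind values, token counts, and parse errors.
trait Sink {
    fn start_branch(&mut self, name: &'static str);
    fn leaf(&mut self, text: &'static str);
    fn finish_branch(&mut self);
}

#[derive(Default)]
struct Recorder {
    events: Vec<String>,
}

impl Sink for Recorder {
    fn start_branch(&mut self, name: &'static str) {
        self.events.push(format!("start {}", name));
    }
    fn leaf(&mut self, text: &'static str) {
        self.events.push(format!("leaf {}", text));
    }
    fn finish_branch(&mut self) {
        self.events.push("finish".to_string());
    }
}

fn main() {
    // Roughly the event stream a parser would emit for `struct Foo;`.
    let mut sink = Recorder::default();
    sink.start_branch("SOURCE_FILE");
    sink.start_branch("STRUCT_DEF");
    sink.leaf("struct");
    sink.leaf("Foo");
    sink.leaf(";");
    sink.finish_branch();
    sink.finish_branch();
    assert_eq!(sink.events.len(), 7);
}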
@@ -74,12 +98,21 @@ impl<'a> TreeBuilder<'a> {
            tokens,
            text_pos: 0.into(),
            token_pos: 0,
+            state: State::PendingStart,
            errors: Vec::new(),
            inner: GreenNodeBuilder::new(),
        }
    }

-    pub(super) fn finish(self) -> (GreenNode, Vec<SyntaxError>) {
+    pub(super) fn finish(mut self) -> (GreenNode, Vec<SyntaxError>) {
+        match mem::replace(&mut self.state, State::Normal) {
+            State::PendingFinish => {
+                self.eat_trivias();
+                self.inner.finish_internal()
+            }
+            State::PendingStart | State::Normal => unreachable!(),
+        }
+
        (self.inner.finish(), self.errors)
    }
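`finish` changes from `self` to `mut self` because the body now calls `mem::replace(&mut self.state, ..)`: the builder is still consumed, but the binding has to be mutable for that last state transition (and for eating the file's trailing trivia). A minimal illustration of a consuming-but-mutating method, with a made-up type:

struct Counter {
    n: u32,
}

impl Counter {
    // `self` is taken by value, so the counter cannot be used afterwards,
    // but the `mut` binding still allows in-place mutation before returning.
    fn finish(mut self) -> u32 {
        self.n += 1;
        self.n
    }
}

fn main() {
    let c = Counter { n: 41 };
    assert_eq!(c.finish(), 42);
}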
@@ -5,6 +5,26 @@ use crate::{
    parsing::lexer::Token,
};

pub(crate) struct ParserInput<'t> {
    text: &'t str,
    /// start position of each token (except whitespace and comment)
    /// ```non-rust
    /// struct Foo;
    /// ^------^---
    /// |      |  ^-
    /// 0      7  10
    /// ```
    /// (token, start_offset): `[(struct, 0), (Foo, 7), (;, 10)]`
    start_offsets: Vec<TextUnit>,
    /// non-whitespace/comment tokens
    /// ```non-rust
    /// struct Foo {}
    /// ^^^^^^ ^^^ ^^
    /// ```
    /// tokens: `[struct, Foo, {, }]`
    tokens: Vec<Token>,
}

impl<'t> TokenSource for ParserInput<'t> {
    fn token_kind(&self, pos: usize) -> SyntaxKind {
        if !(pos < self.tokens.len()) {
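`ParserInput` keeps two parallel vectors: the non-trivia tokens the parser actually sees, and the text offset at which each of them starts. The bounds check at the end of the hunk suggests out-of-range positions get a fallback answer rather than an index panic; the sketch below answers EOF there. Placeholder kinds and `u32` offsets stand in for `SyntaxKind`/`TextUnit`:

#[derive(Clone, Copy, PartialEq, Debug)]
enum Kind {
    Struct,
    Ident,
    Semicolon,
    Eof,
}

// Parallel arrays in the spirit of ParserInput: `kinds[i]` is the i-th
// non-trivia token, `start_offsets[i]` is where it begins in the text.
struct Input {
    kinds: Vec<Kind>,
    start_offsets: Vec<u32>,
}

impl Input {
    // Out-of-range lookups answer EOF instead of panicking.
    fn token_kind(&self, pos: usize) -> Kind {
        if pos < self.kinds.len() {
            self.kinds[pos]
        } else {
            Kind::Eof
        }
    }
}

fn main() {
    // `struct Foo;` -> tokens (struct, 0), (Foo, 7), (;, 10).
    let input = Input {
        kinds: vec![Kind::Struct, Kind::Ident, Kind::Semicolon],
        start_offsets: vec![0, 7, 10],
    };
    assert_eq!(input.token_kind(1), Kind::Ident);
    assert_eq!(input.token_kind(99), Kind::Eof);
    assert_eq!(input.start_offsets[2], 10);
}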
@@ -28,26 +48,6 @@ impl<'t> TokenSource for ParserInput<'t> {
    }
}

-pub(crate) struct ParserInput<'t> {
-    text: &'t str,
-    /// start position of each token(expect whitespace and comment)
-    /// ```non-rust
-    /// struct Foo;
-    /// ^------^---
-    /// |      |  ^-
-    /// 0      7  10
-    /// ```
-    /// (token, start_offset): `[(struct, 0), (Foo, 7), (;, 10)]`
-    start_offsets: Vec<TextUnit>,
-    /// non-whitespace/comment tokens
-    /// ```non-rust
-    /// struct Foo {}
-    /// ^^^^^^ ^^^ ^^
-    /// ```
-    /// tokens: `[struct, Foo, {, }]`
-    tokens: Vec<Token>,
-}
-
impl<'t> ParserInput<'t> {
    /// Generate input from tokens (except comment and whitespace).
    pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> ParserInput<'t> {
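`ParserInput::new` walks the raw lexer output once: every token advances the running offset by its length, but only non-trivia tokens are pushed into the two vectors. A sketch of that loop, again with stand-in types rather than the crate's real `Token`/`TextUnit`:

#[derive(Clone, Copy, PartialEq, Debug)]
enum Kind {
    Struct,
    Ident,
    Semicolon,
    Whitespace,
}

impl Kind {
    fn is_trivia(self) -> bool {
        self == Kind::Whitespace
    }
}

#[derive(Clone, Copy)]
struct RawToken {
    kind: Kind,
    len: u32,
}

// Walk the raw token stream once, keeping a running offset; only
// non-trivia tokens are pushed, but every token advances the offset.
fn build_input(raw_tokens: &[RawToken]) -> (Vec<Kind>, Vec<u32>) {
    let mut kinds = Vec::new();
    let mut start_offsets = Vec::new();
    let mut offset = 0u32;
    for tok in raw_tokens {
        if !tok.kind.is_trivia() {
            kinds.push(tok.kind);
            start_offsets.push(offset);
        }
        offset += tok.len;
    }
    (kinds, start_offsets)
}

fn main() {
    // `struct Foo;` lexed with a whitespace token in between.
    let raw = [
        RawToken { kind: Kind::Struct, len: 6 },
        RawToken { kind: Kind::Whitespace, len: 1 },
        RawToken { kind: Kind::Ident, len: 3 },
        RawToken { kind: Kind::Semicolon, len: 1 },
    ];
    let (kinds, offsets) = build_input(&raw);
    assert_eq!(kinds, vec![Kind::Struct, Kind::Ident, Kind::Semicolon]);
    assert_eq!(offsets, vec![0, 7, 10]);
}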
@@ -1,11 +1,18 @@
+//! Implementation of incremental re-parsing.
+//!
+//! We use two simple strategies for this:
+//! - if the edit modifies only a single token (like changing an identifier's
+//!   letter), we replace only this token.
+//! - otherwise, we search for the nearest `{}` block which contains the edit
+//!   and try to parse only this block.
+
use ra_text_edit::AtomTextEdit;
use ra_parser::Reparser;

use crate::{
-    SyntaxKind::*, TextRange, TextUnit,
+    SyntaxKind::*, TextRange, TextUnit, SyntaxError,
    algo,
    syntax_node::{GreenNode, SyntaxNode},
-    syntax_error::SyntaxError,
    parsing::{
        input::ParserInput,
        builder::TreeBuilder,
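The two strategies in the module doc form a cheap-to-expensive ladder: try to swap a single re-lexed token, otherwise reparse the smallest `{}` block around the edit, presumably falling back to a full reparse when neither applies. The sketch below shows only that decision shape; every type and helper in it is a hypothetical placeholder, not rust-analyzer's actual API:

#[derive(Clone, Copy)]
struct TextRange {
    start: u32,
    end: u32,
}

struct Edit {
    range: TextRange,
    insert: &'static str,
}

struct Block; // stand-in for a `{}` block node in the old tree
struct Reparsed; // stand-in for (new green node, new errors)

// Placeholder: would find the single non-trivia token covering the edit.
fn single_token_covering(_range: TextRange) -> Option<&'static str> {
    None
}

// Placeholder: would re-lex the patched token text and check that it is
// still exactly one token of the same kind.
fn relexes_to_same_kind(_token: &str, _insert: &str) -> bool {
    false
}

// Placeholder: would walk ancestors to the nearest `{}` block containing the edit.
fn enclosing_block(_range: TextRange) -> Option<Block> {
    None
}

// Placeholder: would run the block-level reparser on the patched block text.
fn reparse_block(_block: Block, _edit: &Edit) -> Reparsed {
    Reparsed
}

fn incremental_reparse(edit: &Edit) -> Option<Reparsed> {
    // Strategy 1: the edit stays inside one token and re-lexing keeps the
    // token's kind -- replace just that leaf.
    if let Some(token) = single_token_covering(edit.range) {
        if relexes_to_same_kind(token, edit.insert) {
            return Some(Reparsed);
        }
    }
    // Strategy 2: reparse only the nearest enclosing `{}` block; if there
    // is none, the caller performs a full reparse instead.
    let block = enclosing_block(edit.range)?;
    Some(reparse_block(block, edit))
}

fn main() {
    let edit = Edit { range: TextRange { start: 10, end: 11 }, insert: "x" };
    assert!(incremental_reparse(&edit).is_none());
}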