Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-01 06:11:35 +00:00)
Track synthetic tokens, to be able to remove them again later

Commit 1a5aa84e9f (parent 1b5cd03a37)
6 changed files with 133 additions and 39 deletions
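The idea, as reflected in the hunks below: every synthetic token inserted by fixup_syntax now carries a SyntheticTokenId, and the token map built while lowering records which token-tree tokens are synthetic, so reverse_fixups can strip exactly those afterwards. A minimal, self-contained sketch of that bookkeeping, using toy stand-in types rather than the real mbe API (only the names shown in this diff, such as SyntheticTokenId and synthetic_token_id, are taken from it):

use std::collections::HashMap;

// Toy stand-ins; the real types live in the mbe crate.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct SyntheticTokenId(u32);

#[derive(Default, Debug)]
struct TokenMap {
    // token-tree token id -> synthetic id, recorded while the fixed-up
    // syntax tree is lowered to a token tree
    synthetic: HashMap<u32, SyntheticTokenId>,
}

impl TokenMap {
    fn synthetic_token_id(&self, tt_id: u32) -> Option<SyntheticTokenId> {
        self.synthetic.get(&tt_id).copied()
    }
}

fn main() {
    let mut map = TokenMap::default();
    // Pretend token 42 is the `__ra_fixup` identifier inserted by fixup_syntax.
    map.synthetic.insert(42, SyntheticTokenId(0));
    assert_eq!(map.synthetic_token_id(42), Some(SyntheticTokenId(0)));
    assert_eq!(map.synthetic_token_id(7), None); // ordinary tokens are not tracked
}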
@@ -5,8 +5,8 @@ use std::sync::Arc;
 use base_db::{salsa, SourceDatabase};
 use either::Either;
 use limit::Limit;
-use mbe::{syntax_node_to_token_tree, ExpandError, ExpandResult, SyntheticToken};
-use rustc_hash::{FxHashMap, FxHashSet};
+use mbe::{syntax_node_to_token_tree, ExpandError, ExpandResult};
+use rustc_hash::FxHashSet;
 use syntax::{
     algo::diff,
     ast::{self, HasAttrs, HasDocComments},
@@ -442,7 +442,7 @@ fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Ar
         ));
     }
 
-    fixup::reverse_fixups(&mut tt);
+    fixup::reverse_fixups(&mut tt, &macro_arg.1);
 
     ExpandResult { value: Some(Arc::new(tt)), err }
 }

@@ -1,10 +1,10 @@
-use mbe::SyntheticToken;
+use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
 use rustc_hash::FxHashMap;
 use syntax::{
     ast::{self, AstNode},
-    match_ast, SyntaxKind, SyntaxNode, SyntaxToken,
+    match_ast, SyntaxKind, SyntaxNode, TextRange,
 };
-use tt::{Leaf, Subtree};
+use tt::Subtree;
 
 #[derive(Debug)]
 pub struct SyntaxFixups {
@@ -16,6 +16,7 @@ pub fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
     let mut append = FxHashMap::default();
     let mut replace = FxHashMap::default();
     let mut preorder = node.preorder();
+    let empty_id = SyntheticTokenId(0);
     while let Some(event) = preorder.next() {
         let node = match event {
             syntax::WalkEvent::Enter(node) => node,
@@ -27,12 +28,32 @@ pub fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
             preorder.skip_subtree();
             continue;
         }
+        let end_range = TextRange::empty(node.text_range().end());
         match_ast! {
             match node {
                 ast::FieldExpr(it) => {
                     if it.name_ref().is_none() {
                         // incomplete field access: some_expr.|
-                        append.insert(node.clone(), vec![(SyntaxKind::IDENT, "__ra_fixup".into())]);
+                        append.insert(node.clone(), vec![
+                            SyntheticToken {
+                                kind: SyntaxKind::IDENT,
+                                text: "__ra_fixup".into(),
+                                range: end_range,
+                                id: empty_id,
+                            },
+                        ]);
                     }
                 },
+                ast::ExprStmt(it) => {
+                    if it.semicolon_token().is_none() {
+                        append.insert(node.clone(), vec![
+                            SyntheticToken {
+                                kind: SyntaxKind::SEMICOLON,
+                                text: ";".into(),
+                                range: end_range,
+                                id: empty_id,
+                            },
+                        ]);
+                    }
+                },
                 _ => (),
@@ -42,20 +63,21 @@ pub fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
     SyntaxFixups { append, replace }
 }
 
-pub fn reverse_fixups(tt: &mut Subtree) {
+pub fn reverse_fixups(tt: &mut Subtree, token_map: &TokenMap) {
+    eprintln!("token_map: {:?}", token_map);
     tt.token_trees.retain(|tt| match tt {
-        tt::TokenTree::Leaf(Leaf::Ident(ident)) => ident.text != "__ra_fixup",
+        tt::TokenTree::Leaf(leaf) => token_map.synthetic_token_id(leaf.id()).is_none(),
         _ => true,
     });
     tt.token_trees.iter_mut().for_each(|tt| match tt {
-        tt::TokenTree::Subtree(tt) => reverse_fixups(tt),
+        tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map),
         _ => {}
     });
 }
 
 #[cfg(test)]
 mod tests {
-    use expect_test::{Expect, expect};
+    use expect_test::{expect, Expect};
 
     use super::reverse_fixups;
 
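A hedged sketch of the removal pass that the new reverse_fixups signature enables: drop every leaf whose id the token map knows as synthetic, then recurse into the remaining subtrees. The types below are simplified stand-ins, not the real tt::Subtree / tt::Leaf API:

// Simplified stand-in for a nested token tree.
#[derive(Debug)]
enum TokenTree {
    Leaf { tt_id: u32, text: String },
    Subtree(Vec<TokenTree>),
}

// Stands in for token_map.synthetic_token_id(leaf.id()).is_some().
fn is_synthetic(tt_id: u32, synthetic_ids: &[u32]) -> bool {
    synthetic_ids.contains(&tt_id)
}

fn reverse_fixups(trees: &mut Vec<TokenTree>, synthetic_ids: &[u32]) {
    // Drop synthetic leaves at this level...
    trees.retain(|tt| match tt {
        TokenTree::Leaf { tt_id, .. } => !is_synthetic(*tt_id, synthetic_ids),
        TokenTree::Subtree(_) => true,
    });
    // ...then recurse into subtrees, mirroring the retain + for_each above.
    for tt in trees.iter_mut() {
        if let TokenTree::Subtree(children) = tt {
            reverse_fixups(children, synthetic_ids);
        }
    }
}

fn main() {
    let mut trees = vec![
        TokenTree::Leaf { tt_id: 1, text: "a".into() },
        TokenTree::Subtree(vec![
            TokenTree::Leaf { tt_id: 2, text: ".".into() },
            TokenTree::Leaf { tt_id: 42, text: "__ra_fixup".into() },
        ]),
    ];
    reverse_fixups(&mut trees, &[42]);
    println!("{trees:?}"); // the synthetic `__ra_fixup` leaf is gone
}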
@@ -63,7 +85,7 @@ mod tests {
     fn check(ra_fixture: &str, mut expect: Expect) {
         let parsed = syntax::SourceFile::parse(ra_fixture);
         let fixups = super::fixup_syntax(&parsed.syntax_node());
-        let (mut tt, _tmap) = mbe::syntax_node_to_token_tree_censored(
+        let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(
             &parsed.syntax_node(),
             fixups.replace,
             fixups.append,
@@ -77,9 +99,14 @@
 
         // the fixed-up tree should be syntactically valid
         let (parse, _) = mbe::token_tree_to_syntax_node(&tt, ::mbe::TopEntryPoint::MacroItems);
-        assert_eq!(parse.errors(), &[], "parse has syntax errors. parse tree:\n{:#?}", parse.syntax_node());
+        assert_eq!(
+            parse.errors(),
+            &[],
+            "parse has syntax errors. parse tree:\n{:#?}",
+            parse.syntax_node()
+        );
 
-        reverse_fixups(&mut tt);
+        reverse_fixups(&mut tt, &tmap);
 
         // the fixed-up + reversed version should be equivalent to the original input
         // (but token IDs don't matter)
@@ -89,48 +116,60 @@ mod tests {
 
     #[test]
     fn incomplete_field_expr_1() {
-        check(r#"
+        check(
+            r#"
 fn foo() {
     a.
 }
-"#, expect![[r#"
+"#,
+            expect![[r#"
 fn foo () {a . __ra_fixup}
-"#]])
+"#]],
+        )
     }
 
     #[test]
     fn incomplete_field_expr_2() {
-        check(r#"
+        check(
+            r#"
 fn foo() {
     a. ;
 }
-"#, expect![[r#"
+"#,
+            expect![[r#"
 fn foo () {a . __ra_fixup ;}
-"#]])
+"#]],
+        )
     }
 
     #[test]
     fn incomplete_field_expr_3() {
-        check(r#"
+        check(
+            r#"
 fn foo() {
     a. ;
     bar();
 }
-"#, expect![[r#"
+"#,
+            expect![[r#"
 fn foo () {a . __ra_fixup ; bar () ;}
-"#]])
+"#]],
+        )
     }
 
     #[test]
     fn field_expr_before_call() {
         // another case that easily happens while typing
-        check(r#"
+        check(
+            r#"
 fn foo() {
     a.b
     bar();
 }
-"#, expect![[r#"
-fn foo () {a . b bar () ;}
-"#]])
+"#,
+            expect![[r#"
+fn foo () {a . b ; bar () ;}
+"#]],
+        )
     }
 }
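Note on field_expr_before_call: the expected token text gains a `;` because the new ast::ExprStmt fixup appends a synthetic semicolon to the incomplete statement `a.b`, so `bar();` now parses as a separate statement rather than being folded into the field expression.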