mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-09-27 04:19:13 +00:00)

Fix up token_tree_to_syntax_node float split handling

parent 9053bcc65c
commit c6e7917d6e

5 changed files with 76 additions and 12 deletions
@@ -111,7 +111,8 @@ impl ItemTree {
             Some(node) => node,
             None => return Default::default(),
         };
-        if never!(syntax.kind() == SyntaxKind::ERROR) {
+        if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax)
+        {
             // FIXME: not 100% sure why these crop up, but return an empty tree to avoid a panic
             return Default::default();
         }
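The guard above now carries the offending `file_id` and syntax node in its message. As a rough illustration only (a hypothetical re-implementation, not rust-analyzer's actual `never!` macro from `stdx`): a never!-style assertion logs with context when the "impossible" condition holds and evaluates to that condition, so it can drive an early return as in the hunk.

```rust
/// Hypothetical stand-in for a `never!`-style guard: log with context when the
/// unexpected condition is hit, and yield the condition so callers can bail out.
macro_rules! never {
    ($cond:expr, $($fmt:tt)*) => {{
        let cond = $cond;
        if cond {
            eprintln!($($fmt)*); // a real implementation would log, or panic in debug builds
        }
        cond
    }};
}

fn main() {
    let kind = "ERROR";
    if never!(kind == "ERROR", "unexpected kind {:?}", kind) {
        return; // mirrors `return Default::default()` in the hunk above
    }
}
```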
@@ -133,7 +134,7 @@ impl ItemTree {
                 ctx.lower_macro_stmts(stmts)
             },
             _ => {
-                panic!("cannot create item tree from {syntax:?} {syntax}");
+                panic!("cannot create item tree for file {file_id:?} from {syntax:?} {syntax}");
             },
         }
     };
@@ -104,7 +104,7 @@ macro_rules! id {
         $($t)*
     };
 }
-id /*+errors*/! {
+id! {
     #[proc_macros::identity]
     impl Foo for WrapBj {
         async fn foo(&self) {
@@ -113,18 +113,17 @@ id /*+errors*/! {
     }
 }
 "#,
-        expect![[r##"
+        expect![[r#"
 macro_rules! id {
     ($($t:tt)*) => {
         $($t)*
     };
 }
-/* parse error: expected SEMICOLON */
 #[proc_macros::identity] impl Foo for WrapBj {
     async fn foo(&self ) {
         self .0.id().await ;
     }
 }
-"##]],
+"#]],
     );
 }
@@ -95,7 +95,7 @@ pub fn token_tree_to_syntax_node(
             parser::Step::Token { kind, n_input_tokens: n_raw_tokens } => {
                 tree_sink.token(kind, n_raw_tokens)
             }
-            parser::Step::FloatSplit { .. } => tree_sink.token(SyntaxKind::FLOAT_NUMBER, 1),
+            parser::Step::FloatSplit { has_pseudo_dot } => tree_sink.float_split(has_pseudo_dot),
             parser::Step::Enter { kind } => tree_sink.start_node(kind),
             parser::Step::Exit => tree_sink.finish_node(),
             parser::Step::Error { msg } => tree_sink.error(msg.to_string()),
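For context (not part of the diff): the float-split steps exist because the lexer produces a single FLOAT_NUMBER token for text like `0.1`, even when it is really two tuple-field indices separated by a dot. A tiny example of source that exercises this path:

```rust
// Nested tuple-field access: `0.1` below is lexed as one float literal, but
// syntactically it is two index fields, which is what `Step::FloatSplit`
// lets the tree sink reconstruct.
fn main() {
    let x = ((1, 2), 3);
    let inner = x.0.1; // `0.1` must be split into `0` `.` `1`
    assert_eq!(inner, 2);
}
```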
@@ -797,6 +797,41 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
 }
 
 impl<'a> TtTreeSink<'a> {
+    fn float_split(&mut self, has_pseudo_dot: bool) {
+        let (text, _span) = match self.cursor.token_tree() {
+            Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Literal(lit), _)) => {
+                (lit.text.as_str(), lit.span)
+            }
+            _ => unreachable!(),
+        };
+        match text.split_once('.') {
+            Some((left, right)) => {
+                assert!(!left.is_empty());
+                self.inner.start_node(SyntaxKind::NAME_REF);
+                self.inner.token(SyntaxKind::INT_NUMBER, left);
+                self.inner.finish_node();
+
+                // here we move the exit up, the original exit has been deleted in process
+                self.inner.finish_node();
+
+                self.inner.token(SyntaxKind::DOT, ".");
+
+                if has_pseudo_dot {
+                    assert!(right.is_empty());
+                } else {
+                    self.inner.start_node(SyntaxKind::NAME_REF);
+                    self.inner.token(SyntaxKind::INT_NUMBER, right);
+                    self.inner.finish_node();
+
+                    // the parser creates an unbalanced start node, we are required to close it here
+                    self.inner.finish_node();
+                }
+            }
+            None => unreachable!(),
+        }
+        self.cursor = self.cursor.bump();
+    }
+
     fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
         if kind == LIFETIME_IDENT {
             n_tokens = 2;
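A standalone sketch of the text-splitting rule the new `float_split` applies (a hypothetical helper using the same `split_once('.')` logic as the code above, not part of the commit): a literal like `0.1` yields two integer parts, while a trailing-dot literal like `1.` (the `has_pseudo_dot` case) yields only the left part.

```rust
/// Hypothetical mirror of the text handling in `float_split` above: split a
/// float literal's text at the dot and report whether the right-hand side is
/// empty (the "pseudo dot" case).
fn split_float_text(text: &str) -> (&str, Option<&str>) {
    match text.split_once('.') {
        Some((left, right)) => {
            assert!(!left.is_empty());
            if right.is_empty() {
                (left, None) // e.g. `1.` -- only an INT_NUMBER and a DOT are emitted
            } else {
                (left, Some(right)) // e.g. `0.1` -- INT_NUMBER, DOT, INT_NUMBER
            }
        }
        None => unreachable!("only called on dotted float literals"),
    }
}

fn main() {
    assert_eq!(split_float_text("0.1"), ("0", Some("1")));
    assert_eq!(split_float_text("1."), ("1", None));
}
```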
@@ -170,12 +170,39 @@ impl<'a> TtIter<'a> {
         let mut res = vec![];
 
         if cursor.is_root() {
+            if float_splits.is_empty() {
             while curr != cursor {
                 if let Some(token) = curr.token_tree() {
                     res.push(token.cloned());
                 }
                 curr = curr.bump();
             }
+            } else {
+                // let mut float_splits = float_splits.into_iter().peekable();
+                // while let Some(tt) = curr.token_tree() {
+                //     let mut tt = tt.cloned();
+                //     let mut tt_mut_ref = &mut tt;
+                //     if let Some(fs) = float_splits.peek() {
+                //         loop {
+                //             curr = curr.bump_subtree();
+                //             if curr == *fs {
+                //                 float_splits.next();
+                //             }
+                //             if curr.is_root() {
+                //                 break;
+                //             }
+                //         }
+                //     }
+                //     res.push(tt);
+                // }
+
+                while curr != cursor {
+                    if let Some(token) = curr.token_tree() {
+                        res.push(token.cloned());
+                    }
+                    curr = curr.bump();
+                }
+            }
         }
         self.inner = self.inner.as_slice()[res.len()..].iter();
         let res = match res.len() {
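One detail worth noting in the hunk above is `self.inner = self.inner.as_slice()[res.len()..].iter();`, which rebuilds the iterator past the tokens just collected into `res`. A minimal, self-contained illustration of that idiom (hypothetical names, not rust-analyzer code):

```rust
// Re-slicing a `slice::Iter` to skip the `n` items that were consumed
// elsewhere, as the TtIter code above does with `res.len()`.
fn skip_consumed<T>(iter: std::slice::Iter<'_, T>, n: usize) -> std::slice::Iter<'_, T> {
    iter.as_slice()[n..].iter()
}

fn main() {
    let data = [1, 2, 3, 4];
    let rest = skip_consumed(data.iter(), 2);
    assert_eq!(rest.copied().collect::<Vec<_>>(), vec![3, 4]);
}
```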
@@ -102,10 +102,12 @@ impl TopEntryPoint {
                 match step {
                     Step::Enter { .. } => depth += 1,
                     Step::Exit => depth -= 1,
-                    Step::FloatSplit { .. } | Step::Token { .. } | Step::Error { .. } => (),
+                    Step::FloatSplit { .. } => depth -= 1,
+                    Step::Token { .. } | Step::Error { .. } => (),
                 }
             }
             assert!(!first, "no tree at all");
+            assert_eq!(depth, 0, "unbalanced tree");
         }
 
         res
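For the parser-side check above: `FloatSplit` now counts as closing one node, matching the tree sink's `float_split`, which "moves the exit up" to finish a node the parser had already entered. A rough sketch of that balance bookkeeping under the same assumption (simplified step kinds, not the real `parser::Step` enum):

```rust
// Simplified stand-in for the depth assertion in the hunk above: Enter opens
// a node, Exit closes one, and FloatSplit also closes one (the node whose
// exit the tree sink emits itself).
enum StepKind {
    Enter,
    Exit,
    FloatSplit,
    Token,
    Error,
}

fn assert_balanced(steps: &[StepKind]) {
    let mut depth = 0i32;
    for step in steps {
        match step {
            StepKind::Enter => depth += 1,
            StepKind::Exit => depth -= 1,
            StepKind::FloatSplit => depth -= 1,
            StepKind::Token | StepKind::Error => (),
        }
    }
    assert_eq!(depth, 0, "unbalanced tree");
}

fn main() {
    use StepKind::*;
    // e.g. a field access like `x.0.1`: the float split closes the extra
    // node that an earlier Enter opened.
    assert_balanced(&[Enter, Enter, Token, Enter, FloatSplit, Token, Exit, Exit]);
}
```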