Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-09-28 12:54:58 +00:00)

Commit aceb9d7fb0: Add token ids for all tt::Leaf
Parent: 46ca40ccfc

6 changed files with 72 additions and 33 deletions
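The diff below threads a TokenId through every construction site of tt::Punct and tt::Literal (tt::Ident already carried one, as the unchanged lines show); sites that synthesize tokens with no source position fall back to tt::TokenId::unspecified(). As rough orientation, a small self-contained Rust sketch of the resulting shape follows; TokenId's layout and the sentinel value are simplified stand-ins for illustration, not the actual tt crate definitions.

// Simplified sketch of the data shapes this commit touches (not the real tt crate).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TokenId(pub u32);

impl TokenId {
    // Sentinel standing in for "no source mapping available" (illustrative value).
    pub const fn unspecified() -> TokenId {
        TokenId(u32::MAX)
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Spacing {
    Alone,
    Joint,
}

#[derive(Debug, Clone, Copy)]
pub struct Punct {
    pub char: char,
    pub spacing: Spacing,
    pub id: TokenId, // the field every construction site now has to fill in
}

fn main() {
    // Before this commit a Punct could be built without an id; now the id is
    // part of the struct literal at every construction site.
    let lt = Punct { char: '<', spacing: Spacing::Alone, id: TokenId::unspecified() };
    println!("{:?}", lt);
}
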
@@ -97,11 +97,24 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
 
 fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> {
     let mut result = Vec::<tt::TokenTree>::new();
-    result.push(tt::Leaf::Punct(tt::Punct { char: '<', spacing: tt::Spacing::Alone }).into());
+    result.push(
+        tt::Leaf::Punct(tt::Punct {
+            char: '<',
+            spacing: tt::Spacing::Alone,
+            id: tt::TokenId::unspecified(),
+        })
+        .into(),
+    );
     for i in 0..n {
         if i > 0 {
-            result
-                .push(tt::Leaf::Punct(tt::Punct { char: ',', spacing: tt::Spacing::Alone }).into());
+            result.push(
+                tt::Leaf::Punct(tt::Punct {
+                    char: ',',
+                    spacing: tt::Spacing::Alone,
+                    id: tt::TokenId::unspecified(),
+                })
+                .into(),
+            );
         }
         result.push(
             tt::Leaf::Ident(tt::Ident {
@@ -112,7 +125,14 @@ fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> {
         );
         result.extend(bound.iter().cloned());
     }
-    result.push(tt::Leaf::Punct(tt::Punct { char: '>', spacing: tt::Spacing::Alone }).into());
+    result.push(
+        tt::Leaf::Punct(tt::Punct {
+            char: '>',
+            spacing: tt::Spacing::Alone,
+            id: tt::TokenId::unspecified(),
+        })
+        .into(),
+    );
     result
 }
 
@@ -29,6 +29,7 @@ macro_rules! __quote {
            tt::Leaf::Punct(tt::Punct {
                char: $first,
                spacing: tt::Spacing::Alone,
+                id: tt::TokenId::unspecified(),
            }).into()
        ]
    }
@@ -40,10 +41,12 @@ macro_rules! __quote {
            tt::Leaf::Punct(tt::Punct {
                char: $first,
                spacing: tt::Spacing::Joint,
+                id: tt::TokenId::unspecified(),
            }).into(),
            tt::Leaf::Punct(tt::Punct {
                char: $sec,
                spacing: tt::Spacing::Alone,
+                id: tt::TokenId::unspecified(),
            }).into()
        ]
    }
@@ -179,15 +182,15 @@ macro_rules! impl_to_to_tokentrees {
 }
 
 impl_to_to_tokentrees! {
-    u32 => self { tt::Literal{text: self.to_string().into()} };
-    usize => self { tt::Literal{text: self.to_string().into()}};
-    i32 => self { tt::Literal{text: self.to_string().into()}};
+    u32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
+    usize => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
+    i32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
    tt::Leaf => self { self };
    tt::Literal => self { self };
    tt::Ident => self { self };
    tt::Punct => self { self };
-    &str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into()}};
-    String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into()}}
+    &str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}};
+    String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}}
 }
 
 #[cfg(test)]
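The __quote! and impl_to_to_tokentrees! hunks above cover tokens that are synthesized during quoting and therefore have no source text to point back to, so they all take tt::TokenId::unspecified(). Below is a minimal sketch of that conversion pattern using simplified local types; the ToLiteral trait and its impls are illustrative stand-ins, not the code the macro actually generates.

// Sketch: converting plain values into literal leaves that now carry an id.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TokenId(u32);

impl TokenId {
    const fn unspecified() -> TokenId {
        TokenId(u32::MAX) // illustrative sentinel
    }
}

#[derive(Debug, Clone)]
struct Literal {
    text: String,
    id: TokenId,
}

trait ToLiteral {
    fn to_literal(&self) -> Literal;
}

// The macro in the diff generates impls of roughly this shape for u32, usize,
// i32, &str and String; writing one by hand shows where the new id field lands.
impl ToLiteral for u32 {
    fn to_literal(&self) -> Literal {
        Literal { text: self.to_string(), id: TokenId::unspecified() }
    }
}

impl ToLiteral for &str {
    fn to_literal(&self) -> Literal {
        // Mirrors the diff's escape_default + {:?} trick to get a quoted string literal.
        Literal {
            text: format!("{:?}", self.escape_default().to_string()),
            id: TokenId::unspecified(),
        }
    }
}

fn main() {
    println!("{:?}", 42u32.to_literal());
    println!("{:?}", "hi\n".to_literal());
}
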
@@ -108,7 +108,12 @@ fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> Result<Fragment, ExpandError>
     let tt = tt::Subtree {
         delimiter: None,
         token_trees: vec![
-            tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone }).into(),
+            tt::Leaf::from(tt::Punct {
+                char: '$',
+                spacing: tt::Spacing::Alone,
+                id: tt::TokenId::unspecified(),
+            })
+            .into(),
             tt::Leaf::from(tt::Ident { text: v.clone(), id: tt::TokenId::unspecified() })
                 .into(),
         ],
@@ -136,11 +136,15 @@ fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTr
 }
 
 fn mk_punct(c: char) -> tt::TokenTree {
-    tt::TokenTree::from(tt::Leaf::from(tt::Punct { char: c, spacing: tt::Spacing::Alone }))
+    tt::TokenTree::from(tt::Leaf::from(tt::Punct {
+        char: c,
+        spacing: tt::Spacing::Alone,
+        id: tt::TokenId::unspecified(),
+    }))
 }
 
 fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
-    let lit = tt::Literal { text: doc_comment_text(comment) };
+    let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() };
 
     tt::TokenTree::from(tt::Leaf::from(lit))
 }
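The Convertor hunks that follow handle the opposite case: tokens that do come from real source text get a fresh id from self.alloc(token.text_range()), so expanded tokens can later be traced back to a text range. A hedged sketch of that allocation pattern is below; TokenMap, TextRange, and range_by_token are illustrative names for this sketch, not the actual rust-analyzer types.

// Sketch: allocate a fresh TokenId per source token and remember its range.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct TokenId(u32);

#[derive(Debug, Clone, Copy)]
struct TextRange {
    start: u32,
    end: u32,
}

#[derive(Default)]
struct TokenMap {
    // Index in this Vec doubles as the TokenId value, so lookup is a plain index.
    ranges: Vec<TextRange>,
}

impl TokenMap {
    fn alloc(&mut self, range: TextRange) -> TokenId {
        let id = TokenId(self.ranges.len() as u32);
        self.ranges.push(range);
        id
    }

    fn range_by_token(&self, id: TokenId) -> Option<TextRange> {
        self.ranges.get(id.0 as usize).copied()
    }
}

fn main() {
    let mut map = TokenMap::default();
    let id = map.alloc(TextRange { start: 10, end: 13 });
    assert_eq!(map.range_by_token(id).map(|r| (r.start, r.end)), Some((10, 13)));
}
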
@@ -223,12 +227,16 @@ impl Convertor {
                        .take(token.text().len() - 1)
                        .chain(std::iter::once(last_spacing));
                    for (char, spacing) in token.text().chars().zip(spacing_iter) {
-                        token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
+                        let id = self.alloc(token.text_range());
+                        token_trees
+                            .push(tt::Leaf::from(tt::Punct { char, spacing, id }).into());
                    }
                } else {
-                    let child: tt::TokenTree =
-                        if token.kind() == T![true] || token.kind() == T![false] {
-                            tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
+                    let child: tt::TokenTree = if token.kind() == T![true]
+                        || token.kind() == T![false]
+                    {
+                        let id = self.alloc(token.text_range());
+                        tt::Leaf::from(tt::Literal { text: token.text().clone(), id }).into()
                    } else if token.kind().is_keyword()
                        || token.kind() == IDENT
                        || token.kind() == LIFETIME
@@ -237,7 +245,8 @@ impl Convertor {
                        let text = token.text().clone();
                        tt::Leaf::from(tt::Ident { text, id }).into()
                    } else if token.kind().is_literal() {
-                        tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
+                        let id = self.alloc(token.text_range());
+                        tt::Leaf::from(tt::Literal { text: token.text().clone(), id }).into()
                    } else {
                        return None;
                    };
@@ -78,12 +78,12 @@ macro_rules! foobar {
 
     assert_eq!(expansion.token_trees.len(), 3);
     // ($e:ident) => { foo bar $e }
-    // 0 1 2 3 4
-    assert_eq!(get_id(&expansion.token_trees[0]), Some(2));
-    assert_eq!(get_id(&expansion.token_trees[1]), Some(3));
+    // 0123 45 6 7 89
+    assert_eq!(get_id(&expansion.token_trees[0]), Some(6));
+    assert_eq!(get_id(&expansion.token_trees[1]), Some(7));
 
-    // So baz should be 5
-    assert_eq!(get_id(&expansion.token_trees[2]), Some(5));
+    // So baz should be 10
+    assert_eq!(get_id(&expansion.token_trees[2]), Some(10));
 }
 
 #[test]
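In the test hunk above the expected ids shift (foo and bar from 2 and 3 to 6 and 7, baz from 5 to 10) now that punct and literal leaves are numbered as well, not just idents. A sketch of the kind of helper get_id could be follows; the enum layout is simplified and the helper is illustrative, not the test's actual implementation.

// Sketch: pull the TokenId out of a leaf token tree, if there is one.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TokenId(u32);

#[derive(Debug)]
enum Leaf {
    Ident { id: TokenId },
    Punct { id: TokenId },
    Literal { id: TokenId },
}

#[derive(Debug)]
enum TokenTree {
    Leaf(Leaf),
    Subtree, // subtree contents elided in this sketch
}

fn get_id(tt: &TokenTree) -> Option<u32> {
    match tt {
        TokenTree::Leaf(Leaf::Ident { id })
        | TokenTree::Leaf(Leaf::Punct { id })
        | TokenTree::Leaf(Leaf::Literal { id }) => Some(id.0),
        TokenTree::Subtree => None,
    }
}

fn main() {
    assert_eq!(get_id(&TokenTree::Leaf(Leaf::Ident { id: TokenId(6) })), Some(6));
    assert_eq!(get_id(&TokenTree::Subtree), None);
}
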
@@ -64,12 +64,14 @@ pub enum Delimiter {
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Literal {
     pub text: SmolStr,
+    pub id: TokenId,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct Punct {
     pub char: char,
     pub spacing: Spacing,
+    pub id: TokenId,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]