Add token ids for all tt::Leaf

Edwin Cheng 2019-12-12 21:47:54 +08:00
parent 46ca40ccfc
commit aceb9d7fb0
6 changed files with 72 additions and 33 deletions


@@ -97,11 +97,24 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
 fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> {
     let mut result = Vec::<tt::TokenTree>::new();
-    result.push(tt::Leaf::Punct(tt::Punct { char: '<', spacing: tt::Spacing::Alone }).into());
+    result.push(
+        tt::Leaf::Punct(tt::Punct {
+            char: '<',
+            spacing: tt::Spacing::Alone,
+            id: tt::TokenId::unspecified(),
+        })
+        .into(),
+    );
     for i in 0..n {
         if i > 0 {
-            result
-                .push(tt::Leaf::Punct(tt::Punct { char: ',', spacing: tt::Spacing::Alone }).into());
+            result.push(
+                tt::Leaf::Punct(tt::Punct {
+                    char: ',',
+                    spacing: tt::Spacing::Alone,
+                    id: tt::TokenId::unspecified(),
+                })
+                .into(),
+            );
         }
         result.push(
             tt::Leaf::Ident(tt::Ident {
@@ -112,7 +125,14 @@ fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> {
         );
         result.extend(bound.iter().cloned());
     }
-    result.push(tt::Leaf::Punct(tt::Punct { char: '>', spacing: tt::Spacing::Alone }).into());
+    result.push(
+        tt::Leaf::Punct(tt::Punct {
+            char: '>',
+            spacing: tt::Spacing::Alone,
+            id: tt::TokenId::unspecified(),
+        })
+        .into(),
+    );
     result
 }
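
Note: after this change every punct synthesized in make_type_args repeats the same three-field struct literal with a placeholder id. A minimal, self-contained sketch of a hypothetical helper that would centralize that pattern; the types below are stand-ins mirroring the shapes in the diff, not the real ra_tt definitions.

// Stand-ins mirroring the shapes used in the diff above (assumptions, not ra_tt itself).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TokenId(u32);

impl TokenId {
    const fn unspecified() -> TokenId {
        TokenId(!0)
    }
}

#[derive(Debug, Clone, Copy)]
enum Spacing {
    Alone,
}

#[derive(Debug)]
struct Punct {
    char: char,
    spacing: Spacing,
    id: TokenId,
}

// Hypothetical helper: one place that fills in the "unspecified" id for synthesized puncts.
fn punct_alone(c: char) -> Punct {
    Punct { char: c, spacing: Spacing::Alone, id: TokenId::unspecified() }
}

fn main() {
    // make_type_args would then push punct_alone('<'), punct_alone(','), punct_alone('>').
    println!("{:?}", punct_alone('<'));
}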


@@ -29,6 +29,7 @@ macro_rules! __quote {
             tt::Leaf::Punct(tt::Punct {
                 char: $first,
                 spacing: tt::Spacing::Alone,
+                id: tt::TokenId::unspecified(),
             }).into()
         ]
     }
@@ -40,10 +41,12 @@ macro_rules! __quote {
             tt::Leaf::Punct(tt::Punct {
                 char: $first,
                 spacing: tt::Spacing::Joint,
+                id: tt::TokenId::unspecified(),
             }).into(),
             tt::Leaf::Punct(tt::Punct {
                 char: $sec,
                 spacing: tt::Spacing::Alone,
+                id: tt::TokenId::unspecified(),
             }).into()
         ]
     }
@@ -179,15 +182,15 @@ macro_rules! impl_to_to_tokentrees {
 }

 impl_to_to_tokentrees! {
-    u32 => self { tt::Literal{text: self.to_string().into()} };
-    usize => self { tt::Literal{text: self.to_string().into()}};
-    i32 => self { tt::Literal{text: self.to_string().into()}};
+    u32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
+    usize => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
+    i32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
     tt::Leaf => self { self };
     tt::Literal => self { self };
     tt::Ident => self { self };
     tt::Punct => self { self };
-    &str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into()}};
-    String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into()}}
+    &str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}};
+    String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}}
 }

 #[cfg(test)]
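
Note: the impl_to_to_tokentrees! arms above all stringify the value and attach tt::TokenId::unspecified(). A rough, self-contained sketch of what the numeric and &str arms boil down to; the Literal type and the UNSPECIFIED constant are stand-ins, not the real ra_tt items.

// Stand-in mirroring the Literal shape used above (an assumption, not the real ra_tt type).
#[derive(Debug)]
struct Literal {
    text: String,
    id: u32, // placeholder for tt::TokenId::unspecified()
}

const UNSPECIFIED: u32 = !0;

// Roughly what the `u32 => self { tt::Literal{ text: self.to_string().into(), id: ... } }` arm produces.
fn u32_to_literal(n: u32) -> Literal {
    Literal { text: n.to_string(), id: UNSPECIFIED }
}

// Roughly what the `&str` arm produces: a debug-quoted, escaped string literal.
fn str_to_literal(s: &str) -> Literal {
    Literal { text: format!("{:?}", s.escape_default().to_string()), id: UNSPECIFIED }
}

fn main() {
    println!("{:?}", u32_to_literal(42)); // text: "42"
    println!("{:?}", str_to_literal("hi")); // text includes the surrounding quotes
}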


@@ -108,7 +108,12 @@ fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> Result<Fragment, ExpandError>
         let tt = tt::Subtree {
             delimiter: None,
             token_trees: vec![
-                tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone }).into(),
+                tt::Leaf::from(tt::Punct {
+                    char: '$',
+                    spacing: tt::Spacing::Alone,
+                    id: tt::TokenId::unspecified(),
+                })
+                .into(),
                 tt::Leaf::from(tt::Ident { text: v.clone(), id: tt::TokenId::unspecified() })
                     .into(),
             ],


@@ -136,11 +136,15 @@ fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTr
 }

 fn mk_punct(c: char) -> tt::TokenTree {
-    tt::TokenTree::from(tt::Leaf::from(tt::Punct { char: c, spacing: tt::Spacing::Alone }))
+    tt::TokenTree::from(tt::Leaf::from(tt::Punct {
+        char: c,
+        spacing: tt::Spacing::Alone,
+        id: tt::TokenId::unspecified(),
+    }))
 }

 fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
-    let lit = tt::Literal { text: doc_comment_text(comment) };
+    let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() };

     tt::TokenTree::from(tt::Leaf::from(lit))
 }
@@ -223,24 +227,29 @@ impl Convertor {
                     .take(token.text().len() - 1)
                     .chain(std::iter::once(last_spacing));
                 for (char, spacing) in token.text().chars().zip(spacing_iter) {
-                    token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
+                    let id = self.alloc(token.text_range());
+                    token_trees
+                        .push(tt::Leaf::from(tt::Punct { char, spacing, id }).into());
                 }
             } else {
-                let child: tt::TokenTree =
-                    if token.kind() == T![true] || token.kind() == T![false] {
-                        tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
-                    } else if token.kind().is_keyword()
-                        || token.kind() == IDENT
-                        || token.kind() == LIFETIME
-                    {
-                        let id = self.alloc(token.text_range());
-                        let text = token.text().clone();
-                        tt::Leaf::from(tt::Ident { text, id }).into()
-                    } else if token.kind().is_literal() {
-                        tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
-                    } else {
-                        return None;
-                    };
+                let child: tt::TokenTree = if token.kind() == T![true]
+                    || token.kind() == T![false]
+                {
+                    let id = self.alloc(token.text_range());
+                    tt::Leaf::from(tt::Literal { text: token.text().clone(), id }).into()
+                } else if token.kind().is_keyword()
+                    || token.kind() == IDENT
+                    || token.kind() == LIFETIME
+                {
+                    let id = self.alloc(token.text_range());
+                    let text = token.text().clone();
+                    tt::Leaf::from(tt::Ident { text, id }).into()
+                } else if token.kind().is_literal() {
+                    let id = self.alloc(token.text_range());
+                    tt::Leaf::from(tt::Literal { text: token.text().clone(), id }).into()
+                } else {
+                    return None;
+                };
                 token_trees.push(child);
             }
         }
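
Note: the spacing_iter logic above splits a multi-character punctuation token into single-char puncts, marking every char but the last as Joint; with this commit each char also gets an id allocated from the token's text range. A self-contained sketch of just the spacing split, with a stand-in Spacing enum and without the id allocation.

// Minimal sketch of the spacing split used by Convertor above; Spacing is a stand-in.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Spacing {
    Alone,
    Joint,
}

// Every char except the last is Joint; the last char takes `last_spacing`,
// which the convertor computes from whatever token follows.
fn split_punct(text: &str, last_spacing: Spacing) -> Vec<(char, Spacing)> {
    let spacing_iter = std::iter::repeat(Spacing::Joint)
        .take(text.len() - 1)
        .chain(std::iter::once(last_spacing));
    text.chars().zip(spacing_iter).collect()
}

fn main() {
    // "=>" becomes '=' (Joint) followed by '>' (Alone).
    assert_eq!(
        split_punct("=>", Spacing::Alone),
        vec![('=', Spacing::Joint), ('>', Spacing::Alone)]
    );
}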


@@ -78,12 +78,12 @@ macro_rules! foobar {
     assert_eq!(expansion.token_trees.len(), 3);

     // ($e:ident) => { foo bar $e }
-    // 0 1 2 3 4
-    assert_eq!(get_id(&expansion.token_trees[0]), Some(2));
-    assert_eq!(get_id(&expansion.token_trees[1]), Some(3));
+    // 0123 45 6 7 89
+    assert_eq!(get_id(&expansion.token_trees[0]), Some(6));
+    assert_eq!(get_id(&expansion.token_trees[1]), Some(7));

-    // So baz should be 5
-    assert_eq!(get_id(&expansion.token_trees[2]), Some(5));
+    // So baz should be 10
+    assert_eq!(get_id(&expansion.token_trees[2]), Some(10));
 }

 #[test]
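
Note: the test expectations shift (foo 2→6, bar 3→7, baz 5→10) because puncts and literals in the macro definition now consume ids too, via self.alloc(token.text_range()). A rough, purely illustrative sketch of an incrementing alloc-style map in that spirit; the names and layout are hypothetical, not the real ra_mbe TokenMap.

// Hypothetical sketch of an id allocator in the style of Convertor::alloc above;
// the real token map lives in ra_mbe, this is only an illustration.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TokenId(u32);

#[derive(Default)]
struct TokenMap {
    // Index in this Vec == TokenId value; each entry remembers a source range (start, end).
    ranges: Vec<(u32, u32)>,
}

impl TokenMap {
    fn alloc(&mut self, range: (u32, u32)) -> TokenId {
        let id = TokenId(self.ranges.len() as u32);
        self.ranges.push(range);
        id
    }
}

fn main() {
    let mut map = TokenMap::default();
    // Every converted token (idents, and now puncts/literals too) gets the next id.
    let a = map.alloc((0, 1));
    let b = map.alloc((1, 2));
    assert_eq!((a, b), (TokenId(0), TokenId(1)));
}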


@@ -64,12 +64,14 @@ pub enum Delimiter {
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Literal {
     pub text: SmolStr,
+    pub id: TokenId,
 }

 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct Punct {
     pub char: char,
     pub spacing: Spacing,
+    pub id: TokenId,
 }

 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
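
Note: TokenId and TokenId::unspecified() are used throughout the diff but their definition is not shown. A sketch of the assumed shape: essentially a u32 newtype with a sentinel value for tokens that have no source position to map back to. Treat the exact definition as an assumption, not the verbatim ra_tt source.

// Assumed shape of the TokenId referenced above; a sketch, not the verbatim ra_tt source.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TokenId(pub u32);

impl TokenId {
    // Sentinel for leaves that have no source token to map back to
    // (e.g. tokens synthesized by builtin derives or the quote macros).
    pub const fn unspecified() -> TokenId {
        TokenId(!0)
    }
}

fn main() {
    assert_eq!(TokenId::unspecified(), TokenId(u32::max_value()));
}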