mirror of
https://github.com/rust-lang/rust-analyzer.git
synced 2025-09-27 12:29:21 +00:00
move tests
This commit is contained in:
parent
1abe3f8275
commit
f4ee0d736c
2 changed files with 66 additions and 92 deletions
|
@@ -743,82 +743,3 @@ impl<'a> TreeSink for TtTreeSink<'a> {
|
|||
self.inner.error(error, self.text_pos)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::parse_macro;
    use parser::TokenSource;
    use syntax::{
        ast::{make, AstNode},
        ted,
    };
    use test_utils::assert_eq_text;

    /// Expands a `macro_rules!` invocation containing one literal of each
    /// kind (char, int, float, string) and walks the resulting token tree
    /// through `SubtreeTokenSource`, checking that each literal keeps both
    /// its text and its `SyntaxKind` after the tt -> token-source round trip.
    ///
    /// The index arithmetic below mirrors the token layout of the expansion:
    /// the `2 +` skips the two opening tokens of the block, and each
    /// `let <ident> = <lit> ;` statement contributes 5 tokens.
    #[test]
    fn convert_tt_token_source() {
        let expansion = parse_macro(
            r#"
            macro_rules! literals {
                ($i:ident) => {
                    {
                        let a = 'c';
                        let c = 1000;
                        let f = 12E+99_f64;
                        let s = "rust1";
                    }
                }
            }
            "#,
        )
        .expand_tt("literals!(foo);");
        let tts = &[expansion.into()];
        let buffer = tt::buffer::TokenBuffer::from_tokens(tts);
        let mut tt_src = SubtreeTokenSource::new(&buffer);
        // Drain the token source into (kind, text) pairs until EOF so the
        // assertions below can index into a flat list.
        let mut tokens = vec![];
        while tt_src.current().kind != EOF {
            tokens.push((tt_src.current().kind, tt_src.text()));
            tt_src.bump();
        }

        // [${]
        // [let] [a] [=] ['c'] [;]
        assert_eq!(tokens[2 + 3].1, "'c'");
        assert_eq!(tokens[2 + 3].0, CHAR);
        // [let] [c] [=] [1000] [;]
        assert_eq!(tokens[2 + 5 + 3].1, "1000");
        assert_eq!(tokens[2 + 5 + 3].0, INT_NUMBER);
        // [let] [f] [=] [12E+99_f64] [;]
        assert_eq!(tokens[2 + 10 + 3].1, "12E+99_f64");
        assert_eq!(tokens[2 + 10 + 3].0, FLOAT_NUMBER);

        // [let] [s] [=] ["rust1"] [;]
        assert_eq!(tokens[2 + 15 + 3].1, "\"rust1\"");
        assert_eq!(tokens[2 + 15 + 3].0, STRING);
    }

    /// Regression test: a token tree whose last child is trivia (whitespace)
    /// must still convert to a `tt::Subtree` with the correct delimiter.
    /// Appending whitespace after the closing `}` previously risked confusing
    /// delimiter detection in `syntax_node_to_token_tree`.
    #[test]
    fn test_token_tree_last_child_is_white_space() {
        let source_file = ast::SourceFile::parse("f!{}").ok().unwrap();
        let macro_call = source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
        let token_tree = macro_call.token_tree().unwrap();

        // Token Tree now is :
        // TokenTree
        // - TokenTree
        //   - T!['{']
        //   - T!['}']

        // Mutate a detached copy so the original parse tree stays untouched,
        // then re-freeze it with `clone_subtree` before converting.
        let token_tree = token_tree.clone_for_update();
        ted::append_child(token_tree.syntax(), make::tokens::single_space());
        let token_tree = token_tree.clone_subtree();
        // Token Tree now is :
        // TokenTree
        // - T!['{']
        // - T!['}']
        // - WHITE_SPACE

        let tt = syntax_node_to_token_tree(token_tree.syntax()).0;
        // Trailing whitespace must not change the detected delimiter.
        assert_eq!(tt.delimiter_kind(), Some(tt::DelimiterKind::Brace));
    }
}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue