Mirror of https://github.com/rust-lang/rust-analyzer.git
Add a test for macro parsing

commit 8cf156d85b (parent 8eac450f41)
2 changed files with 80 additions and 10 deletions
@@ -164,14 +164,18 @@ impl_froms!(TokenTree: Leaf, Subtree);
         crate::MacroRules::parse(&definition_tt).unwrap()
     }

-    fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) {
+    fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
         let source_file = ast::SourceFile::parse(invocation);
         let macro_invocation =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();

         let (invocation_tt, _) = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();

-        let expanded = rules.expand(&invocation_tt).unwrap();
+        rules.expand(&invocation_tt).unwrap()
+    }
+
+    fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) {
+        let expanded = expand(rules, invocation);
         assert_eq!(expanded.to_string(), expansion);
     }

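Note on the hunk above: moving the actual expansion into a separate `expand` helper lets a test keep the raw `tt::Subtree` for structural checks, while `assert_expansion` stays as the string-level convenience. A minimal usage sketch (the `id!` macro is made up for illustration and is not part of this commit; the exact spacing of the expected string may differ):

    #[test]
    fn expand_sketch() {
        let rules = create_rules(
            "
            macro_rules! id {
                ($i:ident) => { struct $i ; }
            }
            ",
        );
        // String-level check of the expansion text...
        assert_expansion(&rules, "id!(Foo)", "struct Foo ;");
        // ...or keep the tt::Subtree and inspect it directly
        // (assuming tt::Subtree exposes its token_trees).
        let expansion = expand(&rules, "id!(Foo)");
        assert!(!expansion.token_trees.is_empty());
    }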
@@ -268,4 +272,57 @@ impl_froms!(TokenTree: Leaf, Subtree);
         assert_expansion(&rules, "foo! { Foo,# Bar }", "struct Foo ; struct Bar ;");
     }

+    #[test]
+    fn expand_to_item_list() {
+        let rules = create_rules(
+            "
+            macro_rules! structs {
+                ($($i:ident),*) => {
+                    $(struct $i { field: u32 } )*
+                }
+            }
+            ",
+        );
+        let expansion = expand(&rules, "structs!(Foo, Bar)");
+        let tree = token_tree_to_ast_item_list(&expansion);
+        assert_eq!(
+            tree.syntax().debug_dump().trim(),
+            r#"
+SOURCE_FILE@[0; 40)
+  STRUCT_DEF@[0; 20)
+    STRUCT_KW@[0; 6)
+    NAME@[6; 9)
+      IDENT@[6; 9) "Foo"
+    NAMED_FIELD_DEF_LIST@[9; 20)
+      L_CURLY@[9; 10)
+      NAMED_FIELD_DEF@[10; 19)
+        NAME@[10; 15)
+          IDENT@[10; 15) "field"
+        COLON@[15; 16)
+        PATH_TYPE@[16; 19)
+          PATH@[16; 19)
+            PATH_SEGMENT@[16; 19)
+              NAME_REF@[16; 19)
+                IDENT@[16; 19) "u32"
+      R_CURLY@[19; 20)
+  STRUCT_DEF@[20; 40)
+    STRUCT_KW@[20; 26)
+    NAME@[26; 29)
+      IDENT@[26; 29) "Bar"
+    NAMED_FIELD_DEF_LIST@[29; 40)
+      L_CURLY@[29; 30)
+      NAMED_FIELD_DEF@[30; 39)
+        NAME@[30; 35)
+          IDENT@[30; 35) "field"
+        COLON@[35; 36)
+        PATH_TYPE@[36; 39)
+          PATH@[36; 39)
+            PATH_SEGMENT@[36; 39)
+              NAME_REF@[36; 39)
+                IDENT@[36; 39) "u32"
+      R_CURLY@[39; 40)"#
+                .trim()
+        );
+    }
+
 }
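Aside (not part of the diff): the byte ranges in the expected dump can be cross-checked by concatenating the tokens of the expansion. That yields a 40-character buffer along the lines of

    structFoo{field:u32}structBar{field:u32}

so the first STRUCT_DEF covers [0; 20) and the second [20; 40); evidently the token-tree-to-text bridge inserts no whitespace between tokens at this stage.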
@@ -129,17 +129,26 @@ impl TtTokenSource {
                    is_joint_to_next: false,
                    text: l.text.clone(),
                },
-                tt::Leaf::Punct(p) => Tok {
-                    kind: SyntaxKind::from_char(p.char).unwrap(),
-                    is_joint_to_next: p.spacing == tt::Spacing::Joint,
-                    text: {
+                tt::Leaf::Punct(p) => {
+                    let kind = match p.char {
+                        // the lexer may produce compound tokens for these ones
+                        '.' => DOT,
+                        ':' => COLON,
+                        '=' => EQ,
+                        '!' => EXCL,
+                        '-' => MINUS,
+                        c => SyntaxKind::from_char(c).unwrap(),
+                    };
+                    let text = {
                        let mut buf = [0u8; 4];
                        let s: &str = p.char.encode_utf8(&mut buf);
                        SmolStr::new(s)
-                    },
-                },
+                    };
+                    Tok { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text }
+                }
                tt::Leaf::Ident(ident) => {
-                    Tok { kind: IDENT, is_joint_to_next: false, text: ident.text.clone() }
+                    let kind = SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT);
+                    Tok { kind, is_joint_to_next: false, text: ident.text.clone() }
                }
            };
            self.tokens.push(tok)
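A brief sketch (not part of the commit) of what the punct and ident conversion above yields, using the `tt::Punct` shape the diff itself relies on (`char` plus `spacing`); `tt::Spacing::Alone` is assumed here as the counterpart of the `Joint` variant that appears in the diff:

    // Illustration only; field layout of tt::Punct assumed from the diff.
    let colon = tt::Punct { char: ':', spacing: tt::Spacing::Joint };
    // -> Tok { kind: COLON, is_joint_to_next: true, text: ":" }
    //    (a directly following ':' punct can then be treated as "::")
    let bang = tt::Punct { char: '!', spacing: tt::Spacing::Alone };
    // -> Tok { kind: EXCL, is_joint_to_next: false, text: "!" }
    // Idents now go through SyntaxKind::from_keyword, so "struct"
    // becomes STRUCT_KW while "Foo" stays IDENT.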
@@ -161,7 +170,11 @@ impl TtTokenSource {

 impl TokenSource for TtTokenSource {
     fn token_kind(&self, pos: usize) -> SyntaxKind {
-        self.tokens[pos].kind
+        if let Some(tok) = self.tokens.get(pos) {
+            tok.kind
+        } else {
+            SyntaxKind::EOF
+        }
     }
     fn is_token_joint_to_next(&self, pos: usize) -> bool {
         self.tokens[pos].is_joint_to_next
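The `token_kind` change matters because the parser may look ahead past the last converted token; indexing with `self.tokens[pos]` would panic there, while the new version reports end of input. A rough sketch of the resulting behaviour, assuming `src` is a `TtTokenSource` whose buffer holds exactly two tokens:

    // Illustration only; not part of the commit.
    assert_eq!(src.token_kind(1), src.tokens[1].kind); // in range: the stored kind
    assert_eq!(src.token_kind(2), SyntaxKind::EOF);    // past the end: EOF instead of a panic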