mirror of https://github.com/rust-lang/rust-analyzer.git
synced 2025-09-28 12:54:58 +00:00

Fix bugs

This commit is contained in:
parent 0d39b1c3fa
commit 9e35bf91b8

5 changed files with 153 additions and 49 deletions
@@ -63,8 +63,15 @@ impl HirFileId {
         match file_id.0 {
             HirFileIdRepr::File(file_id) => db.parse(file_id),
             HirFileIdRepr::Macro(macro_call_id) => {
-                // returning an empty string looks fishy...
-                parse_macro(db, macro_call_id).unwrap_or_else(|| SourceFile::parse(""))
+                parse_macro(db, macro_call_id).unwrap_or_else(|| {
+                    // Note:
+                    // The final goal is to make all parse_macro calls succeed,
+                    // such that the following log is never hit.
+                    log::warn!("fail on macro_parse: {}", macro_call_id.debug_dump(db));
+
+                    // returning an empty string looks fishy...
+                    SourceFile::parse("")
+                })
             }
         }
     }

@@ -299,3 +306,16 @@ impl AstItemDef<ast::TypeAliasDef> for TypeAliasId {
         db.lookup_intern_type_alias(self)
     }
 }
+
+impl MacroCallId {
+    pub fn debug_dump(&self, db: &impl DefDatabase) -> String {
+        let loc = self.clone().loc(db);
+        let node = loc.ast_id.to_node(db);
+        let syntax_str = node.syntax().to_string();
+
+        // dump the file name
+        let file_id: HirFileId = self.clone().into();
+        let original = file_id.original_file(db);
+        format!("macro call [file: {:#?}] : {}", db.file_relative_path(original), syntax_str)
+    }
+}

@@ -42,6 +42,7 @@ pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> C
         unresolved_imports: Vec::new(),
         unexpanded_macros: Vec::new(),
         global_macro_scope: FxHashMap::default(),
+        marco_stack_count: 0,
     };
     collector.collect();
     collector.finish()

@@ -55,6 +56,10 @@ struct DefCollector<DB> {
     unresolved_imports: Vec<(CrateModuleId, raw::ImportId, raw::ImportData)>,
     unexpanded_macros: Vec<(CrateModuleId, AstId<ast::MacroCall>, Path)>,
     global_macro_scope: FxHashMap<Name, MacroDefId>,
+
+    /// Some macros use `$tt:tt`, which means we would have to handle the expansion perfectly to know when it stops.
+    /// To prevent a stack overflow, we add a depth counter here.
+    marco_stack_count: u32,
 }

 impl<'a, DB> DefCollector<&'a DB>

@@ -324,10 +329,18 @@ where
     }

     fn collect_macro_expansion(&mut self, module_id: CrateModuleId, macro_call_id: MacroCallId) {
-        let file_id: HirFileId = macro_call_id.into();
-        let raw_items = self.db.raw_items(file_id);
-        ModCollector { def_collector: &mut *self, file_id, module_id, raw_items: &raw_items }
-            .collect(raw_items.items())
+        self.marco_stack_count += 1;
+
+        if self.marco_stack_count < 300 {
+            let file_id: HirFileId = macro_call_id.into();
+            let raw_items = self.db.raw_items(file_id);
+            ModCollector { def_collector: &mut *self, file_id, module_id, raw_items: &raw_items }
+                .collect(raw_items.items())
+        } else {
+            log::error!("Too deep macro expansion: {}", macro_call_id.debug_dump(self.db));
+        }
+
+        self.marco_stack_count -= 1;
     }

     fn finish(self) -> CrateDefMap {

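For context, a minimal self-contained sketch of the depth-guard pattern this hunk introduces (the names below are hypothetical, not rust-analyzer's): bump a counter before recursing, refuse to go past a fixed limit, and always decrement on the way out so sibling expansions are unaffected.

    struct Expander {
        depth: u32,
    }

    impl Expander {
        fn expand(&mut self, input: &str) -> Option<String> {
            self.depth += 1;
            let result = if self.depth < 300 {
                // Hypothetical recursive step; a real expander would re-enter
                // `expand` here for nested macro calls.
                Some(input.to_uppercase())
            } else {
                eprintln!("too deep macro expansion: {}", input);
                None
            };
            // Decrement unconditionally so the guard limits depth, not total count.
            self.depth -= 1;
            result
        }
    }
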
@@ -214,14 +214,15 @@ impl_froms!(TokenTree: Leaf, Subtree);
         let tree = token_tree_to_macro_items(&expanded);

         // Eat all white space by parse it back and forth
-        let expansion = ast::SourceFile::parse(expansion);
+        // Because $crate is split into two tokens, it needs some special treatment here
+        let expansion = expansion.replace("$crate", "C_C__C");
+        let expansion = ast::SourceFile::parse(&expansion);
         let expansion = syntax_node_to_token_tree(expansion.syntax()).unwrap().0;
         let file = token_tree_to_macro_items(&expansion);
+        let file = file.unwrap().syntax().debug_dump().trim().to_string();
+        let file = file.replace("C_C__C", "$crate");

-        assert_eq!(
-            tree.unwrap().syntax().debug_dump().trim(),
-            file.unwrap().syntax().debug_dump().trim()
-        );
+        assert_eq!(tree.unwrap().syntax().debug_dump().trim(), file,);
     }

     #[test]

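A minimal sketch of the sentinel round-trip the test helper now performs, using only std strings (the parse/pretty-print step in the middle is elided, and the test name is made up): `$crate` is masked with an ordinary identifier before re-parsing so the lexer sees a single token, then restored afterwards.

    #[test]
    fn sentinel_round_trip() {
        let expansion = "use $crate::foo;";
        // Mask `$crate` so a re-parse does not split it into `$` and `crate`.
        let masked = expansion.replace("$crate", "C_C__C");
        // ... the real helper parses `masked` and pretty-prints it back here ...
        let restored = masked.replace("C_C__C", "$crate");
        assert_eq!(restored, expansion);
    }
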
@@ -348,7 +349,36 @@ impl_froms!(TokenTree: Leaf, Subtree);
     }

     #[test]
-    fn expand_to_item_list() {
+    fn test_match_group_empty_fixed_token() {
+        let rules = create_rules(
+            r#"
+            macro_rules! foo {
+                ($ ($ i:ident)* #abc) => ( fn baz { $ (
+                    $ i ();
+                )*} );
+            }
+            "#,
+        );
+
+        assert_expansion(&rules, "foo! {#abc}", "fn baz {}");
+    }
+
+    #[test]
+    fn test_match_group_in_subtree() {
+        let rules = create_rules(
+            r#"
+            macro_rules! foo {
+                (fn $name:ident {$($i:ident)*} ) => ( fn $name() { $ (
+                    $ i ();
+                )*} );
+            }"#,
+        );
+
+        assert_expansion(&rules, "foo! {fn baz {a b} }", "fn baz () {a () ; b () ;}");
+    }
+
+    #[test]
+    fn test_expand_to_item_list() {
         let rules = create_rules(
             "
             macro_rules! structs {

@@ -401,7 +431,7 @@ MACRO_ITEMS@[0; 40)
     }

     #[test]
-    fn expand_literals_to_token_tree() {
+    fn test_expand_literals_to_token_tree() {
         fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree {
             if let tt::TokenTree::Subtree(subtree) = tt {
                 return &subtree;

@@ -763,30 +793,29 @@ MACRO_ITEMS@[0; 40)
         );
     }
-
-    // #[test]
+    #[test]
     // fn test_tt_block() {
     //     let rules = create_rules(
     //         r#"
     //         macro_rules! foo {
     //             ($ i:tt) => { fn foo() $ i }
     //         }
     // "#,
     //     );
     //     assert_expansion(&rules, r#"foo! { { 1; } }"#, r#"fn foo () {1 ;}"#);
     // }

     // #[test]
     // fn test_tt_group() {
     //     let rules = create_rules(
     //         r#"
     //         macro_rules! foo {
     //             ($($ i:tt)*) => { $($ i)* }
     //         }
     // "#,
     //     );
     //     assert_expansion(&rules, r#"foo! { fn foo() {} }"#, r#"fn foo () {}"#);
     // }

     #[test]
     fn test_lifetime() {
         let rules = create_rules(

@@ -822,4 +851,39 @@ MACRO_ITEMS@[0; 40)
         );
         assert_expansion(&rules, r#"foo!(pub foo);"#, r#"pub fn foo () {}"#);
     }
+
+    // The following tests are based on real world situations
+    #[test]
+    fn test_winapi_struct() {
+        // from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/macros.rs#L366
+
+        let rules = create_rules(
+            r#"
+            macro_rules! STRUCT {
+                ($(#[$attrs:meta])* struct $name:ident {
+                    $($field:ident: $ftype:ty,)+
+                }) => (
+                    #[repr(C)] #[derive(Copy)] $(#[$attrs])*
+                    pub struct $name {
+                        $(pub $field: $ftype,)+
+                    }
+                    impl Clone for $name {
+                        #[inline]
+                        fn clone(&self) -> $name { *self }
+                    }
+                    #[cfg(feature = "impl-default")]
+                    impl Default for $name {
+                        #[inline]
+                        fn default() -> $name { unsafe { $crate::_core::mem::zeroed() } }
+                    }
+                );
+            }
+            "#,
+        );
+        // from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/shared/d3d9caps.rs
+        assert_expansion(&rules, r#"STRUCT!{struct D3DVSHADERCAPS2_0 {Caps: u8,}}"#,
+            "# [repr (C)] # [derive (Copy)] pub struct D3DVSHADERCAPS2_0 {pub Caps : u8 ,} impl Clone for D3DVSHADERCAPS2_0 {# [inline] fn clone (& self) -> D3DVSHADERCAPS2_0 {* self}} # [cfg (feature = \"impl-default\")] impl Default for D3DVSHADERCAPS2_0 {# [inline] fn default () -> D3DVSHADERCAPS2_0 {unsafe {$crate :: _core :: mem :: zeroed ()}}}");
+        assert_expansion(&rules, r#"STRUCT!{#[cfg_attr(target_arch = "x86", repr(packed))] struct D3DCONTENTPROTECTIONCAPS {Caps : u8 ,}}"#,
+            "# [repr (C)] # [derive (Copy)] # [cfg_attr (target_arch = \"x86\" , repr (packed))] pub struct D3DCONTENTPROTECTIONCAPS {pub Caps : u8 ,} impl Clone for D3DCONTENTPROTECTIONCAPS {# [inline] fn clone (& self) -> D3DCONTENTPROTECTIONCAPS {* self}} # [cfg (feature = \"impl-default\")] impl Default for D3DCONTENTPROTECTIONCAPS {# [inline] fn default () -> D3DCONTENTPROTECTIONCAPS {unsafe {$crate :: _core :: mem :: zeroed ()}}}");
+    }
 }

@@ -121,6 +121,10 @@ impl Bindings {
         }
         Ok(())
     }
+
+    fn merge(&mut self, nested: Bindings) {
+        self.inner.extend(nested.inner);
+    }
 }

 fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings, ExpandError> {

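As a rough illustration of the merge semantics, here is a sketch using std's HashMap in place of the crate's internal binding map (the field and key types are stand-ins): extending the outer map folds the nested bindings in, and nested entries overwrite outer ones on key collisions.

    use std::collections::HashMap;

    struct Bindings {
        inner: HashMap<String, String>,
    }

    impl Bindings {
        fn merge(&mut self, nested: Bindings) {
            // `extend` inserts every nested entry, replacing outer entries with the same key.
            self.inner.extend(nested.inner);
        }
    }
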
@@ -236,7 +240,21 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
                     }
                 }
             }
-            _ => {}
+            crate::TokenTree::Subtree(subtree) => {
+                let input_subtree =
+                    input.eat_subtree().map_err(|_| ExpandError::UnexpectedToken)?;
+                if subtree.delimiter != input_subtree.delimiter {
+                    return Err(ExpandError::UnexpectedToken);
+                }
+
+                let mut input = TtCursor::new(input_subtree);
+                let bindings = match_lhs(&subtree, &mut input)?;
+                if !input.is_eof() {
+                    return Err(ExpandError::UnexpectedToken);
+                }
+
+                res.merge(bindings);
+            }
         }
     }
     Ok(res)

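A self-contained sketch of the recursive rule this arm adds, with hypothetical stand-in types rather than the ra_mbe ones: a pattern subtree only matches an input subtree when the delimiters agree and the pattern consumes the whole subtree; in the real code the bindings found inside are then merged into the outer result.

    #[derive(PartialEq)]
    enum Delim { Paren, Brace, Bracket }

    struct Subtree { delimiter: Delim, children: Vec<Tree> }
    enum Tree { Leaf(String), Subtree(Subtree) }

    // Err(()) plays the role of ExpandError::UnexpectedToken above.
    fn match_subtree(pattern: &Subtree, input: &Subtree) -> Result<(), ()> {
        if pattern.delimiter != input.delimiter {
            return Err(()); // delimiter mismatch, e.g. `(...)` vs `{...}`
        }
        if pattern.children.len() != input.children.len() {
            return Err(()); // leftover input corresponds to the `!input.is_eof()` check
        }
        for (p, i) in pattern.children.iter().zip(&input.children) {
            match (p, i) {
                (Tree::Leaf(a), Tree::Leaf(b)) if a == b => {}
                (Tree::Subtree(a), Tree::Subtree(b)) => match_subtree(a, b)?,
                _ => return Err(()),
            }
        }
        Ok(())
    }
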
@@ -287,7 +305,15 @@ fn expand_tt(
                     .into()
                 }
                 crate::Leaf::Punct(punct) => tt::Leaf::from(punct.clone()).into(),
-                crate::Leaf::Var(v) => bindings.get(&v.text, nesting)?.clone(),
+                crate::Leaf::Var(v) => {
+                    if v.text == "crate" {
+                        // FIXME: Properly handle $crate token
+                        tt::Leaf::from(tt::Ident { text: "$crate".into(), id: TokenId::unspecified() })
+                            .into()
+                    } else {
+                        bindings.get(&v.text, nesting)?.clone()
+                    }
+                }
                 crate::Leaf::Literal(l) => tt::Leaf::from(tt::Literal { text: l.text.clone() }).into(),
             },
         };

@@ -379,8 +379,6 @@ where
         {
             if let Some((m, is_joint_to_next)) = iter.current_punct3(p) {
                 if let Some((kind, text)) = match m {
-                    ('<', '<', '=') => Some((SHLEQ, "<<=")),
-                    ('>', '>', '=') => Some((SHREQ, ">>=")),
                     ('.', '.', '.') => Some((DOTDOTDOT, "...")),
                     ('.', '.', '=') => Some((DOTDOTEQ, "..=")),
                     _ => None,

@@ -391,23 +389,6 @@ where

             if let Some((m, is_joint_to_next)) = iter.current_punct2(p) {
                 if let Some((kind, text)) = match m {
-                    ('<', '<') => Some((SHL, "<<")),
-                    ('>', '>') => Some((SHR, ">>")),
-
-                    ('|', '|') => Some((PIPEPIPE, "||")),
-                    ('&', '&') => Some((AMPAMP, "&&")),
-                    ('%', '=') => Some((PERCENTEQ, "%=")),
-                    ('*', '=') => Some((STAREQ, "*=")),
-                    ('/', '=') => Some((SLASHEQ, "/=")),
-                    ('^', '=') => Some((CARETEQ, "^=")),
-
-                    ('&', '=') => Some((AMPEQ, "&=")),
-                    ('|', '=') => Some((PIPEEQ, "|=")),
-                    ('-', '=') => Some((MINUSEQ, "-=")),
-                    ('+', '=') => Some((PLUSEQ, "+=")),
-                    ('>', '=') => Some((GTEQ, ">=")),
-                    ('<', '=') => Some((LTEQ, "<=")),
-
                     ('-', '>') => Some((THIN_ARROW, "->")),
                     ('!', '=') => Some((NEQ, "!=")),
                     ('=', '>') => Some((FAT_ARROW, "=>")),
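For orientation, a sketch of the longest-match gluing that the conversion code above performs after this change (the returned token-kind names here are plain strings, not the real SyntaxKind values): a three-character operator is tried first, then a two-character one, otherwise the punct stays a single character.

    fn glue(c1: char, c2: char, c3: char) -> (&'static str, usize) {
        // Try three-char operators first...
        match (c1, c2, c3) {
            ('.', '.', '.') => ("DOTDOTDOT", 3),
            ('.', '.', '=') => ("DOTDOTEQ", 3),
            // ...then fall back to two-char operators, then a single punct.
            _ => match (c1, c2) {
                ('-', '>') => ("THIN_ARROW", 2),
                ('!', '=') => ("NEQ", 2),
                ('=', '>') => ("FAT_ARROW", 2),
                _ => ("PUNCT", 1),
            },
        }
    }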