Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-27 12:29:21 +00:00

Fix float-split hack not setting up spans correctly

commit 394d11b0fa (parent 8423893d1c)
15 changed files with 197 additions and 58 deletions
@@ -16,6 +16,12 @@ pub type SpanData = tt::SpanData<SpanAnchor, SyntaxContextId>;
 pub struct SyntaxContextId(InternId);
 crate::impl_intern_key!(SyntaxContextId);
 
+impl fmt::Display for SyntaxContextId {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{}", self.0.as_u32())
+    }
+}
+
 impl SyntaxContext for SyntaxContextId {
     const DUMMY: Self = Self::ROOT;
     // veykril(HACK): salsa doesn't allow us fetching the id of the current input to be allocated so
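Note: this Display impl is what lets a span render its syntax context as a bare number (the trailing `\2#`-style suffix in the test expectations further down). A standalone sketch of the same pattern, using a local stand-in for SyntaxContextId/InternId rather than the crate's types:

    use std::fmt;

    // Stand-in for SyntaxContextId(InternId): Display prints the raw index.
    struct CtxId(u32);

    impl fmt::Display for CtxId {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "{}", self.0)
        }
    }

    fn main() {
        // Renders as the bare index, e.g. the `2` in a span suffix like `\2#`.
        println!("{}", CtxId(2));
    }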
@@ -1322,6 +1322,7 @@ fn derive_macro_as_call_id(
     item_attr: &AstIdWithPath<ast::Adt>,
     derive_attr_index: AttrId,
     derive_pos: u32,
+    call_site: SyntaxContextId,
     krate: CrateId,
     resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
 ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
@@ -1336,8 +1337,7 @@ fn derive_macro_as_call_id(
             derive_index: derive_pos,
             derive_attr_index,
         },
-        //FIXME
-        SyntaxContextId::ROOT,
+        call_site,
     );
     Ok((macro_id, def_id, call_id))
 }
@@ -1367,8 +1367,7 @@ fn attr_macro_as_call_id(
             attr_args: Arc::new(arg),
             invoc_attr_index: macro_attr.id,
         },
-        //FIXME
-        SyntaxContextId::ROOT,
+        macro_attr.ctxt,
     )
 }
 intern::impl_internable!(
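Note: both helpers used to pin the expansion's syntax context to SyntaxContextId::ROOT (the removed `//FIXME` lines); now the caller supplies the call-site context. A minimal standalone sketch of that signature change, with local stand-in types (not the crate's own):

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct SyntaxContextId(u32);

    impl SyntaxContextId {
        const ROOT: Self = SyntaxContextId(0);
    }

    struct MacroCall {
        call_site: SyntaxContextId,
    }

    // Before: the body hard-coded `call_site: SyntaxContextId::ROOT`.
    fn make_call(call_site: SyntaxContextId) -> MacroCall {
        MacroCall { call_site }
    }

    fn main() {
        // The caller can now thread through a real hygiene context.
        let hygienic = make_call(SyntaxContextId(3));
        assert_ne!(hygienic.call_site, SyntaxContextId::ROOT);
    }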
@@ -23,10 +23,12 @@ macro_rules! f {
     };
 }
 
-// +tokenids
+// +spans
 f!(struct MyTraitMap2);
 "#,
-        // FIXME: #SpanAnchor(FileId(0), 1)@91..92 why is there whitespace annotated with a span here?
+        // FIXME: #SpanAnchor(FileId(0), 1)@91..92\2# why is there whitespace annotated with a span
+        // here? Presumably because the leading `::` is getting two spans instead of one? Sounds
+        // liek glueing might be failing here
         expect![[r#"
 macro_rules! f {
     ( struct $ident:ident ) => {
@@ -36,9 +38,9 @@ macro_rules! f {
     };
 }
 
-struct#SpanAnchor(FileId(0), 1)@58..64 MyTraitMap2#SpanAnchor(FileId(0), 2)@23..34 {#SpanAnchor(FileId(0), 1)@72..73
-    map#SpanAnchor(FileId(0), 1)@86..89:#SpanAnchor(FileId(0), 1)@89..90 #SpanAnchor(FileId(0), 1)@91..92::#SpanAnchor(FileId(0), 1)@92..93std#SpanAnchor(FileId(0), 1)@93..96::#SpanAnchor(FileId(0), 1)@97..98collections#SpanAnchor(FileId(0), 1)@98..109::#SpanAnchor(FileId(0), 1)@110..111HashSet#SpanAnchor(FileId(0), 1)@111..118<#SpanAnchor(FileId(0), 1)@118..119(#SpanAnchor(FileId(0), 1)@119..120)#SpanAnchor(FileId(0), 1)@120..121>#SpanAnchor(FileId(0), 1)@121..122,#SpanAnchor(FileId(0), 1)@122..123
-}#SpanAnchor(FileId(0), 1)@132..133
+struct#FileId(0):1@58..64\2# MyTraitMap2#FileId(0):2@20..31\0# {#FileId(0):1@72..73\2#
+    map#FileId(0):1@86..89\2#:#FileId(0):1@89..90\2# #FileId(0):1@91..92\2#::#FileId(0):1@92..93\2#std#FileId(0):1@93..96\2#::#FileId(0):1@97..98\2#collections#FileId(0):1@98..109\2#::#FileId(0):1@110..111\2#HashSet#FileId(0):1@111..118\2#<#FileId(0):1@118..119\2#(#FileId(0):1@119..120\2#)#FileId(0):1@120..121\2#>#FileId(0):1@121..122\2#,#FileId(0):1@122..123\2#
+}#FileId(0):1@132..133\2#
 "#]],
     );
 }
@@ -49,18 +51,19 @@ fn token_mapping_floats() {
     // (and related issues)
     check(
         r#"
-// +tokenids
+// +spans
 macro_rules! f {
     ($($tt:tt)*) => {
         $($tt)*
     };
 }
 
-// +tokenids
+// +spans
 f! {
     fn main() {
         1;
         1.0;
+        ((1,),).0.0;
         let x = 1;
     }
 }
@@ -68,18 +71,19 @@ f! {
 
 "#,
         expect![[r#"
-// +tokenids
+// +spans
 macro_rules! f {
     ($($tt:tt)*) => {
         $($tt)*
     };
 }
 
-fn#SpanAnchor(FileId(0), 2)@22..24 main#SpanAnchor(FileId(0), 2)@25..29(#SpanAnchor(FileId(0), 2)@29..30)#SpanAnchor(FileId(0), 2)@30..31 {#SpanAnchor(FileId(0), 2)@32..33
-    1#SpanAnchor(FileId(0), 2)@42..43;#SpanAnchor(FileId(0), 2)@43..44
-    1.0#SpanAnchor(FileId(0), 2)@53..56;#SpanAnchor(FileId(0), 2)@56..57
-    let#SpanAnchor(FileId(0), 2)@66..69 x#SpanAnchor(FileId(0), 2)@70..71 =#SpanAnchor(FileId(0), 2)@72..73 1#SpanAnchor(FileId(0), 2)@74..75;#SpanAnchor(FileId(0), 2)@75..76
-}#SpanAnchor(FileId(0), 2)@81..82
+fn#FileId(0):2@19..21\0# main#FileId(0):2@22..26\0#(#FileId(0):2@26..27\0#)#FileId(0):2@27..28\0# {#FileId(0):2@29..30\0#
+    1#FileId(0):2@39..40\0#;#FileId(0):2@40..41\0#
+    1.0#FileId(0):2@50..53\0#;#FileId(0):2@53..54\0#
+    (#FileId(0):2@63..64\0#(#FileId(0):2@64..65\0#1#FileId(0):2@65..66\0#,#FileId(0):2@66..67\0# )#FileId(0):2@67..68\0#,#FileId(0):2@68..69\0# )#FileId(0):2@69..70\0#.#FileId(0):2@70..71\0#0#FileId(0):2@71..74\0#.#FileId(0):2@71..74\0#0#FileId(0):2@71..74\0#;#FileId(0):2@74..75\0#
+    let#FileId(0):2@84..87\0# x#FileId(0):2@88..89\0# =#FileId(0):2@90..91\0# 1#FileId(0):2@92..93\0#;#FileId(0):2@93..94\0#
+}#FileId(0):2@99..100\0#
 
 
 "#]],
@@ -123,7 +127,7 @@ macro_rules! identity {
 }
 
 fn main(foo: ()) {
-    format_args/*+tokenids*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
+    format_args/*+spans*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
 }
 
 "#,
@@ -137,13 +141,36 @@ macro_rules! identity {
 }
 
 fn main(foo: ()) {
-    builtin#SpanAnchor(FileId(0), 0)@0..0 ##SpanAnchor(FileId(0), 0)@0..0format_args#SpanAnchor(FileId(0), 0)@0..0 (#SpanAnchor(FileId(0), 6)@25..26"{} {} {}"#SpanAnchor(FileId(0), 6)@26..36,#SpanAnchor(FileId(0), 6)@36..37 format_args#SpanAnchor(FileId(0), 6)@38..49!#SpanAnchor(FileId(0), 6)@49..50(#SpanAnchor(FileId(0), 6)@50..51"{}"#SpanAnchor(FileId(0), 6)@51..55,#SpanAnchor(FileId(0), 6)@55..56 0#SpanAnchor(FileId(0), 6)@57..58)#SpanAnchor(FileId(0), 6)@58..59,#SpanAnchor(FileId(0), 6)@59..60 foo#SpanAnchor(FileId(0), 6)@61..64,#SpanAnchor(FileId(0), 6)@64..65 identity#SpanAnchor(FileId(0), 6)@66..74!#SpanAnchor(FileId(0), 6)@74..75(#SpanAnchor(FileId(0), 6)@75..7610#SpanAnchor(FileId(0), 6)@76..78)#SpanAnchor(FileId(0), 6)@78..79,#SpanAnchor(FileId(0), 6)@79..80 "bar"#SpanAnchor(FileId(0), 6)@81..86)#SpanAnchor(FileId(0), 6)@86..87
+    builtin#FileId(0):0@0..0\0# ##FileId(0):0@0..0\0#format_args#FileId(0):0@0..0\0# (#FileId(0):6@22..23\0#"{} {} {}"#FileId(0):6@23..33\0#,#FileId(0):6@33..34\0# format_args#FileId(0):6@35..46\0#!#FileId(0):6@46..47\0#(#FileId(0):6@47..48\0#"{}"#FileId(0):6@48..52\0#,#FileId(0):6@52..53\0# 0#FileId(0):6@54..55\0#)#FileId(0):6@55..56\0#,#FileId(0):6@56..57\0# foo#FileId(0):6@58..61\0#,#FileId(0):6@61..62\0# identity#FileId(0):6@63..71\0#!#FileId(0):6@71..72\0#(#FileId(0):6@72..73\0#10#FileId(0):6@73..75\0#)#FileId(0):6@75..76\0#,#FileId(0):6@76..77\0# "bar"#FileId(0):6@78..83\0#)#FileId(0):6@83..84\0#
 }
 
 "##]],
     );
 }
 
+#[test]
+fn token_mapping_across_files() {
+    check(
+        r#"
+//- /lib.rs
+#[macro_use]
+mod foo;
+
+mk_struct/*+spans*/!(Foo with u32);
+//- /foo.rs
+macro_rules! mk_struct {
+    ($foo:ident with $ty:ty) => { struct $foo($ty); }
+}
+"#,
+        expect![[r#"
+#[macro_use]
+mod foo;
+
+struct#FileId(1):1@59..65\2# Foo#FileId(0):2@21..24\0#(#FileId(1):1@70..71\2#u32#FileId(0):2@30..33\0#)#FileId(1):1@74..75\2#;#FileId(1):1@75..76\2#
+"#]],
+    );
+}
+
 #[test]
 fn float_field_access_macro_input() {
     check(
@@ -103,11 +103,11 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
     for (call, exp) in expansions.into_iter().rev() {
         let mut tree = false;
         let mut expect_errors = false;
-        let mut show_token_ids = false;
+        let mut show_spans = false;
         for comment in call.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
             tree |= comment.to_string().contains("+tree");
             expect_errors |= comment.to_string().contains("+errors");
-            show_token_ids |= comment.to_string().contains("+tokenids");
+            show_spans |= comment.to_string().contains("+spans");
         }
 
         let mut expn_text = String::new();
@@ -128,10 +128,8 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
                 parse.syntax_node(),
             );
         }
-        let pp = pretty_print_macro_expansion(
-            parse.syntax_node(),
-            show_token_ids.then_some(&*token_map),
-        );
+        let pp =
+            pretty_print_macro_expansion(parse.syntax_node(), show_spans.then_some(&*token_map));
         let indent = IndentLevel::from_node(call.syntax());
         let pp = reindent(indent, pp);
         format_to!(expn_text, "{}", pp);
@@ -166,9 +164,18 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
             }
             _ => None,
         };
 
        if let Some(src) = src {
            if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) {
-                let pp = pretty_print_macro_expansion(src.value, None);
+                let call = src.file_id.call_node(&db).expect("macro file");
+                let mut show_spans = false;
+                for comment in call.value.children_with_tokens().filter(|it| it.kind() == COMMENT) {
+                    show_spans |= comment.to_string().contains("+spans");
+                }
+                let pp = pretty_print_macro_expansion(
+                    src.value,
+                    show_spans.then_some(&db.span_map(src.file_id)),
+                );
                 format_to!(expanded_text, "\n{}", pp)
            }
        }
@@ -250,7 +257,14 @@ fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&SpanMap>) -> String
         format_to!(res, "{}", token);
         if let Some(map) = map {
             if let Some(span) = map.span_for_range(token.text_range()) {
-                format_to!(res, "#{:?}@{:?}", span.anchor, span.range);
+                format_to!(
+                    res,
+                    "#{:?}:{:?}@{:?}\\{}#",
+                    span.anchor.file_id,
+                    span.anchor.ast_id.into_raw(),
+                    span.range,
+                    span.ctx
+                );
             }
         }
     }
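Note: the pretty printer's span annotation changes here from `#<anchor>@<range>` to `#<file>:<ast id>@<range>\<ctx>#`, which is exactly the format the updated test expectations use. A self-contained sketch of that renderer; every type below is a local stand-in for the crate's SpanAnchor/SpanData, not the real API:

    use std::fmt::Write;
    use std::ops::Range;

    // Stand-in for SpanData: file + AST anchor + range + syntax context.
    struct Span {
        file_id: u32,
        ast_id: u32,
        range: Range<u32>,
        ctx: u32,
    }

    fn render(res: &mut String, token: &str, span: &Span) {
        // Mirrors the `#{:?}:{:?}@{:?}\{}#` format string in the hunk above.
        let _ = write!(
            res,
            "{}#FileId({}):{}@{}..{}\\{}#",
            token, span.file_id, span.ast_id, span.range.start, span.range.end, span.ctx
        );
    }

    fn main() {
        let mut out = String::new();
        render(&mut out, "1", &Span { file_id: 0, ast_id: 2, range: 39..40, ctx: 0 });
        assert_eq!(out, "1#FileId(0):2@39..40\\0#");
    }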
@@ -93,6 +93,41 @@ fn foo() {
     );
 }
 
+#[test]
+fn macro_rules_in_attr() {
+    // Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
+    check(
+        r#"
+//- proc_macros: identity
+macro_rules! id {
+    ($($t:tt)*) => {
+        $($t)*
+    };
+}
+id! {
+    #[proc_macros::identity]
+    impl Foo for WrapBj {
+        async fn foo(&self) {
+            self.id().await;
+        }
+    }
+}
+"#,
+        expect![[r#"
+macro_rules! id {
+    ($($t:tt)*) => {
+        $($t)*
+    };
+}
+#[proc_macros::identity] impl Foo for WrapBj {
+    async fn foo(&self ) {
+        self .id().await ;
+    }
+}
+"#]],
+    );
+}
+
 #[test]
 fn float_parsing_panic() {
     // Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
@@ -127,3 +162,27 @@ macro_rules! id {
 "#]],
     );
 }
+
+#[test]
+fn float_attribute_mapping() {
+    check(
+        r#"
+//- proc_macros: identity
+//+spans
+#[proc_macros::identity]
+fn foo(&self) {
+    self.0. 1;
+}
+"#,
+        expect![[r#"
+//+spans
+#[proc_macros::identity]
+fn foo(&self) {
+    self.0. 1;
+}
+
+fn#FileId(0):1@34..36\0# foo#FileId(0):1@37..40\0#(#FileId(0):1@40..41\0#&#FileId(0):1@41..42\0#self#FileId(0):1@42..46\0# )#FileId(0):1@46..47\0# {#FileId(0):1@48..49\0#
+    self#FileId(0):1@54..58\0# .#FileId(0):1@58..59\0#0#FileId(0):1@59..60\0#.#FileId(0):1@60..61\0#1#FileId(0):1@62..63\0#;#FileId(0):1@63..64\0#
+}#FileId(0):1@65..66\0#"#]],
+    );
+}
@@ -219,6 +219,7 @@ enum MacroDirectiveKind {
         ast_id: AstIdWithPath<ast::Adt>,
         derive_attr: AttrId,
         derive_pos: usize,
+        call_site: SyntaxContextId,
     },
     Attr {
         ast_id: AstIdWithPath<ast::Item>,
@@ -324,7 +325,7 @@ impl DefCollector<'_> {
                 .parse_path_comma_token_tree(self.db.upcast())
                 .into_iter()
                 .flatten()
-                .filter_map(|feat| match feat.segments() {
+                .filter_map(|(feat, _)| match feat.segments() {
                     [name] => Some(name.to_smol_str()),
                     _ => None,
                 });
@@ -1139,12 +1140,13 @@ impl DefCollector<'_> {
                         return false;
                    }
                }
-            MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
+            MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => {
                 let id = derive_macro_as_call_id(
                     self.db,
                     ast_id,
                     *derive_attr,
                     *derive_pos as u32,
+                    *call_site,
                     self.def_map.krate,
                     resolver,
                 );
@@ -1242,7 +1244,7 @@ impl DefCollector<'_> {
                 match attr.parse_path_comma_token_tree(self.db.upcast()) {
                     Some(derive_macros) => {
                         let mut len = 0;
-                        for (idx, path) in derive_macros.enumerate() {
+                        for (idx, (path, call_site)) in derive_macros.enumerate() {
                             let ast_id = AstIdWithPath::new(file_id, ast_id.value, path);
                             self.unresolved_macros.push(MacroDirective {
                                 module_id: directive.module_id,
@@ -1251,6 +1253,7 @@ impl DefCollector<'_> {
                                     ast_id,
                                     derive_attr: attr.id,
                                     derive_pos: idx,
+                                    call_site,
                                 },
                                 container: directive.container,
                             });
@@ -1438,7 +1441,7 @@ impl DefCollector<'_> {
                 ));
            }
        }
-        MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
+        MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site: _ } => {
            self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
                directive.module_id,
                MacroCallKind::Derive {
@@ -1828,7 +1831,7 @@ impl ModCollector<'_, '_> {
            );
            return;
        };
-        for path in paths {
+        for (path, _) in paths {
            if let Some(name) = path.as_ident() {
                single_imports.push(name.clone());
            }
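Note: the collector change is mechanical once the variant grows a field; every match on MacroDirectiveKind::Derive must bind or explicitly ignore the new context. A tiny standalone mirror of that pattern (illustrative names, not the crate's types):

    #[derive(Clone, Copy, Debug)]
    struct SyntaxContextId(u32);

    enum MacroDirectiveKind {
        // The variant now carries the call-site context alongside its position.
        Derive { derive_pos: usize, call_site: SyntaxContextId },
    }

    fn resolve(kind: &MacroDirectiveKind) -> SyntaxContextId {
        match kind {
            // Mirrors `MacroDirectiveKind::Derive { .., call_site } => ..` above;
            // diagnostics-only code ignores it with `call_site: _`.
            MacroDirectiveKind::Derive { derive_pos: _, call_site } => *call_site,
        }
    }

    fn main() {
        let k = MacroDirectiveKind::Derive { derive_pos: 0, call_site: SyntaxContextId(4) };
        println!("{:?}", resolve(&k));
    }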
@@ -8,9 +8,7 @@ use base_db::{fixture::WithFixture, SourceDatabase};
 use expect_test::{expect, Expect};
 use triomphe::Arc;
 
-use crate::{db::DefDatabase, test_db::TestDB};
+use crate::{db::DefDatabase, nameres::DefMap, test_db::TestDB};
 
-use super::DefMap;
-
 fn compute_crate_def_map(ra_fixture: &str) -> Arc<DefMap> {
     let db = TestDB::with_files(ra_fixture);
@@ -7,7 +7,7 @@ use base_db::{
     AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, FilePosition, SourceDatabase,
     Upcast,
 };
-use hir_expand::{db::ExpandDatabase, hygiene::SyntaxContextData, InFile};
+use hir_expand::{db::ExpandDatabase, InFile};
 use rustc_hash::FxHashSet;
 use syntax::{algo, ast, AstNode};
 use triomphe::Arc;
@@ -34,7 +34,7 @@ pub(crate) struct TestDB {
 impl Default for TestDB {
     fn default() -> Self {
         let mut this = Self { storage: Default::default(), events: Default::default() };
-        this.intern_syntax_context(SyntaxContextData::root());
+        this.setup_syntax_context_root();
         this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
         this
     }
@@ -2,13 +2,16 @@
 use std::{fmt, ops};
 
 use ::tt::SpanAnchor as _;
-use base_db::{span::SpanAnchor, CrateId};
+use base_db::{
+    span::{SpanAnchor, SyntaxContextId},
+    CrateId,
+};
 use cfg::CfgExpr;
 use either::Either;
 use intern::Interned;
 use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
 use smallvec::{smallvec, SmallVec};
-use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode};
+use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
 use triomphe::Arc;
 
 use crate::{
@@ -54,6 +57,9 @@ impl RawAttrs {
                     id,
                     input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
                     path: Interned::new(ModPath::from(crate::name!(doc))),
+                    ctxt: hygiene
+                        .span_for_range(comment.syntax().text_range())
+                        .map_or(SyntaxContextId::ROOT, |s| s.ctx),
                 }),
             })
             .collect::<Vec<_>>();
@@ -191,6 +197,7 @@ pub struct Attr {
     pub id: AttrId,
     pub path: Interned<ModPath>,
     pub input: Option<Interned<AttrInput>>,
+    pub ctxt: SyntaxContextId,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -235,7 +242,14 @@ impl Attr {
         } else {
             None
         };
-        Some(Attr { id, path, input })
+        Some(Attr {
+            id,
+            path,
+            input,
+            ctxt: hygiene
+                .span_for_range(ast.syntax().text_range())
+                .map_or(SyntaxContextId::ROOT, |s| s.ctx),
+        })
     }
 
     fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
@@ -284,9 +298,8 @@ impl Attr {
     pub fn parse_path_comma_token_tree<'a>(
         &'a self,
         db: &'a dyn ExpandDatabase,
-    ) -> Option<impl Iterator<Item = ModPath> + 'a> {
+    ) -> Option<impl Iterator<Item = (ModPath, SyntaxContextId)> + 'a> {
         let args = self.token_tree_value()?;
-        dbg!(args);
 
         if args.delimiter.kind != DelimiterKind::Parenthesis {
             return None;
@@ -298,6 +311,11 @@ impl Attr {
             if tts.is_empty() {
                 return None;
             }
+            // FIXME: Absolutely wrong
+            let call_site = match tts.first().unwrap() {
+                tt::TokenTree::Leaf(l) => l.span().ctx,
+                tt::TokenTree::Subtree(s) => s.delimiter.open.ctx,
+            };
             // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
             // here.
             let subtree = tt::Subtree {
@@ -313,7 +331,7 @@ impl Attr {
                 return None;
             }
             let path = meta.path()?;
-            ModPath::from_src(db, path, &span_map)
+            Some((ModPath::from_src(db, path, &span_map)?, call_site))
         });
 
         Some(paths)
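Note: parse_path_comma_token_tree now yields each derive path paired with the syntax context taken from the attribute's first token, so callers destructure a tuple. A standalone sketch of the new calling convention (stand-in types; the path stringification is purely illustrative):

    #[derive(Clone, Copy, Debug)]
    struct SyntaxContextId(u32);

    type ModPath = String; // stand-in for the crate's ModPath

    fn parse_path_comma_token_tree() -> impl Iterator<Item = (ModPath, SyntaxContextId)> {
        // In the real code the context comes from the first token of the
        // attribute's token tree (flagged "FIXME: Absolutely wrong" above).
        vec![("serde::Serialize".to_owned(), SyntaxContextId(0))].into_iter()
    }

    fn main() {
        // Callers that only need the path ignore the context, as the
        // collector does with `for (path, _) in paths`.
        for (path, call_site) in parse_path_comma_token_tree() {
            println!("{path} at ctx {call_site:?}");
        }
    }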
@@ -103,7 +103,7 @@ pub trait ExpandDatabase: SourceDatabase {
         &self,
         macro_file: MacroFile,
     ) -> ExpandResult<(Parse<SyntaxNode>, Arc<SpanMap>)>;
-    // TODO: transparent?
+    // FIXME: This always allocates one for non macro files which is wasteful.
     #[salsa::transparent]
     fn span_map(&self, file_id: HirFileId) -> Arc<SpanMap>;
 
@@ -117,6 +117,8 @@ pub trait ExpandDatabase: SourceDatabase {
     #[salsa::interned]
     fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;
     #[salsa::transparent]
+    fn setup_syntax_context_root(&self) -> ();
+    #[salsa::transparent]
     #[salsa::invoke(hygiene::apply_mark)]
     fn apply_mark(
         &self,
@@ -770,3 +772,7 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
         Ok(())
     }
 }
+
+fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
+    db.intern_syntax_context(SyntaxContextData::root());
+}
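Note: setup_syntax_context_root exists so that the root context is interned before anything else; with a fresh interner the first interned value receives index 0, which is what SyntaxContextId::ROOT is taken to be. A standalone sketch of that invariant with a toy interner (all names local, not salsa's API):

    // Toy interner: ids are allocation order.
    struct Interner {
        data: Vec<&'static str>,
    }

    impl Interner {
        fn new() -> Self {
            Interner { data: Vec::new() }
        }
        fn intern(&mut self, v: &'static str) -> u32 {
            self.data.push(v);
            (self.data.len() - 1) as u32
        }
    }

    fn main() {
        let mut db = Interner::new();
        // Analogous to db.setup_syntax_context_root() in TestDB/RootDatabase:
        // interning the root first guarantees it comes out as id 0 (= ROOT).
        let root = db.intern("root-syntax-context");
        assert_eq!(root, 0);
    }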
@@ -7,7 +7,7 @@ use base_db::{
     AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
 };
 use hir_def::{db::DefDatabase, ModuleId};
-use hir_expand::{db::ExpandDatabase, hygiene::SyntaxContextData};
+use hir_expand::db::ExpandDatabase;
 use nohash_hasher::IntMap;
 use rustc_hash::FxHashSet;
 use syntax::TextRange;
@@ -30,7 +30,7 @@ pub(crate) struct TestDB {
 impl Default for TestDB {
     fn default() -> Self {
         let mut this = Self { storage: Default::default(), events: Default::default() };
-        this.intern_syntax_context(SyntaxContextData::root());
+        this.setup_syntax_context_root();
         this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
         this
     }
@@ -63,10 +63,10 @@ fn infer_macros_expanded() {
 }
 "#,
         expect![[r#"
-            !0..17 '{Foo(v...,2,])}': Foo
+            !0..21 '{Foo(v...2),])}': Foo
             !1..4 'Foo': Foo({unknown}) -> Foo
-            !1..16 'Foo(vec![1,2,])': Foo
-            !5..15 'vec![1,2,]': {unknown}
+            !1..20 'Foo(ve...(2),])': Foo
+            !5..19 'vec![(1),(2),]': {unknown}
             155..181 '{ ...,2); }': ()
             165..166 'x': Foo
         "#]],
@@ -96,10 +96,10 @@ fn infer_legacy_textual_scoped_macros_expanded() {
 }
 "#,
         expect![[r#"
-            !0..17 '{Foo(v...,2,])}': Foo
+            !0..21 '{Foo(v...2),])}': Foo
             !1..4 'Foo': Foo({unknown}) -> Foo
-            !1..16 'Foo(vec![1,2,])': Foo
-            !5..15 'vec![1,2,]': {unknown}
+            !1..20 'Foo(ve...(2),])': Foo
+            !5..19 'vec![(1),(2),]': {unknown}
             194..250 '{ ...,2); }': ()
             204..205 'x': Foo
             227..228 'y': {unknown}
@@ -144,6 +144,7 @@ impl RootDatabase {
         db.set_library_roots_with_durability(Default::default(), Durability::HIGH);
         db.set_expand_proc_attr_macros_with_durability(false, Durability::HIGH);
         db.update_parse_query_lru_capacity(lru_capacity);
+        db.setup_syntax_context_root();
         db
     }
 
@@ -717,24 +717,29 @@ where
     /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
     /// This occurs when a float literal is used as a field access.
     fn float_split(&mut self, has_pseudo_dot: bool) {
-        // TODO: FIXME this breaks the hygiene map
-        let (text, _span) = match self.cursor.token_tree() {
+        let (text, span) = match self.cursor.token_tree() {
             Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Literal(lit), _)) => {
                 (lit.text.as_str(), lit.span)
             }
             _ => unreachable!(),
         };
+        // FIXME: Span splitting
         match text.split_once('.') {
             Some((left, right)) => {
                 assert!(!left.is_empty());
 
                 self.inner.start_node(SyntaxKind::NAME_REF);
                 self.inner.token(SyntaxKind::INT_NUMBER, left);
                 self.inner.finish_node();
+                let range = TextRange::at(self.text_pos, TextSize::of(left));
+                self.token_map.insert(range, span);
+
                 // here we move the exit up, the original exit has been deleted in process
                 self.inner.finish_node();
 
                 self.inner.token(SyntaxKind::DOT, ".");
+                let range = TextRange::at(range.end(), TextSize::of("."));
+                self.token_map.insert(range, span);
 
                 if has_pseudo_dot {
                     assert!(right.is_empty(), "{left}.{right}");
@@ -742,11 +747,14 @@ where
                     assert!(!right.is_empty(), "{left}.{right}");
                     self.inner.start_node(SyntaxKind::NAME_REF);
                     self.inner.token(SyntaxKind::INT_NUMBER, right);
+                    let range = TextRange::at(range.end(), TextSize::of(right));
+                    self.token_map.insert(range, span);
                     self.inner.finish_node();
 
                     // the parser creates an unbalanced start node, we are required to close it here
                     self.inner.finish_node();
                 }
+                self.text_pos += TextSize::of(text);
             }
             None => unreachable!(),
         }
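Note: this is the actual float-split fix the commit title refers to. When a float field access like `1.0` forces the literal to be split into INT_NUMBER `.` INT_NUMBER, each synthesized token now records its text range in the token map against the literal's one original span, and the text position is advanced past the whole literal. A standalone sketch of the bookkeeping, modelling TextRange/TextSize/TokenMap with plain integers and a Vec:

    use std::ops::Range;

    // Stand-in for the fixed float_split: map every synthesized piece of
    // `left.right` to the original literal's span.
    fn float_split(text: &str, span: u32, pos: &mut u32, map: &mut Vec<(Range<u32>, u32)>) {
        let (left, right) = text.split_once('.').unwrap();
        let mut at = *pos;
        for part in [left, ".", right] {
            let end = at + part.len() as u32;
            map.push((at..end, span)); // all three pieces share the float's span
            at = end;
        }
        *pos += text.len() as u32; // advance past the whole literal, as the fix does
    }

    fn main() {
        let (mut pos, mut map) = (10, Vec::new());
        float_split("1.0", 7, &mut pos, &mut map);
        assert_eq!(map, vec![(10..11, 7), (11..12, 7), (12..13, 7)]);
        assert_eq!(pos, 13);
    }

The has_pseudo_dot case (an empty right half) is omitted here for brevity; the real code asserts on it and skips the second NAME_REF.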
@@ -2,6 +2,7 @@
 
 use std::hash::Hash;
 
+use stdx::never;
 use syntax::TextRange;
 use tt::Span;
 
@@ -59,11 +60,10 @@ impl<S: Span> TokenMap<S> {
             .max_by_key(|(_, _, intersection)| intersection.len())
             .map(|(_, &s, _)| s)
             .or_else(|| {
-                if self.real_file {
-                    None
-                } else {
-                    panic!("no span for range {range:?} in {:#?}", self.span_map)
+                if !self.real_file {
+                    never!("no span for range {:?} in {:#?}", range, self.span_map);
                 }
+                None
             })
     }
 
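Note: the lookup's failure mode softens here. A missing span in a real file used to return None silently while a macro file panicked; now both return None, with the macro-file case downgraded to a debug-time assertion via stdx's never! macro. A standalone sketch of that behaviour, with never! modelled by a local macro (the real one lives in the stdx crate):

    // Local stand-in for stdx::never!: debug-asserts, logs in release.
    macro_rules! never {
        ($($arg:tt)*) => {{
            debug_assert!(false, $($arg)*);
            eprintln!($($arg)*);
        }};
    }

    fn span_for_range(real_file: bool, found: Option<u32>) -> Option<u32> {
        found.or_else(|| {
            if !real_file {
                // Flagged in debug builds instead of panicking unconditionally.
                never!("no span for range in a macro file");
            }
            None
        })
    }

    fn main() {
        assert_eq!(span_for_range(true, None), None); // real file: silently None
        assert_eq!(span_for_range(true, Some(5)), Some(5));
    }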