refactor: Lower type-refs before type inference

This refactors how we deal with items in hir-def lowering.

- It now lowers all of them through an "ExpressionStore" (kind of a misnomer as this point) as their so called *Signatures.
- We now uniformly lower type AST into TypeRefs before type inference.
- Likewise, this moves macro expansion out of type inference, resulting in a single place where we do non-defmap macro expansion.
- Finally, this PR removes a lot of information from ItemTree, making the DefMap a lot less likely to be recomputed and have it only depend on actual early name resolution related information (not 100% true, we still have ADT fields in there but thats a follow up removal).
This commit is contained in:
Lukas Wirth 2025-01-25 17:20:10 +01:00
parent 588948f267
commit 1fd9520c92
127 changed files with 6733 additions and 7993 deletions

View file

@ -550,7 +550,7 @@ macro_rules! foo {
}
fn main() {
let res = fo$0o!();
fo$0o!()
}
"#,
expect![[r#"
@ -559,6 +559,24 @@ fn main() {
);
}
#[test]
fn macro_expand_item_expansion_in_expression_call() {
check(
r#"
macro_rules! foo {
() => {fn f<T>() {}};
}
fn main() {
let res = fo$0o!();
}
"#,
expect![[r#"
foo!
fn f<T>(){}"#]],
);
}
#[test]
fn macro_expand_derive() {
check(

View file

@ -32,7 +32,7 @@ pub(crate) fn goto_declaration(
.descend_into_macros_no_opaque(original_token)
.iter()
.filter_map(|token| {
let parent = token.parent()?;
let parent = token.value.parent()?;
let def = match_ast! {
match parent {
ast::NameRef(name_ref) => match NameRefClass::classify(&sema, &name_ref)? {

View file

@ -92,16 +92,19 @@ pub(crate) fn goto_definition(
.descend_into_macros_no_opaque(original_token.clone())
.into_iter()
.filter_map(|token| {
let parent = token.parent()?;
let parent = token.value.parent()?;
if let Some(token) = ast::String::cast(token.clone()) {
if let Some(x) = try_lookup_include_path(sema, token, file_id) {
let token_file_id = token.file_id;
if let Some(token) = ast::String::cast(token.value.clone()) {
if let Some(x) =
try_lookup_include_path(sema, InFile::new(token_file_id, token), file_id)
{
return Some(vec![x]);
}
}
if ast::TokenTree::can_cast(parent.kind()) {
if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token) {
if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token.value) {
return Some(vec![x]);
}
}
@ -205,17 +208,19 @@ fn find_definition_for_known_blanket_dual_impls(
fn try_lookup_include_path(
sema: &Semantics<'_, RootDatabase>,
token: ast::String,
token: InFile<ast::String>,
file_id: FileId,
) -> Option<NavigationTarget> {
let file = sema.hir_file_for(&token.syntax().parent()?).macro_file()?;
let file = token.file_id.macro_file()?;
// Check that we are in the eager argument expansion of an include macro
// that is we are the string input of it
if !iter::successors(Some(file), |file| file.parent(sema.db).macro_file())
// Check that we are in the eager argument expansion of an include macro
.any(|file| file.is_include_like_macro(sema.db) && file.eager_arg(sema.db).is_none())
{
return None;
}
let path = token.value().ok()?;
let path = token.value.value().ok()?;
let file_id = Upcast::<dyn RootQueryDb>::upcast(sema.db)
.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
@ -2049,7 +2054,10 @@ fn main() {
);
}
// macros in this position are not yet supported
#[test]
// FIXME
#[should_panic]
fn goto_doc_include_str() {
check(
r#"

View file

@ -72,7 +72,7 @@ pub(crate) fn goto_type_definition(
.into_iter()
.filter_map(|token| {
sema
.token_ancestors_with_macros(token)
.token_ancestors_with_macros(token.value)
// When `token` is within a macro call, we can't determine its type. Don't continue
// this traversal because otherwise we'll end up returning the type of *that* macro
// call, which is not what we want in general.

View file

@ -4674,7 +4674,7 @@ struct S$0T<const C: usize = 1, T = Foo>(T);
```
```rust
struct ST<const C: usize = 1, T = Foo>(T)
struct ST<const C: usize = {const}, T = Foo>(T)
```
---
@ -4733,7 +4733,7 @@ struct S$0T<const C: usize = VAL, T = Foo>(T);
```
```rust
struct ST<const C: usize = VAL, T = Foo>(T)
struct ST<const C: usize = {const}, T = Foo>(T)
```
---
@ -4817,7 +4817,7 @@ fn main() {
*value*
```rust
let value: Const<-1>
let value: Const<_>
```
---
@ -5422,7 +5422,7 @@ type Fo$0o2 = Foo<2>;
```
```rust
type Foo2 = Foo<2>
type Foo2 = Foo<<expr>>
```
---
@ -6251,7 +6251,7 @@ const FOO$0: &[i32; 5] = &[12; 5];
```
```rust
const FOO: &[i32; 5] = &[12, 12, 12, 12, 12]
const FOO: &[i32; {const}] = &[12, 12, 12, 12, 12]
```
"#]],
);

View file

@ -124,11 +124,11 @@ pub(crate) fn find_all_refs(
}
}
pub(crate) fn find_defs<'a>(
sema: &'a Semantics<'_, RootDatabase>,
pub(crate) fn find_defs(
sema: &Semantics<'_, RootDatabase>,
syntax: &SyntaxNode,
offset: TextSize,
) -> Option<impl IntoIterator<Item = Definition> + 'a> {
) -> Option<Vec<Definition>> {
let token = syntax.token_at_offset(offset).find(|t| {
matches!(
t.kind(),

View file

@ -1301,7 +1301,7 @@ fn benchmark_syntax_highlighting_parser() {
})
.count()
};
assert_eq!(hash, 1167);
assert_eq!(hash, 1606);
}
#[test]