Auto merge of #16819 - Veykril:span-upmapping, r=Veykril

internal: Improve rooted upmapping

cc https://github.com/rust-lang/rust-analyzer/issues/16235
bors 2024-03-12 12:58:52 +00:00
commit a2e274142f
24 changed files with 231 additions and 153 deletions


@@ -10,7 +10,7 @@ use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
use crate::{
db::{self, ExpandDatabase},
map_node_range_up, span_for_offset, MacroFileIdExt,
map_node_range_up, map_node_range_up_rooted, span_for_offset, MacroFileIdExt,
};
/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
@@ -38,6 +38,9 @@ impl<N: AstIdNode> AstId<N> {
pub fn to_node(&self, db: &dyn ExpandDatabase) -> N {
self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
}
pub fn to_range(&self, db: &dyn ExpandDatabase) -> TextRange {
self.to_ptr(db).text_range()
}
pub fn to_in_file_node(&self, db: &dyn ExpandDatabase) -> crate::InFile<N> {
crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
}
@@ -49,6 +52,9 @@ impl<N: AstIdNode> AstId<N> {
pub type ErasedAstId = crate::InFile<ErasedFileAstId>;
impl ErasedAstId {
pub fn to_range(&self, db: &dyn ExpandDatabase) -> TextRange {
self.to_ptr(db).text_range()
}
pub fn to_ptr(&self, db: &dyn ExpandDatabase) -> SyntaxNodePtr {
db.ast_id_map(self.file_id).get_erased(self.value)
}
@@ -173,24 +179,8 @@ impl InFile<&SyntaxNode> {
///
/// For attributes and derives, this will point back to the attribute only.
/// For the entire item, use [`InFile::original_file_range_full`].
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return res;
}
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range(db)
}
}
pub fn original_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
self.map(SyntaxNode::text_range).original_node_file_range_rooted(db)
}
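
For orientation, a minimal usage sketch (not part of this diff; the wrapper name is hypothetical): the renamed method maps a node that may live inside a macro expansion back to a range in a real file, falling back to the whole macro call range when root-context upmapping fails.

fn diagnostic_range(db: &dyn db::ExpandDatabase, node: InFile<&SyntaxNode>) -> FileRange {
    // Delegates to InFile<TextRange>::original_node_file_range_rooted further down.
    node.original_file_range_rooted(db)
}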
/// Falls back to the macro call range if the node cannot be mapped up fully.
@@ -198,23 +188,7 @@ impl InFile<&SyntaxNode> {
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return res;
}
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range_with_body(db)
}
}
self.map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db)
}
/// Attempts to map the syntax node back up through its macro calls.
@@ -222,17 +196,10 @@ impl InFile<&SyntaxNode> {
self,
db: &dyn db::ExpandDatabase,
) -> Option<(FileRange, SyntaxContextId)> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT))
}
HirFileIdRepr::MacroFile(mac_file) => {
map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
}
}
self.map(SyntaxNode::text_range).original_node_file_range_opt(db)
}
pub fn original_syntax_node(
pub fn original_syntax_node_rooted(
self,
db: &dyn db::ExpandDatabase,
) -> Option<InRealFile<SyntaxNode>> {
@@ -242,25 +209,21 @@ impl InFile<&SyntaxNode> {
HirFileIdRepr::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value.clone() })
}
HirFileIdRepr::MacroFile(m) => m,
HirFileIdRepr::MacroFile(m) if m.is_attr_macro(db) => m,
_ => return None,
};
if !file_id.is_attr_macro(db) {
return None;
}
let (FileRange { file_id, range }, ctx) =
map_node_range_up(db, &db.expansion_span_map(file_id), self.value.text_range())?;
let FileRange { file_id, range } =
map_node_range_up_rooted(db, &db.expansion_span_map(file_id), self.value.text_range())?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behavior.
if !ctx.is_root() {
return None;
}
let anc = db.parse(file_id).syntax_node().covering_element(range);
let kind = self.value.kind();
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
let value = anc.ancestors().find(|it| it.kind() == kind)?;
let value = db
.parse(file_id)
.syntax_node()
.covering_element(range)
.ancestors()
.take_while(|it| it.text_range() == range)
.find(|it| it.kind() == kind)?;
Some(InRealFile::new(file_id, value))
}
}
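
A brief sketch of the renamed method's contract, with a hypothetical caller: the original node is only recovered for attribute macro expansions, and only when the upmapped root-context range covers a node of the same kind.

fn node_in_real_file(
    db: &dyn db::ExpandDatabase,
    node: InFile<&SyntaxNode>,
) -> Option<InRealFile<SyntaxNode>> {
    // Returns the node as-is for real files; None for non-attribute macro files
    // or when only non-root spans map back to the original file.
    node.original_syntax_node_rooted(db)
}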
@@ -355,8 +318,8 @@ impl InFile<TextRange> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => {
match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) {
Some((it, SyntaxContextId::ROOT)) => it,
match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range(db)
@@ -366,6 +329,24 @@ impl InFile<TextRange> {
}
}
pub fn original_node_file_range_with_macro_call_body(
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => {
match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range_with_body(db)
}
}
}
}
}
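
A hedged sketch of the new `InFile<TextRange>` variant, assuming a hypothetical caller that only has a range rather than a syntax node at hand:

fn full_call_range(db: &dyn db::ExpandDatabase, range: InFile<TextRange>) -> FileRange {
    // On fallback, the returned range covers the macro call including its body.
    range.original_node_file_range_with_macro_call_body(db)
}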
pub fn original_node_file_range_opt(
self,
db: &dyn db::ExpandDatabase,
@@ -395,18 +376,12 @@ impl<N: AstNode> InFile<N> {
return None;
}
let (FileRange { file_id, range }, ctx) = map_node_range_up(
let FileRange { file_id, range } = map_node_range_up_rooted(
db,
&db.expansion_span_map(file_id),
self.value.syntax().text_range(),
)?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if !ctx.is_root() {
return None;
}
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
let anc = db.parse(file_id).syntax_node().covering_element(range);
let value = anc.ancestors().find_map(N::cast)?;


@@ -25,13 +25,16 @@ pub mod span_map;
mod cfg_process;
mod fixup;
use attrs::collect_attrs;
use rustc_hash::FxHashMap;
use triomphe::Arc;
use std::{fmt, hash::Hash};
use base_db::{salsa::impl_intern_value_trivial, CrateId, Edition, FileId};
use either::Either;
use span::{ErasedFileAstId, FileRange, HirFileIdRepr, Span, SyntaxContextData, SyntaxContextId};
use span::{
ErasedFileAstId, FileRange, HirFileIdRepr, Span, SpanAnchor, SyntaxContextData, SyntaxContextId,
};
use syntax::{
ast::{self, AstNode},
SyntaxNode, SyntaxToken, TextRange, TextSize,
@@ -683,6 +686,8 @@ impl ExpansionInfo {
}
/// Maps the passed-in file range down into a macro expansion if it is the input to a macro call.
///
/// Note that this does a linear search through the entire backing vector of the spanmap.
pub fn map_range_down(
&self,
span: Span,
@@ -793,7 +798,34 @@ impl ExpansionInfo {
}
}
/// Maps up the text range out of the expansion hierarchy back into the original file it's from, only
/// considering the root spans contained in the range.
/// Unlike [`map_node_range_up`], differing syntax contexts do not make this return `None` (non-root
/// spans are simply ignored); it still returns `None` if the root spans disagree on their anchor.
pub fn map_node_range_up_rooted(
db: &dyn ExpandDatabase,
exp_map: &ExpansionSpanMap,
range: TextRange,
) -> Option<FileRange> {
let mut spans = exp_map.spans_for_range(range).filter(|span| span.ctx.is_root());
let Span { range, anchor, ctx: _ } = spans.next()?;
let mut start = range.start();
let mut end = range.end();
for span in spans {
if span.anchor != anchor {
return None;
}
start = start.min(span.range.start());
end = end.max(span.range.end());
}
let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some(FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset })
}
/// Maps up the text range out of the expansion hierarchy back into the original file it's from.
///
/// This will return `None` if any anchors or syntax contexts differ.
pub fn map_node_range_up(
db: &dyn ExpandDatabase,
exp_map: &ExpansionSpanMap,
@@ -819,6 +851,29 @@ pub fn map_node_range_up(
))
}
/// Maps up the text range out of the expansion hierarchy back into the original file it's from.
/// This version will aggregate the ranges of all spans with the same anchor and syntax context.
pub fn map_node_range_up_aggregated(
db: &dyn ExpandDatabase,
exp_map: &ExpansionSpanMap,
range: TextRange,
) -> FxHashMap<(SpanAnchor, SyntaxContextId), TextRange> {
let mut map = FxHashMap::default();
for span in exp_map.spans_for_range(range) {
let range = map.entry((span.anchor, span.ctx)).or_insert_with(|| span.range);
*range = TextRange::new(
range.start().min(span.range.start()),
range.end().max(span.range.end()),
);
}
for ((anchor, _), range) in &mut map {
let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
*range += anchor_offset;
}
map
}
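
And a sketch of the aggregated variant, assuming a hypothetical caller that wants every contributing original-file region (e.g. for highlighting) rather than a single rooted range:

fn contributing_regions(
    db: &dyn ExpandDatabase,
    exp_map: &ExpansionSpanMap,
    range: TextRange,
) -> Vec<FileRange> {
    map_node_range_up_aggregated(db, exp_map, range)
        .into_iter()
        .map(|((anchor, _ctx), range)| FileRange { file_id: anchor.file_id, range })
        .collect()
}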
/// Looks up the span at the given offset.
pub fn span_for_offset(
db: &dyn ExpandDatabase,


@@ -1,5 +1,5 @@
//! Span maps for real files and macro expansions.
use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span};
use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span, SyntaxContextId};
use syntax::{AstNode, TextRange};
use triomphe::Arc;
@@ -7,7 +7,7 @@ pub use span::RealSpanMap;
use crate::db::ExpandDatabase;
pub type ExpansionSpanMap = span::SpanMap<Span>;
pub type ExpansionSpanMap = span::SpanMap<SyntaxContextId>;
/// Spanmap for a macro file or a real file
#[derive(Clone, Debug, PartialEq, Eq)]