internal: Improve rooted upmapping

This commit is contained in:
Lukas Wirth 2024-03-12 13:24:52 +01:00
parent 03d2d9016d
commit 9ba4493918
24 changed files with 231 additions and 153 deletions

View file

@ -61,15 +61,16 @@ use std::ops::Deref;
use base_db::{CrateId, Edition, FileId}; use base_db::{CrateId, Edition, FileId};
use hir_expand::{ use hir_expand::{
name::Name, proc_macro::ProcMacroKind, HirFileId, InFile, MacroCallId, MacroDefId, name::Name, proc_macro::ProcMacroKind, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId,
}; };
use itertools::Itertools; use itertools::Itertools;
use la_arena::Arena; use la_arena::Arena;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use span::FileAstId; use span::{FileAstId, ROOT_ERASED_FILE_AST_ID};
use stdx::format_to; use stdx::format_to;
use syntax::{ast, SmolStr}; use syntax::{ast, SmolStr};
use triomphe::Arc; use triomphe::Arc;
use tt::TextRange;
use crate::{ use crate::{
db::DefDatabase, db::DefDatabase,
@ -677,6 +678,25 @@ impl ModuleData {
} }
} }
/// Returns the text range covering the syntax that defines this module: the
/// whole source file for file-backed and crate-root modules, the inline
/// `mod name { .. }` item, or the defining block expression.
pub fn definition_source_range(&self, db: &dyn DefDatabase) -> InFile<TextRange> {
match &self.origin {
// File-backed modules (including the crate root): the definition is the
// entire file, addressed through the root erased AST id of that file.
&ModuleOrigin::File { definition, .. } | &ModuleOrigin::CrateRoot { definition } => {
InFile::new(
definition.into(),
ErasedAstId::new(definition.into(), ROOT_ERASED_FILE_AST_ID)
.to_range(db.upcast()),
)
}
// Inline `mod name { .. }`: resolve the module item's AST id inside the
// file that owns its item tree.
&ModuleOrigin::Inline { definition, definition_tree_id } => InFile::new(
definition_tree_id.file_id(),
AstId::new(definition_tree_id.file_id(), definition).to_range(db.upcast()),
),
// Block modules: the block expression's own AST id carries the range.
ModuleOrigin::BlockExpr { block, .. } => {
InFile::new(block.file_id, block.to_range(db.upcast()))
}
}
}
/// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`. /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
/// `None` for the crate root or block. /// `None` for the crate root or block.
pub fn declaration_source(&self, db: &dyn DefDatabase) -> Option<InFile<ast::Module>> { pub fn declaration_source(&self, db: &dyn DefDatabase) -> Option<InFile<ast::Module>> {
@ -684,6 +704,13 @@ impl ModuleData {
let value = decl.to_node(db.upcast()); let value = decl.to_node(db.upcast());
Some(InFile { file_id: decl.file_id, value }) Some(InFile { file_id: decl.file_id, value })
} }
/// Returns the text range of the node which declares this module, either a
/// `mod foo;` or a `mod foo {}`.
/// Returns `None` for the crate root or block modules, which have no declaration.
pub fn declaration_source_range(&self, db: &dyn DefDatabase) -> Option<InFile<TextRange>> {
let decl = self.origin.declaration()?;
Some(InFile { file_id: decl.file_id, value: decl.to_range(db.upcast()) })
}
} }
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]

View file

@ -10,7 +10,7 @@ use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
use crate::{ use crate::{
db::{self, ExpandDatabase}, db::{self, ExpandDatabase},
map_node_range_up, span_for_offset, MacroFileIdExt, map_node_range_up, map_node_range_up_rooted, span_for_offset, MacroFileIdExt,
}; };
/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree. /// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
@ -38,6 +38,9 @@ impl<N: AstIdNode> AstId<N> {
pub fn to_node(&self, db: &dyn ExpandDatabase) -> N { pub fn to_node(&self, db: &dyn ExpandDatabase) -> N {
self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)) self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
} }
/// Returns the text range of the syntax node this id points to.
pub fn to_range(&self, db: &dyn ExpandDatabase) -> TextRange {
self.to_ptr(db).text_range()
}
pub fn to_in_file_node(&self, db: &dyn ExpandDatabase) -> crate::InFile<N> { pub fn to_in_file_node(&self, db: &dyn ExpandDatabase) -> crate::InFile<N> {
crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))) crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
} }
@ -49,6 +52,9 @@ impl<N: AstIdNode> AstId<N> {
pub type ErasedAstId = crate::InFile<ErasedFileAstId>; pub type ErasedAstId = crate::InFile<ErasedFileAstId>;
impl ErasedAstId { impl ErasedAstId {
/// Returns the text range of the syntax node this erased id points to.
pub fn to_range(&self, db: &dyn ExpandDatabase) -> TextRange {
self.to_ptr(db).text_range()
}
pub fn to_ptr(&self, db: &dyn ExpandDatabase) -> SyntaxNodePtr { pub fn to_ptr(&self, db: &dyn ExpandDatabase) -> SyntaxNodePtr {
db.ast_id_map(self.file_id).get_erased(self.value) db.ast_id_map(self.file_id).get_erased(self.value)
} }
@ -173,24 +179,8 @@ impl InFile<&SyntaxNode> {
/// ///
/// For attributes and derives, this will point back to the attribute only. /// For attributes and derives, this will point back to the attribute only.
/// For the entire item use [`InFile::original_file_range_full`]. /// For the entire item use [`InFile::original_file_range_full`].
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { pub fn original_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() { self.map(SyntaxNode::text_range).original_node_file_range_rooted(db)
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return res;
}
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range(db)
}
}
} }
/// Falls back to the macro call range if the node cannot be mapped up fully. /// Falls back to the macro call range if the node cannot be mapped up fully.
@ -198,23 +188,7 @@ impl InFile<&SyntaxNode> {
self, self,
db: &dyn db::ExpandDatabase, db: &dyn db::ExpandDatabase,
) -> FileRange { ) -> FileRange {
match self.file_id.repr() { self.map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db)
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return res;
}
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range_with_body(db)
}
}
} }
/// Attempts to map the syntax node back up its macro calls. /// Attempts to map the syntax node back up its macro calls.
@ -222,17 +196,10 @@ impl InFile<&SyntaxNode> {
self, self,
db: &dyn db::ExpandDatabase, db: &dyn db::ExpandDatabase,
) -> Option<(FileRange, SyntaxContextId)> { ) -> Option<(FileRange, SyntaxContextId)> {
match self.file_id.repr() { self.map(SyntaxNode::text_range).original_node_file_range_opt(db)
HirFileIdRepr::FileId(file_id) => {
Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT))
}
HirFileIdRepr::MacroFile(mac_file) => {
map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
}
}
} }
pub fn original_syntax_node( pub fn original_syntax_node_rooted(
self, self,
db: &dyn db::ExpandDatabase, db: &dyn db::ExpandDatabase,
) -> Option<InRealFile<SyntaxNode>> { ) -> Option<InRealFile<SyntaxNode>> {
@ -242,25 +209,21 @@ impl InFile<&SyntaxNode> {
HirFileIdRepr::FileId(file_id) => { HirFileIdRepr::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value.clone() }) return Some(InRealFile { file_id, value: self.value.clone() })
} }
HirFileIdRepr::MacroFile(m) => m, HirFileIdRepr::MacroFile(m) if m.is_attr_macro(db) => m,
_ => return None,
}; };
if !file_id.is_attr_macro(db) {
return None;
}
let (FileRange { file_id, range }, ctx) = let FileRange { file_id, range } =
map_node_range_up(db, &db.expansion_span_map(file_id), self.value.text_range())?; map_node_range_up_rooted(db, &db.expansion_span_map(file_id), self.value.text_range())?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behavior.
if !ctx.is_root() {
return None;
}
let anc = db.parse(file_id).syntax_node().covering_element(range);
let kind = self.value.kind(); let kind = self.value.kind();
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes? let value = db
let value = anc.ancestors().find(|it| it.kind() == kind)?; .parse(file_id)
.syntax_node()
.covering_element(range)
.ancestors()
.take_while(|it| it.text_range() == range)
.find(|it| it.kind() == kind)?;
Some(InRealFile::new(file_id, value)) Some(InRealFile::new(file_id, value))
} }
} }
@ -355,8 +318,8 @@ impl InFile<TextRange> {
match self.file_id.repr() { match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value }, HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => { HirFileIdRepr::MacroFile(mac_file) => {
match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) { match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) {
Some((it, SyntaxContextId::ROOT)) => it, Some(it) => it,
_ => { _ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range(db) loc.kind.original_call_range(db)
@ -366,6 +329,24 @@ impl InFile<TextRange> {
} }
} }
/// Maps this range up out of macro expansions considering only root-context
/// spans. Falls back to the full macro call range *including the call body*
/// when the range cannot be mapped up.
pub fn original_node_file_range_with_macro_call_body(
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
match self.file_id.repr() {
// Already in a real file: the range can be used directly.
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => {
match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
// Upmapping failed (no root-context span, or mixed anchors):
// fall back to the whole macro call, body included.
_ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range_with_body(db)
}
}
}
}
}
pub fn original_node_file_range_opt( pub fn original_node_file_range_opt(
self, self,
db: &dyn db::ExpandDatabase, db: &dyn db::ExpandDatabase,
@ -395,18 +376,12 @@ impl<N: AstNode> InFile<N> {
return None; return None;
} }
let (FileRange { file_id, range }, ctx) = map_node_range_up( let FileRange { file_id, range } = map_node_range_up_rooted(
db, db,
&db.expansion_span_map(file_id), &db.expansion_span_map(file_id),
self.value.syntax().text_range(), self.value.syntax().text_range(),
)?; )?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if !ctx.is_root() {
return None;
}
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes? // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
let anc = db.parse(file_id).syntax_node().covering_element(range); let anc = db.parse(file_id).syntax_node().covering_element(range);
let value = anc.ancestors().find_map(N::cast)?; let value = anc.ancestors().find_map(N::cast)?;

View file

@ -25,13 +25,16 @@ pub mod span_map;
mod fixup; mod fixup;
use attrs::collect_attrs; use attrs::collect_attrs;
use rustc_hash::FxHashMap;
use triomphe::Arc; use triomphe::Arc;
use std::{fmt, hash::Hash}; use std::{fmt, hash::Hash};
use base_db::{salsa::impl_intern_value_trivial, CrateId, Edition, FileId}; use base_db::{salsa::impl_intern_value_trivial, CrateId, Edition, FileId};
use either::Either; use either::Either;
use span::{ErasedFileAstId, FileRange, HirFileIdRepr, Span, SyntaxContextData, SyntaxContextId}; use span::{
ErasedFileAstId, FileRange, HirFileIdRepr, Span, SpanAnchor, SyntaxContextData, SyntaxContextId,
};
use syntax::{ use syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
SyntaxNode, SyntaxToken, TextRange, TextSize, SyntaxNode, SyntaxToken, TextRange, TextSize,
@ -683,6 +686,8 @@ impl ExpansionInfo {
} }
/// Maps the passed in file range down into a macro expansion if it is the input to a macro call. /// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
///
/// Note this does a linear search through the entire backing vector of the spanmap.
pub fn map_range_down( pub fn map_range_down(
&self, &self,
span: Span, span: Span,
@ -793,7 +798,34 @@ impl ExpansionInfo {
} }
} }
/// Maps up the text range out of the expansion hierarchy back into the original file it's from,
/// considering only spans with the root syntax context.
///
/// Unlike [`map_node_range_up`], spans with non-root syntax contexts are skipped rather than
/// causing the whole mapping to fail. `None` is returned only when the range contains no
/// root-context span at all, or when the root-context spans disagree on their anchor.
pub fn map_node_range_up_rooted(
db: &dyn ExpandDatabase,
exp_map: &ExpansionSpanMap,
range: TextRange,
) -> Option<FileRange> {
// Only spans originating from the root context participate in the mapping.
let mut spans = exp_map.spans_for_range(range).filter(|span| span.ctx.is_root());
let Span { range, anchor, ctx: _ } = spans.next()?;
let mut start = range.start();
let mut end = range.end();
for span in spans {
// All root spans must share one anchor, otherwise there is no single
// contiguous original range to report.
if span.anchor != anchor {
return None;
}
start = start.min(span.range.start());
end = end.max(span.range.end());
}
// Span ranges are relative to their anchor node; shift by the anchor's
// starting offset within its file to get absolute file coordinates.
let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
Some(FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset })
}
/// Maps up the text range out of the expansion hierarchy back into the original file its from. /// Maps up the text range out of the expansion hierarchy back into the original file its from.
///
/// This will return `None` if any of the contained spans' anchors or syntax contexts differ.
pub fn map_node_range_up( pub fn map_node_range_up(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
exp_map: &ExpansionSpanMap, exp_map: &ExpansionSpanMap,
@ -819,6 +851,29 @@ pub fn map_node_range_up(
)) ))
} }
/// Maps up the text range out of the expansion hierarchy back into the original file it's from.
///
/// This version aggregates the ranges of all spans that share the same anchor and syntax
/// context, producing one covering range per `(anchor, context)` pair.
pub fn map_node_range_up_aggregated(
db: &dyn ExpandDatabase,
exp_map: &ExpansionSpanMap,
range: TextRange,
) -> FxHashMap<(SpanAnchor, SyntaxContextId), TextRange> {
let mut map = FxHashMap::default();
for span in exp_map.spans_for_range(range) {
// Extend the stored range for this (anchor, context) key to cover the span.
let range = map.entry((span.anchor, span.ctx)).or_insert_with(|| span.range);
*range = TextRange::new(
range.start().min(span.range.start()),
range.end().max(span.range.end()),
);
}
// Span ranges are anchor-relative; translate each into absolute file offsets.
for ((anchor, _), range) in &mut map {
let anchor_offset =
db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
*range += anchor_offset;
}
map
}
/// Looks up the span at the given offset. /// Looks up the span at the given offset.
pub fn span_for_offset( pub fn span_for_offset(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,

View file

@ -1,5 +1,5 @@
//! Span maps for real files and macro expansions. //! Span maps for real files and macro expansions.
use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span}; use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span, SyntaxContextId};
use syntax::{AstNode, TextRange}; use syntax::{AstNode, TextRange};
use triomphe::Arc; use triomphe::Arc;
@ -7,7 +7,7 @@ pub use span::RealSpanMap;
use crate::db::ExpandDatabase; use crate::db::ExpandDatabase;
pub type ExpansionSpanMap = span::SpanMap<Span>; pub type ExpansionSpanMap = span::SpanMap<SyntaxContextId>;
/// Spanmap for a macro file or a real file /// Spanmap for a macro file or a real file
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]

View file

@ -164,7 +164,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
Some(value) => value, Some(value) => value,
None => continue, None => continue,
}; };
let range = node.as_ref().original_file_range(&db); let range = node.as_ref().original_file_range_rooted(&db);
if let Some(expected) = types.remove(&range) { if let Some(expected) = types.remove(&range) {
let actual = if display_source { let actual = if display_source {
ty.display_source_code(&db, def.module(&db), true).unwrap() ty.display_source_code(&db, def.module(&db), true).unwrap()
@ -180,7 +180,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
Some(value) => value, Some(value) => value,
None => continue, None => continue,
}; };
let range = node.as_ref().original_file_range(&db); let range = node.as_ref().original_file_range_rooted(&db);
if let Some(expected) = types.remove(&range) { if let Some(expected) = types.remove(&range) {
let actual = if display_source { let actual = if display_source {
ty.display_source_code(&db, def.module(&db), true).unwrap() ty.display_source_code(&db, def.module(&db), true).unwrap()
@ -211,7 +211,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
}) else { }) else {
continue; continue;
}; };
let range = node.as_ref().original_file_range(&db); let range = node.as_ref().original_file_range_rooted(&db);
let actual = format!( let actual = format!(
"expected {}, got {}", "expected {}, got {}",
mismatch.expected.display_test(&db), mismatch.expected.display_test(&db),

View file

@ -9,6 +9,7 @@ use hir_def::{
}; };
use hir_expand::{HirFileId, InFile}; use hir_expand::{HirFileId, InFile};
use syntax::ast; use syntax::ast;
use tt::TextRange;
use crate::{ use crate::{
db::HirDatabase, Adt, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl, db::HirDatabase, Adt, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl,
@ -37,6 +38,12 @@ impl Module {
def_map[self.id.local_id].definition_source(db.upcast()) def_map[self.id.local_id].definition_source(db.upcast())
} }
/// Returns the text range of the node which defines this module. That is, the
/// file or the `mod foo {}` with items.
pub fn definition_source_range(self, db: &dyn HirDatabase) -> InFile<TextRange> {
let def_map = self.id.def_map(db.upcast());
def_map[self.id.local_id].definition_source_range(db.upcast())
}
pub fn definition_source_file_id(self, db: &dyn HirDatabase) -> HirFileId { pub fn definition_source_file_id(self, db: &dyn HirDatabase) -> HirFileId {
let def_map = self.id.def_map(db.upcast()); let def_map = self.id.def_map(db.upcast());
def_map[self.id.local_id].definition_source_file_id() def_map[self.id.local_id].definition_source_file_id()
@ -71,6 +78,13 @@ impl Module {
let def_map = self.id.def_map(db.upcast()); let def_map = self.id.def_map(db.upcast());
def_map[self.id.local_id].declaration_source(db.upcast()) def_map[self.id.local_id].declaration_source(db.upcast())
} }
/// Returns the text range which declares this module, either a `mod foo;` or a
/// `mod foo {}`.
/// Returns `None` for the crate root.
pub fn declaration_source_range(self, db: &dyn HirDatabase) -> Option<InFile<TextRange>> {
let def_map = self.id.def_map(db.upcast());
def_map[self.id.local_id].declaration_source_range(db.upcast())
}
} }
impl HasSource for Field { impl HasSource for Field {

View file

@ -960,7 +960,7 @@ impl<'db> SemanticsImpl<'db> {
/// macro file the node resides in. /// macro file the node resides in.
pub fn original_range(&self, node: &SyntaxNode) -> FileRange { pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
let node = self.find_file(node); let node = self.find_file(node);
node.original_file_range(self.db.upcast()) node.original_file_range_rooted(self.db.upcast())
} }
/// Attempts to map the node out of macro expanded files returning the original file range. /// Attempts to map the node out of macro expanded files returning the original file range.
@ -984,9 +984,9 @@ impl<'db> SemanticsImpl<'db> {
/// Attempts to map the node out of macro expanded files. /// Attempts to map the node out of macro expanded files.
/// This only work for attribute expansions, as other ones do not have nodes as input. /// This only work for attribute expansions, as other ones do not have nodes as input.
pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> { pub fn original_syntax_node_rooted(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
let InFile { file_id, .. } = self.find_file(node); let InFile { file_id, .. } = self.find_file(node);
InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map( InFile::new(file_id, node).original_syntax_node_rooted(self.db.upcast()).map(
|InRealFile { file_id, value }| { |InRealFile { file_id, value }| {
self.cache(find_root(&value), file_id.into()); self.cache(find_root(&value), file_id.into());
value value
@ -997,7 +997,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange { pub fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
let root = self.parse_or_expand(src.file_id); let root = self.parse_or_expand(src.file_id);
let node = src.map(|it| it.to_node(&root)); let node = src.map(|it| it.to_node(&root));
node.as_ref().original_file_range(self.db.upcast()) node.as_ref().original_file_range_rooted(self.db.upcast())
} }
fn token_ancestors_with_macros( fn token_ancestors_with_macros(

View file

@ -49,7 +49,7 @@ impl DeclarationLocation {
return FileRange { file_id, range: self.ptr.text_range() }; return FileRange { file_id, range: self.ptr.text_range() };
} }
let node = resolve_node(db, self.hir_file_id, &self.ptr); let node = resolve_node(db, self.hir_file_id, &self.ptr);
node.as_ref().original_file_range(db.upcast()) node.as_ref().original_file_range_rooted(db.upcast())
} }
} }

View file

@ -198,7 +198,7 @@ fn get_adt_source(
adt: &hir::Adt, adt: &hir::Adt,
fn_name: &str, fn_name: &str,
) -> Option<(Option<ast::Impl>, FileId)> { ) -> Option<(Option<ast::Impl>, FileId)> {
let range = adt.source(ctx.sema.db)?.syntax().original_file_range(ctx.sema.db); let range = adt.source(ctx.sema.db)?.syntax().original_file_range_rooted(ctx.sema.db);
let file = ctx.sema.parse(range.file_id); let file = ctx.sema.parse(range.file_id);
let adt_source = let adt_source =
ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?; ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?;

View file

@ -206,7 +206,7 @@ pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
let fn_body = fn_source.value.body()?; let fn_body = fn_source.value.body()?;
let param_list = fn_source.value.param_list()?; let param_list = fn_source.value.param_list()?;
let FileRange { file_id, range } = fn_source.syntax().original_file_range(ctx.sema.db); let FileRange { file_id, range } = fn_source.syntax().original_file_range_rooted(ctx.sema.db);
if file_id == ctx.file_id() && range.contains(ctx.offset()) { if file_id == ctx.file_id() && range.contains(ctx.offset()) {
cov_mark::hit!(inline_call_recursive); cov_mark::hit!(inline_call_recursive);
return None; return None;

View file

@ -96,7 +96,7 @@ fn complete_trait_impl_name(
.parent() .parent()
} }
}?; }?;
let item = ctx.sema.original_syntax_node(&item)?; let item = ctx.sema.original_syntax_node_rooted(&item)?;
// item -> ASSOC_ITEM_LIST -> IMPL // item -> ASSOC_ITEM_LIST -> IMPL
let impl_def = ast::Impl::cast(item.parent()?.parent()?)?; let impl_def = ast::Impl::cast(item.parent()?.parent()?)?;
let replacement_range = { let replacement_range = {

View file

@ -2,7 +2,7 @@
use std::iter; use std::iter;
use hir::{HirFileIdExt, Module, ModuleSource}; use hir::{HirFileIdExt, Module};
use ide_db::{ use ide_db::{
base_db::{SourceDatabaseExt, VfsPath}, base_db::{SourceDatabaseExt, VfsPath},
FxHashSet, RootDatabase, SymbolKind, FxHashSet, RootDatabase, SymbolKind,
@ -57,7 +57,7 @@ pub(crate) fn complete_mod(
.collect::<FxHashSet<_>>(); .collect::<FxHashSet<_>>();
let module_declaration_file = let module_declaration_file =
current_module.declaration_source(ctx.db).map(|module_declaration_source_file| { current_module.declaration_source_range(ctx.db).map(|module_declaration_source_file| {
module_declaration_source_file.file_id.original_file(ctx.db) module_declaration_source_file.file_id.original_file(ctx.db)
}); });
@ -148,9 +148,7 @@ fn module_chain_to_containing_module_file(
) -> Vec<Module> { ) -> Vec<Module> {
let mut path = let mut path =
iter::successors(Some(current_module), |current_module| current_module.parent(db)) iter::successors(Some(current_module), |current_module| current_module.parent(db))
.take_while(|current_module| { .take_while(|current_module| current_module.is_inline(db))
matches!(current_module.definition_source(db).value, ModuleSource::Module(_))
})
.collect::<Vec<_>>(); .collect::<Vec<_>>();
path.reverse(); path.reverse();
path path

View file

@ -71,7 +71,7 @@ pub fn visit_file_defs(
let mut defs: VecDeque<_> = module.declarations(db).into(); let mut defs: VecDeque<_> = module.declarations(db).into();
while let Some(def) = defs.pop_front() { while let Some(def) = defs.pop_front() {
if let ModuleDef::Module(submodule) = def { if let ModuleDef::Module(submodule) = def {
if let hir::ModuleSource::Module(_) = submodule.definition_source(db).value { if submodule.is_inline(db) {
defs.extend(submodule.declarations(db)); defs.extend(submodule.declarations(db));
submodule.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into())); submodule.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into()));
} }

View file

@ -190,22 +190,15 @@ impl SearchScope {
let mut entries = IntMap::default(); let mut entries = IntMap::default();
let (file_id, range) = { let (file_id, range) = {
let InFile { file_id, value } = module.definition_source(db); let InFile { file_id, value } = module.definition_source_range(db);
if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db) if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db)
{ {
(file_id, Some(call_source.text_range())) (file_id, Some(call_source.text_range()))
} else { } else {
( (file_id.original_file(db), Some(value))
file_id.original_file(db),
match value {
ModuleSource::SourceFile(_) => None,
ModuleSource::Module(it) => Some(it.syntax().text_range()),
ModuleSource::BlockExpr(it) => Some(it.syntax().text_range()),
},
)
} }
}; };
entries.insert(file_id, range); entries.entry(file_id).or_insert(range);
let mut to_visit: Vec<_> = module.children(db).collect(); let mut to_visit: Vec<_> = module.children(db).collect();
while let Some(module) = to_visit.pop() { while let Some(module) = to_visit.pop() {

View file

@ -38,7 +38,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Option<Vec<Ass
let def = NameClass::classify(&ctx.sema, &name_node)?.defined()?; let def = NameClass::classify(&ctx.sema, &name_node)?.defined()?;
let name_node = InFile::new(d.file, name_node.syntax()); let name_node = InFile::new(d.file, name_node.syntax());
let frange = name_node.original_file_range(ctx.sema.db); let frange = name_node.original_file_range_rooted(ctx.sema.db);
let label = format!("Rename to {}", d.suggested_text); let label = format!("Rename to {}", d.suggested_text);
let mut res = unresolved_fix("change_case", &label, frange.range); let mut res = unresolved_fix("change_case", &label, frange.range);

View file

@ -114,10 +114,8 @@ fn add_variant_to_union(
) -> Option<Assist> { ) -> Option<Assist> {
let adt_source = adt_union.source(ctx.sema.db)?; let adt_source = adt_union.source(ctx.sema.db)?;
let adt_syntax = adt_source.syntax(); let adt_syntax = adt_source.syntax();
let Some(field_list) = adt_source.value.record_field_list() else { let field_list = adt_source.value.record_field_list()?;
return None; let range = adt_syntax.original_file_range_rooted(ctx.sema.db);
};
let range = adt_syntax.original_file_range(ctx.sema.db);
let field_name = make::name(field_name); let field_name = make::name(field_name);
let (offset, record_field) = let (offset, record_field) =
@ -144,7 +142,7 @@ fn add_field_to_struct_fix(
) -> Option<Assist> { ) -> Option<Assist> {
let struct_source = adt_struct.source(ctx.sema.db)?; let struct_source = adt_struct.source(ctx.sema.db)?;
let struct_syntax = struct_source.syntax(); let struct_syntax = struct_source.syntax();
let struct_range = struct_syntax.original_file_range(ctx.sema.db); let struct_range = struct_syntax.original_file_range_rooted(ctx.sema.db);
let field_list = struct_source.value.field_list(); let field_list = struct_source.value.field_list();
match field_list { match field_list {
Some(FieldList::RecordFieldList(field_list)) => { Some(FieldList::RecordFieldList(field_list)) => {

View file

@ -125,9 +125,12 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
let match_state = Matcher { sema, restrict_range: *restrict_range, rule }; let match_state = Matcher { sema, restrict_range: *restrict_range, rule };
// First pass at matching, where we check that node types and idents match. // First pass at matching, where we check that node types and idents match.
match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?; match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?;
match_state.validate_range(&sema.original_range(code))?; let file_range = sema
.original_range_opt(code)
.ok_or(MatchFailed { reason: Some("def site definition".to_owned()) })?;
match_state.validate_range(&file_range)?;
let mut the_match = Match { let mut the_match = Match {
range: sema.original_range(code), range: file_range,
matched_node: code.clone(), matched_node: code.clone(),
placeholder_values: FxHashMap::default(), placeholder_values: FxHashMap::default(),
ignored_comments: Vec::new(), ignored_comments: Vec::new(),
@ -175,7 +178,10 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
self.check_constraint(constraint, code)?; self.check_constraint(constraint, code)?;
} }
if let Phase::Second(matches_out) = phase { if let Phase::Second(matches_out) = phase {
let original_range = self.sema.original_range(code); let original_range = self
.sema
.original_range_opt(code)
.ok_or(MatchFailed { reason: Some("def site definition".to_owned()) })?;
// We validated the range for the node when we started the match, so the placeholder // We validated the range for the node when we started the match, so the placeholder
// probably can't fail range validation, but just to be safe... // probably can't fail range validation, but just to be safe...
self.validate_range(&original_range)?; self.validate_range(&original_range)?;
@ -487,7 +493,13 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
match_out.placeholder_values.insert( match_out.placeholder_values.insert(
placeholder.ident.clone(), placeholder.ident.clone(),
PlaceholderMatch::from_range(FileRange { PlaceholderMatch::from_range(FileRange {
file_id: self.sema.original_range(code).file_id, file_id: self
.sema
.original_range_opt(code)
.ok_or(MatchFailed {
reason: Some("def site definition".to_owned()),
})?
.file_id,
range: first_matched_token range: first_matched_token
.text_range() .text_range()
.cover(last_matched_token.text_range()), .cover(last_matched_token.text_range()),

View file

@ -190,12 +190,9 @@ impl MatchFinder<'_> {
// When matching within a macro expansion, we only want to allow matches of // When matching within a macro expansion, we only want to allow matches of
// nodes that originated entirely from within the token tree of the macro call. // nodes that originated entirely from within the token tree of the macro call.
// i.e. we don't want to match something that came from the macro itself. // i.e. we don't want to match something that came from the macro itself.
self.slow_scan_node( if let Some(range) = self.sema.original_range_opt(tt.syntax()) {
&expanded, self.slow_scan_node(&expanded, rule, &Some(range), matches_out);
rule, }
&Some(self.sema.original_range(tt.syntax())),
matches_out,
);
} }
} }
} }
@ -227,7 +224,7 @@ impl MatchFinder<'_> {
// There is no range restriction. // There is no range restriction.
return true; return true;
} }
let node_range = self.sema.original_range(code); let Some(node_range) = self.sema.original_range_opt(code) else { return false };
for range in &self.restrict_ranges { for range in &self.restrict_ranges {
if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) { if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) {
return true; return true;

View file

@ -852,7 +852,7 @@ fn orig_range(
value: &SyntaxNode, value: &SyntaxNode,
) -> UpmappingResult<(FileRange, Option<TextRange>)> { ) -> UpmappingResult<(FileRange, Option<TextRange>)> {
UpmappingResult { UpmappingResult {
call_site: (InFile::new(hir_file, value).original_file_range(db), None), call_site: (InFile::new(hir_file, value).original_file_range_rooted(db), None),
def_site: None, def_site: None,
} }
} }

View file

@ -138,7 +138,9 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
}) { }) {
if let Some(def) = def { if let Some(def) = def {
let file_id = match def { let file_id = match def {
Definition::Module(it) => it.declaration_source(db).map(|src| src.file_id), Definition::Module(it) => {
it.declaration_source_range(db).map(|src| src.file_id)
}
Definition::Function(it) => it.source(db).map(|src| src.file_id), Definition::Function(it) => it.source(db).map(|src| src.file_id),
_ => None, _ => None,
}; };
@ -269,15 +271,10 @@ fn find_related_tests_in_module(
Some(it) => it, Some(it) => it,
_ => return, _ => return,
}; };
let mod_source = parent_module.definition_source(sema.db); let mod_source = parent_module.definition_source_range(sema.db);
let range = match &mod_source.value {
hir::ModuleSource::Module(m) => m.syntax().text_range(),
hir::ModuleSource::BlockExpr(b) => b.syntax().text_range(),
hir::ModuleSource::SourceFile(f) => f.syntax().text_range(),
};
let file_id = mod_source.file_id.original_file(sema.db); let file_id = mod_source.file_id.original_file(sema.db);
let mod_scope = SearchScope::file_range(FileRange { file_id, range }); let mod_scope = SearchScope::file_range(FileRange { file_id, range: mod_source.value });
let fn_pos = FilePosition { file_id, offset: fn_name.syntax().text_range().start() }; let fn_pos = FilePosition { file_id, offset: fn_name.syntax().text_range().start() };
find_related_tests(sema, syntax, fn_pos, Some(mod_scope), tests) find_related_tests(sema, syntax, fn_pos, Some(mod_scope), tests)
} }
@ -405,14 +402,15 @@ fn runnable_mod_outline_definition(
let attrs = def.attrs(sema.db); let attrs = def.attrs(sema.db);
let cfg = attrs.cfg(); let cfg = attrs.cfg();
match def.definition_source(sema.db).value { if def.as_source_file_id(sema.db).is_some() {
hir::ModuleSource::SourceFile(_) => Some(Runnable { Some(Runnable {
use_name_in_title: false, use_name_in_title: false,
nav: def.to_nav(sema.db).call_site(), nav: def.to_nav(sema.db).call_site(),
kind: RunnableKind::TestMod { path }, kind: RunnableKind::TestMod { path },
cfg, cfg,
}), })
_ => None, } else {
None
} }
} }

View file

@ -23,8 +23,11 @@ pub trait SpanMapper<S: Span> {
fn span_for(&self, range: TextRange) -> S; fn span_for(&self, range: TextRange) -> S;
} }
impl<S: Span> SpanMapper<S> for SpanMap<S> { impl<S> SpanMapper<SpanData<S>> for SpanMap<S>
fn span_for(&self, range: TextRange) -> S { where
SpanData<S>: Span,
{
fn span_for(&self, range: TextRange) -> SpanData<S> {
self.span_at(range.start()) self.span_at(range.start())
} }
} }
@ -121,7 +124,7 @@ where
pub fn token_tree_to_syntax_node<Ctx>( pub fn token_tree_to_syntax_node<Ctx>(
tt: &tt::Subtree<SpanData<Ctx>>, tt: &tt::Subtree<SpanData<Ctx>>,
entry_point: parser::TopEntryPoint, entry_point: parser::TopEntryPoint,
) -> (Parse<SyntaxNode>, SpanMap<SpanData<Ctx>>) ) -> (Parse<SyntaxNode>, SpanMap<Ctx>)
where where
SpanData<Ctx>: Span, SpanData<Ctx>: Span,
Ctx: Copy, Ctx: Copy,
@ -824,7 +827,7 @@ where
cursor: Cursor<'a, SpanData<Ctx>>, cursor: Cursor<'a, SpanData<Ctx>>,
text_pos: TextSize, text_pos: TextSize,
inner: SyntaxTreeBuilder, inner: SyntaxTreeBuilder,
token_map: SpanMap<SpanData<Ctx>>, token_map: SpanMap<Ctx>,
} }
impl<'a, Ctx> TtTreeSink<'a, Ctx> impl<'a, Ctx> TtTreeSink<'a, Ctx>
@ -841,7 +844,7 @@ where
} }
} }
fn finish(mut self) -> (Parse<SyntaxNode>, SpanMap<SpanData<Ctx>>) { fn finish(mut self) -> (Parse<SyntaxNode>, SpanMap<Ctx>) {
self.token_map.finish(); self.token_map.finish();
(self.inner.finish(), self.token_map) (self.inner.finish(), self.token_map)
} }

View file

@ -1053,7 +1053,7 @@ fn location_csv_expr(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, expr_id:
}; };
let root = db.parse_or_expand(src.file_id); let root = db.parse_or_expand(src.file_id);
let node = src.map(|e| e.to_node(&root).syntax().clone()); let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range(db); let original_range = node.as_ref().original_file_range_rooted(db);
let path = vfs.file_path(original_range.file_id); let path = vfs.file_path(original_range.file_id);
let line_index = db.line_index(original_range.file_id); let line_index = db.line_index(original_range.file_id);
let text_range = original_range.range; let text_range = original_range.range;
@ -1069,7 +1069,7 @@ fn location_csv_pat(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, pat_id: Pa
}; };
let root = db.parse_or_expand(src.file_id); let root = db.parse_or_expand(src.file_id);
let node = src.map(|e| e.to_node(&root).syntax().clone()); let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range(db); let original_range = node.as_ref().original_file_range_rooted(db);
let path = vfs.file_path(original_range.file_id); let path = vfs.file_path(original_range.file_id);
let line_index = db.line_index(original_range.file_id); let line_index = db.line_index(original_range.file_id);
let text_range = original_range.range; let text_range = original_range.range;
@ -1088,7 +1088,7 @@ fn expr_syntax_range<'a>(
if let Ok(src) = src { if let Ok(src) = src {
let root = db.parse_or_expand(src.file_id); let root = db.parse_or_expand(src.file_id);
let node = src.map(|e| e.to_node(&root).syntax().clone()); let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range(db); let original_range = node.as_ref().original_file_range_rooted(db);
let path = vfs.file_path(original_range.file_id); let path = vfs.file_path(original_range.file_id);
let line_index = db.line_index(original_range.file_id); let line_index = db.line_index(original_range.file_id);
let text_range = original_range.range; let text_range = original_range.range;
@ -1109,7 +1109,7 @@ fn pat_syntax_range<'a>(
if let Ok(src) = src { if let Ok(src) = src {
let root = db.parse_or_expand(src.file_id); let root = db.parse_or_expand(src.file_id);
let node = src.map(|e| e.to_node(&root).syntax().clone()); let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range(db); let original_range = node.as_ref().original_file_range_rooted(db);
let path = vfs.file_path(original_range.file_id); let path = vfs.file_path(original_range.file_id);
let line_index = db.line_index(original_range.file_id); let line_index = db.line_index(original_range.file_id);
let text_range = original_range.range; let text_range = original_range.range;

View file

@ -56,6 +56,12 @@ pub struct SpanData<Ctx> {
pub ctx: Ctx, pub ctx: Ctx,
} }
impl<Ctx: Copy> SpanData<Ctx> {
pub fn eq_ignoring_ctx(self, other: Self) -> bool {
self.anchor == other.anchor && self.range == other.range
}
}
impl Span { impl Span {
#[deprecated = "dummy spans will panic if surfaced incorrectly, as such they should be replaced appropriately"] #[deprecated = "dummy spans will panic if surfaced incorrectly, as such they should be replaced appropriately"]
pub const DUMMY: Self = SpanData { pub const DUMMY: Self = SpanData {

View file

@ -7,17 +7,20 @@ use stdx::{always, itertools::Itertools};
use syntax::{TextRange, TextSize}; use syntax::{TextRange, TextSize};
use vfs::FileId; use vfs::FileId;
use crate::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; use crate::{
ErasedFileAstId, Span, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID,
};
/// Maps absolute text ranges for the corresponding file to the relevant span data. /// Maps absolute text ranges for the corresponding file to the relevant span data.
#[derive(Debug, PartialEq, Eq, Clone, Hash)] #[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub struct SpanMap<S> { pub struct SpanMap<S> {
spans: Vec<(TextSize, S)>, spans: Vec<(TextSize, SpanData<S>)>,
// FIXME: Should be
// spans: Vec<(TextSize, crate::SyntaxContextId)>,
} }
impl<S: Copy> SpanMap<S> { impl<S> SpanMap<S>
where
SpanData<S>: Copy,
{
/// Creates a new empty [`SpanMap`]. /// Creates a new empty [`SpanMap`].
pub fn empty() -> Self { pub fn empty() -> Self {
Self { spans: Vec::new() } Self { spans: Vec::new() }
@ -34,7 +37,7 @@ impl<S: Copy> SpanMap<S> {
} }
/// Pushes a new span onto the [`SpanMap`]. /// Pushes a new span onto the [`SpanMap`].
pub fn push(&mut self, offset: TextSize, span: S) { pub fn push(&mut self, offset: TextSize, span: SpanData<S>) {
if cfg!(debug_assertions) { if cfg!(debug_assertions) {
if let Some(&(last_offset, _)) = self.spans.last() { if let Some(&(last_offset, _)) = self.spans.last() {
assert!( assert!(
@ -49,13 +52,12 @@ impl<S: Copy> SpanMap<S> {
/// Returns all [`TextRange`]s that correspond to the given span. /// Returns all [`TextRange`]s that correspond to the given span.
/// ///
/// Note this does a linear search through the entire backing vector. /// Note this does a linear search through the entire backing vector.
pub fn ranges_with_span(&self, span: S) -> impl Iterator<Item = TextRange> + '_ pub fn ranges_with_span(&self, span: SpanData<S>) -> impl Iterator<Item = TextRange> + '_
where where
S: Eq, S: Copy,
{ {
// FIXME: This should ignore the syntax context!
self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| { self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| {
if s != span { if !s.eq_ignoring_ctx(span) {
return None; return None;
} }
let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0); let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0);
@ -64,21 +66,21 @@ impl<S: Copy> SpanMap<S> {
} }
/// Returns the span at the given position. /// Returns the span at the given position.
pub fn span_at(&self, offset: TextSize) -> S { pub fn span_at(&self, offset: TextSize) -> SpanData<S> {
let entry = self.spans.partition_point(|&(it, _)| it <= offset); let entry = self.spans.partition_point(|&(it, _)| it <= offset);
self.spans[entry].1 self.spans[entry].1
} }
/// Returns the spans associated with the given range. /// Returns the spans associated with the given range.
/// In other words, this will return all spans that correspond to all offsets within the given range. /// In other words, this will return all spans that correspond to all offsets within the given range.
pub fn spans_for_range(&self, range: TextRange) -> impl Iterator<Item = S> + '_ { pub fn spans_for_range(&self, range: TextRange) -> impl Iterator<Item = SpanData<S>> + '_ {
let (start, end) = (range.start(), range.end()); let (start, end) = (range.start(), range.end());
let start_entry = self.spans.partition_point(|&(it, _)| it <= start); let start_entry = self.spans.partition_point(|&(it, _)| it <= start);
let end_entry = self.spans[start_entry..].partition_point(|&(it, _)| it <= end); // FIXME: this might be wrong? let end_entry = self.spans[start_entry..].partition_point(|&(it, _)| it <= end); // FIXME: this might be wrong?
self.spans[start_entry..][..end_entry].iter().map(|&(_, s)| s) self.spans[start_entry..][..end_entry].iter().map(|&(_, s)| s)
} }
pub fn iter(&self) -> impl Iterator<Item = (TextSize, S)> + '_ { pub fn iter(&self) -> impl Iterator<Item = (TextSize, SpanData<S>)> + '_ {
self.spans.iter().copied() self.spans.iter().copied()
} }
} }