Rewrite attribute handling

In short, we switch to expanding `cfg_attr` in AST form, filter irrelevant attributes out of the item tree, and make hir-def (non-item-tree) attributes flag-based.

The main motivation is memory usage, although this also simplifies the code and fixes some bugs around the handling of `cfg_attr`s.
Chayim Refael Friedman 2025-07-27 20:17:10 +03:00
parent 57875bdce3
commit 455ca02f17
137 changed files with 4999 additions and 3851 deletions
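To make the "flag-based" part concrete, here is a minimal sketch of the idea; the `AttrFlags` type and its methods are hypothetical, not rust-analyzer's actual names. Attributes that hir-def only ever needs as yes/no facts are folded into a small bitset while lowering, after `cfg_attr` has been expanded on the AST, so later queries become a bit test instead of a lookup in a stored attribute list.

```rust
// Hedged sketch only: `AttrFlags` and its helpers are illustrative, not the
// names this commit introduces. The point is that boolean attribute facts are
// computed once per item and stored as bits, so the parsed attribute list does
// not have to stay alive just to answer yes/no questions.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct AttrFlags(u32);

impl AttrFlags {
    const MACRO_EXPORT: u32 = 1 << 0;
    const NON_EXHAUSTIVE: u32 = 1 << 1;

    /// Computed once from the cfg_attr-expanded attribute paths of an item.
    pub fn from_attr_paths<'a>(paths: impl Iterator<Item = &'a str>) -> Self {
        let mut bits = 0;
        for path in paths {
            match path {
                "macro_export" => bits |= Self::MACRO_EXPORT,
                "non_exhaustive" => bits |= Self::NON_EXHAUSTIVE,
                _ => {}
            }
        }
        AttrFlags(bits)
    }

    /// Queries like the `attrs.is_macro_export()` call in the `SearchScope`
    /// hunk below become a plain bit test, with no attribute text in memory.
    pub fn is_macro_export(self) -> bool {
        self.0 & Self::MACRO_EXPORT != 0
    }
}

fn main() {
    let flags = AttrFlags::from_attr_paths(["macro_export", "doc"].into_iter());
    assert!(flags.is_macro_export());
}
```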


@ -5,8 +5,10 @@
// FIXME: this badly needs rename/rewrite (matklad, 2020-02-06).
use std::borrow::Cow;
use crate::RootDatabase;
use crate::documentation::{DocsRangeMap, Documentation, HasDocs};
use crate::documentation::{Documentation, HasDocs};
use crate::famous_defs::FamousDefs;
use arrayvec::ArrayVec;
use either::Either;
@ -21,7 +23,7 @@ use hir::{
use span::Edition;
use stdx::{format_to, impl_from};
use syntax::{
SyntaxKind, SyntaxNode, SyntaxToken, TextSize,
SyntaxKind, SyntaxNode, SyntaxToken,
ast::{self, AstNode},
match_ast,
};
@ -199,21 +201,25 @@ impl Definition {
Some(name)
}
pub fn docs(
pub fn docs<'db>(
&self,
db: &RootDatabase,
db: &'db RootDatabase,
famous_defs: Option<&FamousDefs<'_, '_>>,
display_target: DisplayTarget,
) -> Option<Documentation> {
self.docs_with_rangemap(db, famous_defs, display_target).map(|(docs, _)| docs)
) -> Option<Documentation<'db>> {
self.docs_with_rangemap(db, famous_defs, display_target).map(|docs| match docs {
Either::Left(Cow::Borrowed(docs)) => Documentation::new_borrowed(docs.docs()),
Either::Left(Cow::Owned(docs)) => Documentation::new_owned(docs.into_docs()),
Either::Right(docs) => docs,
})
}
pub fn docs_with_rangemap(
pub fn docs_with_rangemap<'db>(
&self,
db: &RootDatabase,
db: &'db RootDatabase,
famous_defs: Option<&FamousDefs<'_, '_>>,
display_target: DisplayTarget,
) -> Option<(Documentation, Option<DocsRangeMap>)> {
) -> Option<Either<Cow<'db, hir::Docs>, Documentation<'db>>> {
let docs = match self {
Definition::Macro(it) => it.docs_with_rangemap(db),
Definition::Field(it) => it.docs_with_rangemap(db),
@ -229,15 +235,13 @@ impl Definition {
it.docs_with_rangemap(db).or_else(|| {
// docs are missing, try to fall back to the docs of the aliased item.
let adt = it.ty(db).as_adt()?;
let (docs, range_map) = adt.docs_with_rangemap(db)?;
let mut docs = adt.docs_with_rangemap(db)?.into_owned();
let header_docs = format!(
"*This is the documentation for* `{}`\n\n",
adt.display(db, display_target)
);
let offset = TextSize::new(header_docs.len() as u32);
let range_map = range_map.shift_docstring_line_range(offset);
let docs = header_docs + docs.as_str();
Some((Documentation::new(docs), range_map))
docs.prepend_str(&header_docs);
Some(Cow::Owned(docs))
})
}
Definition::BuiltinType(it) => {
@ -246,7 +250,7 @@ impl Definition {
let primitive_mod =
format!("prim_{}", it.name().display(fd.0.db, display_target.edition));
let doc_owner = find_std_module(fd, &primitive_mod, display_target.edition)?;
doc_owner.docs_with_rangemap(fd.0.db)
doc_owner.docs_with_rangemap(db)
})
}
Definition::BuiltinLifetime(StaticLifetime) => None,
@ -282,7 +286,7 @@ impl Definition {
);
}
return Some((Documentation::new(docs.replace('*', "\\*")), None));
return Some(Either::Right(Documentation::new_owned(docs.replace('*', "\\*"))));
}
Definition::ToolModule(_) => None,
Definition::DeriveHelper(_) => None,
@ -299,7 +303,7 @@ impl Definition {
let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?;
item.docs_with_rangemap(db)
})
.map(|(docs, range_map)| (docs, Some(range_map)))
.map(Either::Left)
}
pub fn label(&self, db: &RootDatabase, display_target: DisplayTarget) -> String {


@ -1,337 +1,100 @@
//! Documentation attribute related utilities.
use either::Either;
use hir::{
AttrId, AttrSourceMap, AttrsWithOwner, HasAttrs, InFile,
db::{DefDatabase, HirDatabase},
resolve_doc_path_on, sym,
};
use itertools::Itertools;
use span::{TextRange, TextSize};
use syntax::{
AstToken,
ast::{self, IsString},
};
use std::borrow::Cow;
use hir::{HasAttrs, db::HirDatabase, resolve_doc_path_on};
/// Holds documentation
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Documentation(String);
pub struct Documentation<'db>(Cow<'db, str>);
impl Documentation {
pub fn new(s: String) -> Self {
Documentation(s)
impl<'db> Documentation<'db> {
#[inline]
pub fn new_owned(s: String) -> Self {
Documentation(Cow::Owned(s))
}
#[inline]
pub fn new_borrowed(s: &'db str) -> Self {
Documentation(Cow::Borrowed(s))
}
#[inline]
pub fn into_owned(self) -> Documentation<'static> {
Documentation::new_owned(self.0.into_owned())
}
#[inline]
pub fn as_str(&self) -> &str {
&self.0
}
}
impl From<Documentation> for String {
fn from(Documentation(string): Documentation) -> Self {
string
pub trait HasDocs: HasAttrs + Copy {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation<'_>> {
let docs = match self.docs_with_rangemap(db)? {
Cow::Borrowed(docs) => Documentation::new_borrowed(docs.docs()),
Cow::Owned(docs) => Documentation::new_owned(docs.into_docs()),
};
Some(docs)
}
}
pub trait HasDocs: HasAttrs {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation>;
fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)>;
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<hir::Namespace>,
is_inner_doc: bool,
) -> Option<hir::DocLinkDef>;
}
/// A struct to map text ranges from [`Documentation`] back to TextRanges in the syntax tree.
#[derive(Debug)]
pub struct DocsRangeMap {
source_map: AttrSourceMap,
// (docstring-line-range, attr_index, attr-string-range)
// a mapping from the text range of a line of the [`Documentation`] to the attribute index and
// the original (untrimmed) syntax doc line
mapping: Vec<(TextRange, AttrId, TextRange)>,
}
impl DocsRangeMap {
/// Maps a [`TextRange`] relative to the documentation string back to its AST range
pub fn map(&self, range: TextRange) -> Option<(InFile<TextRange>, AttrId)> {
let found = self.mapping.binary_search_by(|(probe, ..)| probe.ordering(range)).ok()?;
let (line_docs_range, idx, original_line_src_range) = self.mapping[found];
if !line_docs_range.contains_range(range) {
return None;
}
let relative_range = range - line_docs_range.start();
let InFile { file_id, value: source } = self.source_map.source_of_id(idx);
match source {
Either::Left(attr) => {
let string = get_doc_string_in_attr(attr)?;
let text_range = string.open_quote_text_range()?;
let range = TextRange::at(
text_range.end() + original_line_src_range.start() + relative_range.start(),
string.syntax().text_range().len().min(range.len()),
);
Some((InFile { file_id, value: range }, idx))
}
Either::Right(comment) => {
let text_range = comment.syntax().text_range();
let range = TextRange::at(
text_range.start()
+ TextSize::try_from(comment.prefix().len()).ok()?
+ original_line_src_range.start()
+ relative_range.start(),
text_range.len().min(range.len()),
);
Some((InFile { file_id, value: range }, idx))
}
}
}
pub fn shift_docstring_line_range(self, offset: TextSize) -> DocsRangeMap {
let mapping = self
.mapping
.into_iter()
.map(|(buf_offset, id, base_offset)| {
let buf_offset = buf_offset.checked_add(offset).unwrap();
(buf_offset, id, base_offset)
})
.collect_vec();
DocsRangeMap { source_map: self.source_map, mapping }
}
}
pub fn docs_with_rangemap(
db: &dyn DefDatabase,
attrs: &AttrsWithOwner,
) -> Option<(Documentation, DocsRangeMap)> {
let docs = attrs
.by_key(sym::doc)
.attrs()
.filter_map(|attr| attr.string_value_unescape().map(|s| (s, attr.id)));
let indent = doc_indent(attrs);
let mut buf = String::new();
let mut mapping = Vec::new();
for (doc, idx) in docs {
if !doc.is_empty() {
let mut base_offset = 0;
for raw_line in doc.split('\n') {
let line = raw_line.trim_end();
let line_len = line.len();
let (offset, line) = match line.char_indices().nth(indent) {
Some((offset, _)) => (offset, &line[offset..]),
None => (0, line),
};
let buf_offset = buf.len();
buf.push_str(line);
mapping.push((
TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?),
idx,
TextRange::at(
(base_offset + offset).try_into().ok()?,
line_len.try_into().ok()?,
),
));
buf.push('\n');
base_offset += raw_line.len() + 1;
}
} else {
buf.push('\n');
}
}
buf.pop();
if buf.is_empty() {
None
} else {
Some((Documentation(buf), DocsRangeMap { mapping, source_map: attrs.source_map(db) }))
}
}
pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option<String> {
let docs = attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape());
let indent = doc_indent(attrs);
let mut buf = String::new();
for doc in docs {
// str::lines doesn't yield anything for the empty string
if !doc.is_empty() {
// We don't trim trailing whitespace from doc comments as multiple trailing spaces
// indicates a hard line break in Markdown.
let lines = doc.lines().map(|line| {
line.char_indices().nth(indent).map_or(line, |(offset, _)| &line[offset..])
});
buf.extend(Itertools::intersperse(lines, "\n"));
}
buf.push('\n');
}
buf.pop();
if buf.is_empty() { None } else { Some(buf) }
}
macro_rules! impl_has_docs {
($($def:ident,)*) => {$(
impl HasDocs for hir::$def {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
docs_from_attrs(&self.attrs(db)).map(Documentation)
}
fn docs_with_rangemap(
self,
db: &dyn HirDatabase,
) -> Option<(Documentation, DocsRangeMap)> {
docs_with_rangemap(db, &self.attrs(db))
}
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<hir::Namespace>,
is_inner_doc: bool,
) -> Option<hir::DocLinkDef> {
resolve_doc_path_on(db, self, link, ns, is_inner_doc)
}
}
)*};
}
impl_has_docs![
Variant, Field, Static, Const, Trait, TypeAlias, Macro, Function, Adt, Module, Impl, Crate,
];
macro_rules! impl_has_docs_enum {
($($variant:ident),* for $enum:ident) => {$(
impl HasDocs for hir::$variant {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
hir::$enum::$variant(self).docs(db)
}
fn docs_with_rangemap(
self,
db: &dyn HirDatabase,
) -> Option<(Documentation, DocsRangeMap)> {
hir::$enum::$variant(self).docs_with_rangemap(db)
}
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<hir::Namespace>,
is_inner_doc: bool,
) -> Option<hir::DocLinkDef> {
hir::$enum::$variant(self).resolve_doc_path(db, link, ns, is_inner_doc)
}
}
)*};
}
impl_has_docs_enum![Struct, Union, Enum for Adt];
impl HasDocs for hir::AssocItem {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
match self {
hir::AssocItem::Function(it) => it.docs(db),
hir::AssocItem::Const(it) => it.docs(db),
hir::AssocItem::TypeAlias(it) => it.docs(db),
}
}
fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)> {
match self {
hir::AssocItem::Function(it) => it.docs_with_rangemap(db),
hir::AssocItem::Const(it) => it.docs_with_rangemap(db),
hir::AssocItem::TypeAlias(it) => it.docs_with_rangemap(db),
}
}
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<hir::Namespace>,
is_inner_doc: bool,
) -> Option<hir::DocLinkDef> {
match self {
hir::AssocItem::Function(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
hir::AssocItem::Const(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
hir::AssocItem::TypeAlias(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
}
}
}
impl HasDocs for hir::ExternCrateDecl {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
let crate_docs = docs_from_attrs(&self.resolved_crate(db)?.root_module().attrs(db));
let decl_docs = docs_from_attrs(&self.attrs(db));
match (decl_docs, crate_docs) {
(None, None) => None,
(Some(decl_docs), None) => Some(decl_docs),
(None, Some(crate_docs)) => Some(crate_docs),
(Some(mut decl_docs), Some(crate_docs)) => {
decl_docs.push('\n');
decl_docs.push('\n');
decl_docs += &crate_docs;
Some(decl_docs)
}
}
.map(Documentation::new)
}
fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)> {
let crate_docs = docs_with_rangemap(db, &self.resolved_crate(db)?.root_module().attrs(db));
let decl_docs = docs_with_rangemap(db, &self.attrs(db));
match (decl_docs, crate_docs) {
(None, None) => None,
(Some(decl_docs), None) => Some(decl_docs),
(None, Some(crate_docs)) => Some(crate_docs),
(
Some((Documentation(mut decl_docs), mut decl_range_map)),
Some((Documentation(crate_docs), crate_range_map)),
) => {
decl_docs.push('\n');
decl_docs.push('\n');
let offset = TextSize::new(decl_docs.len() as u32);
decl_docs += &crate_docs;
let crate_range_map = crate_range_map.shift_docstring_line_range(offset);
decl_range_map.mapping.extend(crate_range_map.mapping);
Some((Documentation(decl_docs), decl_range_map))
}
}
fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<Cow<'_, hir::Docs>> {
self.hir_docs(db).map(Cow::Borrowed)
}
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<hir::Namespace>,
is_inner_doc: bool,
is_inner_doc: hir::IsInnerDoc,
) -> Option<hir::DocLinkDef> {
resolve_doc_path_on(db, self, link, ns, is_inner_doc)
}
}
fn get_doc_string_in_attr(it: &ast::Attr) -> Option<ast::String> {
match it.expr() {
// #[doc = lit]
Some(ast::Expr::Literal(lit)) => match lit.kind() {
ast::LiteralKind::String(it) => Some(it),
_ => None,
},
// #[cfg_attr(..., doc = "", ...)]
None => {
// FIXME: See highlight injection for what to do here
None
}
_ => None,
}
macro_rules! impl_has_docs {
($($def:ident,)*) => {$(
impl HasDocs for hir::$def {}
)*};
}
fn doc_indent(attrs: &hir::Attrs) -> usize {
let mut min = !0;
for val in attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()) {
if let Some(m) =
val.lines().filter_map(|line| line.chars().position(|c| !c.is_whitespace())).min()
{
min = min.min(m);
impl_has_docs![
Variant, Field, Static, Const, Trait, TypeAlias, Macro, Function, Adt, Module, Impl, Crate,
AssocItem, Struct, Union, Enum,
];
impl HasDocs for hir::ExternCrateDecl {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation<'_>> {
let crate_docs = self.resolved_crate(db)?.hir_docs(db);
let decl_docs = self.hir_docs(db);
match (decl_docs, crate_docs) {
(None, None) => None,
(Some(docs), None) | (None, Some(docs)) => {
Some(Documentation::new_borrowed(docs.docs()))
}
(Some(decl_docs), Some(crate_docs)) => {
let mut docs = String::with_capacity(
decl_docs.docs().len() + "\n\n".len() + crate_docs.docs().len(),
);
docs.push_str(decl_docs.docs());
docs.push_str("\n\n");
docs.push_str(crate_docs.docs());
Some(Documentation::new_owned(docs))
}
}
}
fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<Cow<'_, hir::Docs>> {
let crate_docs = self.resolved_crate(db)?.hir_docs(db);
let decl_docs = self.hir_docs(db);
match (decl_docs, crate_docs) {
(None, None) => None,
(Some(docs), None) | (None, Some(docs)) => Some(Cow::Borrowed(docs)),
(Some(decl_docs), Some(crate_docs)) => {
let mut docs = decl_docs.clone();
docs.append_str("\n\n");
docs.append(crate_docs);
Some(Cow::Owned(docs))
}
}
}
min
}
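As a usage note on the diff above: `Documentation` is now backed by a `Cow<'db, str>`, so the common path can stay borrowed from the database and only falls back to an owned `String` when the text actually has to be assembled (for example when a header is prepended). A minimal sketch, assuming a plain `&str` stands in for docs borrowed from a hir query:

```rust
// Hedged usage sketch of the Cow-backed `Documentation` shown in the diff
// above; `docs_in_db` stands in for a `&str` borrowed from a query result.
use std::borrow::Cow;

pub struct Documentation<'db>(Cow<'db, str>);

impl<'db> Documentation<'db> {
    pub fn new_borrowed(s: &'db str) -> Self { Documentation(Cow::Borrowed(s)) }
    pub fn new_owned(s: String) -> Self { Documentation(Cow::Owned(s)) }
    pub fn as_str(&self) -> &str { &self.0 }
}

fn render<'db>(docs_in_db: &'db str, needs_header: bool) -> Documentation<'db> {
    if needs_header {
        // Only this path pays for an allocation.
        Documentation::new_owned(format!("*Aliased item docs:*\n\n{docs_in_db}"))
    } else {
        // The common path stays borrowed from the database, which is where
        // the memory savings of this rewrite come from.
        Documentation::new_borrowed(docs_in_db)
    }
}

fn main() {
    let d = render("Hello docs", false);
    assert_eq!(d.as_str(), "Hello docs");
}
```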


@ -25,18 +25,14 @@ impl RootDatabase {
// We don't want a mistake in the fixture to crash r-a, so we wrap this in `catch_unwind()`.
std::panic::catch_unwind(|| {
let mut db = RootDatabase::default();
let fixture = test_fixture::ChangeFixture::parse_with_proc_macros(
&db,
text,
minicore.0,
Vec::new(),
);
let fixture =
test_fixture::ChangeFixture::parse_with_proc_macros(text, minicore.0, Vec::new());
db.apply_change(fixture.change);
let files = fixture
.files
.into_iter()
.zip(fixture.file_lines)
.map(|(file_id, range)| (file_id.file_id(&db), range))
.map(|(file_id, range)| (file_id.file_id(), range))
.collect();
(db, files, fixture.sysroot_files)
})
@ -525,7 +521,7 @@ impl_empty_upmap_from_ra_fixture!(
&str,
String,
SmolStr,
Documentation,
Documentation<'_>,
SymbolKind,
CfgExpr,
ReferenceCategory,


@ -33,7 +33,7 @@ pub fn is_rust_fence(s: &str) -> bool {
const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
pub fn format_docs(src: &Documentation) -> String {
pub fn format_docs(src: &Documentation<'_>) -> String {
format_docs_(src.as_str())
}


@ -12,7 +12,7 @@ use either::Either;
use hir::{
Adt, AsAssocItem, DefWithBody, EditionedFileId, FileRange, FileRangeWrapper, HasAttrs,
HasContainer, HasSource, InFile, InFileWrapper, InRealFile, InlineAsmOperand, ItemContainer,
ModuleSource, PathResolution, Semantics, Visibility, sym,
ModuleSource, PathResolution, Semantics, Visibility,
};
use memchr::memmem::Finder;
use parser::SyntaxKind;
@ -169,7 +169,7 @@ impl SearchScope {
entries.extend(
source_root
.iter()
.map(|id| (EditionedFileId::new(db, id, crate_data.edition), None)),
.map(|id| (EditionedFileId::new(db, id, crate_data.edition, krate), None)),
);
}
SearchScope { entries }
@ -183,11 +183,9 @@ impl SearchScope {
let source_root = db.file_source_root(root_file).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
entries.extend(
source_root
.iter()
.map(|id| (EditionedFileId::new(db, id, rev_dep.edition(db)), None)),
);
entries.extend(source_root.iter().map(|id| {
(EditionedFileId::new(db, id, rev_dep.edition(db), rev_dep.into()), None)
}));
}
SearchScope { entries }
}
@ -201,7 +199,7 @@ impl SearchScope {
SearchScope {
entries: source_root
.iter()
.map(|id| (EditionedFileId::new(db, id, of.edition(db)), None))
.map(|id| (EditionedFileId::new(db, id, of.edition(db), of.into()), None))
.collect(),
}
}
@ -368,7 +366,7 @@ impl Definition {
if let Definition::Macro(macro_def) = self {
return match macro_def.kind(db) {
hir::MacroKind::Declarative => {
if macro_def.attrs(db).by_key(sym::macro_export).exists() {
if macro_def.attrs(db).is_macro_export() {
SearchScope::reverse_dependencies(db, module.krate())
} else {
SearchScope::krate(db, module.krate())


@ -3,7 +3,7 @@
Module {
id: ModuleId {
krate: Crate(
Id(3000),
Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(0),
@ -16,7 +16,7 @@
Struct(
Struct {
id: StructId(
3401,
3801,
),
},
),
@ -24,7 +24,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -50,7 +50,7 @@
Struct(
Struct {
id: StructId(
3400,
3800,
),
},
),
@ -58,7 +58,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -84,7 +84,7 @@
Struct(
Struct {
id: StructId(
3400,
3800,
),
},
),
@ -92,7 +92,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -118,7 +118,7 @@
Struct(
Struct {
id: StructId(
3400,
3800,
),
},
),
@ -126,7 +126,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -152,7 +152,7 @@
Struct(
Struct {
id: StructId(
3400,
3800,
),
},
),
@ -160,7 +160,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -186,7 +186,7 @@
Struct(
Struct {
id: StructId(
3401,
3801,
),
},
),
@ -194,7 +194,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -220,7 +220,7 @@
Struct(
Struct {
id: StructId(
3400,
3800,
),
},
),
@ -228,7 +228,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {


@ -3,7 +3,7 @@
Module {
id: ModuleId {
krate: Crate(
Id(3000),
Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(0),
@ -22,7 +22,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -49,14 +49,14 @@
def: TypeAlias(
TypeAlias {
id: TypeAliasId(
6800,
6c00,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -88,7 +88,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -115,14 +115,14 @@
def: Const(
Const {
id: ConstId(
6000,
6400,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -147,14 +147,14 @@
def: Const(
Const {
id: ConstId(
6002,
6402,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -180,7 +180,7 @@
Enum(
Enum {
id: EnumId(
4c00,
5000,
),
},
),
@ -188,7 +188,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -214,7 +214,7 @@
Macro {
id: Macro2Id(
Macro2Id(
4800,
4c00,
),
),
},
@ -222,7 +222,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -248,7 +248,7 @@
Macro {
id: Macro2Id(
Macro2Id(
4800,
4c00,
),
),
},
@ -256,7 +256,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -281,14 +281,14 @@
def: Static(
Static {
id: StaticId(
6400,
6800,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -314,7 +314,7 @@
Struct(
Struct {
id: StructId(
4401,
4801,
),
},
),
@ -322,7 +322,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -348,7 +348,7 @@
Struct(
Struct {
id: StructId(
4400,
4800,
),
},
),
@ -356,7 +356,7 @@
loc: DeclarationLocation {
hir_file_id: MacroFile(
MacroCallId(
Id(3800),
Id(3c00),
),
),
ptr: SyntaxNodePtr {
@ -382,7 +382,7 @@
Struct(
Struct {
id: StructId(
4405,
4805,
),
},
),
@ -390,7 +390,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -418,7 +418,7 @@
Struct(
Struct {
id: StructId(
4406,
4806,
),
},
),
@ -426,7 +426,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -454,7 +454,7 @@
Struct(
Struct {
id: StructId(
4407,
4807,
),
},
),
@ -462,7 +462,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -488,7 +488,7 @@
Struct(
Struct {
id: StructId(
4402,
4802,
),
},
),
@ -496,7 +496,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -521,14 +521,14 @@
def: Trait(
Trait {
id: TraitId(
5800,
5c00,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -554,7 +554,7 @@
Macro {
id: Macro2Id(
Macro2Id(
4800,
4c00,
),
),
},
@ -562,7 +562,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -588,7 +588,7 @@
Union(
Union {
id: UnionId(
5000,
5400,
),
},
),
@ -596,7 +596,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -622,7 +622,7 @@
Module {
id: ModuleId {
krate: Crate(
Id(3000),
Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(1),
@ -632,7 +632,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -658,7 +658,7 @@
Module {
id: ModuleId {
krate: Crate(
Id(3000),
Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(2),
@ -668,7 +668,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -694,7 +694,7 @@
Macro {
id: MacroRulesId(
MacroRulesId(
3401,
3801,
),
),
},
@ -702,7 +702,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -727,14 +727,14 @@
def: Function(
Function {
id: FunctionId(
5c02,
6002,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -761,14 +761,14 @@
def: Function(
Function {
id: FunctionId(
5c01,
6001,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -796,7 +796,7 @@
Macro {
id: MacroRulesId(
MacroRulesId(
3400,
3800,
),
),
},
@ -804,7 +804,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -829,14 +829,14 @@
def: Function(
Function {
id: FunctionId(
5c00,
6000,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -862,7 +862,7 @@
Macro {
id: MacroRulesId(
MacroRulesId(
3401,
3801,
),
),
},
@ -870,7 +870,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -895,14 +895,14 @@
def: Function(
Function {
id: FunctionId(
5c03,
6003,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -930,7 +930,7 @@
Module {
id: ModuleId {
krate: Crate(
Id(3000),
Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(1),
@ -943,7 +943,7 @@
Struct(
Struct {
id: StructId(
4403,
4803,
),
},
),
@ -951,7 +951,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {
@ -977,7 +977,7 @@
Module {
id: ModuleId {
krate: Crate(
Id(3000),
Id(2c00),
),
block: None,
local_id: Idx::<ModuleData>(2),
@ -989,14 +989,14 @@
def: Trait(
Trait {
id: TraitId(
5800,
5c00,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2001),
Id(3001),
),
),
ptr: SyntaxNodePtr {
@ -1022,7 +1022,7 @@
Macro {
id: Macro2Id(
Macro2Id(
4800,
4c00,
),
),
},
@ -1030,7 +1030,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2001),
Id(3001),
),
),
ptr: SyntaxNodePtr {
@ -1056,7 +1056,7 @@
Struct(
Struct {
id: StructId(
4404,
4804,
),
},
),
@ -1064,7 +1064,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2001),
Id(3001),
),
),
ptr: SyntaxNodePtr {
@ -1090,7 +1090,7 @@
Macro {
id: Macro2Id(
Macro2Id(
4800,
4c00,
),
),
},
@ -1098,7 +1098,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2001),
Id(3001),
),
),
ptr: SyntaxNodePtr {
@ -1124,7 +1124,7 @@
Struct(
Struct {
id: StructId(
4404,
4804,
),
},
),
@ -1132,7 +1132,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2001),
Id(3001),
),
),
ptr: SyntaxNodePtr {


@ -13,7 +13,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2001),
Id(3001),
),
),
ptr: SyntaxNodePtr {


@ -13,7 +13,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2001),
Id(3001),
),
),
ptr: SyntaxNodePtr {
@ -47,7 +47,7 @@
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
Id(3000),
),
),
ptr: SyntaxNodePtr {


@ -114,8 +114,7 @@ fn assoc_item_of_trait(
#[cfg(test)]
mod tests {
use expect_test::{Expect, expect};
use hir::FilePosition;
use hir::Semantics;
use hir::{EditionedFileId, FilePosition, Semantics};
use span::Edition;
use syntax::ast::{self, AstNode};
use test_fixture::ChangeFixture;
@ -127,10 +126,11 @@ mod tests {
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> (RootDatabase, FilePosition) {
let mut database = RootDatabase::default();
let change_fixture = ChangeFixture::parse(&database, ra_fixture);
let change_fixture = ChangeFixture::parse(ra_fixture);
database.apply_change(change_fixture.change);
let (file_id, range_or_offset) =
change_fixture.file_position.expect("expected a marker ($0)");
let file_id = EditionedFileId::from_span_guess_origin(&database, file_id);
let offset = range_or_offset.expect_offset();
(database, FilePosition { file_id, offset })
}