Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-02 06:41:48 +00:00)
Try caching macro calls more aggressively
parent f28f15ac6e · commit 97b58f2846
11 changed files with 382 additions and 300 deletions
```diff
@@ -395,6 +395,12 @@ impl BodySourceMap {
         self.expr_map.get(&src).copied()
     }
 
+    pub fn expansions(
+        &self,
+    ) -> impl Iterator<Item = (&InFile<AstPtr<ast::MacroCall>>, &MacroFileId)> {
+        self.expansions.iter()
+    }
+
     pub fn implicit_format_args(
         &self,
         node: InFile<&ast::FormatArgsExpr>,
```
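This first hunk gives `BodySourceMap` a read-only accessor over the macro expansions recorded while a body is lowered, so consumers can map a macro call to its expansion file without re-running expansion. A rough, self-contained model of that shape (the `SourceMap`, `MacroCallPtr`, and `MacroFileId` types below are hypothetical stand-ins, not rust-analyzer's own):

```rust
use std::collections::HashMap;

// Hypothetical stand-ins for InFile<AstPtr<ast::MacroCall>> and MacroFileId.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct MacroCallPtr {
    file: u32,
    offset: u32,
}

#[derive(Clone, Copy, Debug)]
struct MacroFileId(u32);

/// Simplified model of a body source map that remembers which macro call
/// expanded into which macro file while the body was lowered.
#[derive(Default)]
struct SourceMap {
    expansions: HashMap<MacroCallPtr, MacroFileId>,
}

impl SourceMap {
    fn record_expansion(&mut self, call: MacroCallPtr, file: MacroFileId) {
        self.expansions.insert(call, file);
    }

    /// Same shape as the new `BodySourceMap::expansions` accessor: hand out
    /// the recorded (call pointer, expansion file) pairs, nothing else.
    fn expansions(&self) -> impl Iterator<Item = (&MacroCallPtr, &MacroFileId)> {
        self.expansions.iter()
    }
}

fn main() {
    let mut sm = SourceMap::default();
    sm.record_expansion(MacroCallPtr { file: 0, offset: 10 }, MacroFileId(1));
    sm.record_expansion(MacroCallPtr { file: 2, offset: 42 }, MacroFileId(7));

    // Consumers can filter by file before copying the records elsewhere.
    for (call, exp) in sm.expansions().filter(|(call, _)| call.file == 0) {
        println!("call at {}:{} expands into {:?}", call.file, call.offset, exp);
    }
}
```

The `ChildBySource for DefWithBodyId` hunk further down uses exactly this shape: it filters the recorded expansions by file and copies them into its result map.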
```diff
@@ -12,6 +12,7 @@ use intern::Interned;
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
 use span::AstIdMap;
+use stdx::never;
 use syntax::{
     ast::{
         self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName,
@@ -480,7 +481,8 @@ impl ExprCollector<'_> {
                 } else if e.const_token().is_some() {
                     Mutability::Shared
                 } else {
-                    unreachable!("parser only remaps to raw_token() if matching mutability token follows")
+                    never!("parser only remaps to raw_token() if matching mutability token follows");
+                    Mutability::Shared
                 }
             } else {
                 Mutability::from_mutable(e.mut_token().is_some())
```
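This hunk trades a hard `unreachable!` for `stdx::never!` plus a fallback value, so an unexpected token combination in a raw reference expression degrades to `Mutability::Shared` instead of panicking. As I understand `stdx::never!`, it asserts loudly in debug-style builds and only logs in release builds; the sketch below imitates that recover-don't-crash shape with a hypothetical macro and is not rust-analyzer's implementation:

```rust
/// Minimal imitation of the recover-don't-crash pattern behind `stdx::never!`
/// (NOT rust-analyzer's implementation): in debug builds the "impossible"
/// case still panics loudly, in release builds it is only logged and the
/// caller falls through to a safe default.
macro_rules! never {
    ($($arg:tt)*) => {{
        if cfg!(debug_assertions) {
            panic!($($arg)*);
        } else {
            eprintln!("unexpected state: {}", format_args!($($arg)*));
        }
    }};
}

#[derive(Debug, Clone, Copy)]
enum Mutability {
    Shared,
    Mut,
}

/// Mirrors the shape of the changed branch: on an impossible token
/// combination, report it and fall back to `Mutability::Shared` instead of
/// taking the whole analysis down with `unreachable!`.
fn classify(has_const: bool, has_mut: bool) -> Mutability {
    if has_const {
        Mutability::Shared
    } else if has_mut {
        Mutability::Mut
    } else {
        never!("parser only remaps to raw_token() if matching mutability token follows");
        Mutability::Shared
    }
}

fn main() {
    println!("{:?}", classify(true, false));
    println!("{:?}", classify(false, true));
}
```

In an IDE backend a panic here would take down analysis over one malformed expression, so logging and picking a conservative default is the friendlier failure mode.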
```diff
@@ -1006,9 +1008,9 @@ impl ExprCollector<'_> {
             Some((mark, expansion)) => {
                 // Keep collecting even with expansion errors so we can provide completions and
                 // other services in incomplete macro expressions.
-                self.source_map
-                    .expansions
-                    .insert(macro_call_ptr, self.expander.current_file_id().macro_file().unwrap());
+                if let Some(macro_file) = self.expander.current_file_id().macro_file() {
+                    self.source_map.expansions.insert(macro_call_ptr, macro_file);
+                }
                 let prev_ast_id_map = mem::replace(
                     &mut self.ast_id_map,
                     self.db.ast_id_map(self.expander.current_file_id()),
```
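Here the `.unwrap()` on `macro_file()` becomes an `if let`, so a macro call whose current file turns out not to be a macro file is simply not recorded rather than crashing the collector. The unchanged context just below it swaps the expansion's `AstIdMap` in with `mem::replace` and restores the previous map once the expansion has been collected. A minimal standalone sketch of that save/swap/restore shape, with `Collector` and `IdMap` as hypothetical stand-ins:

```rust
use std::mem;

// Hypothetical stand-in for the per-file AstIdMap the collector keeps around.
struct IdMap {
    file: &'static str,
}

struct Collector {
    // Map for the file currently being lowered.
    id_map: IdMap,
}

impl Collector {
    /// The save/swap/restore shape used around macro expansions in the diff:
    /// temporarily install the expansion's map with `mem::replace`, lower the
    /// expanded code, then put the caller's map back.
    fn with_expansion<R>(&mut self, expansion_map: IdMap, f: impl FnOnce(&mut Self) -> R) -> R {
        let prev = mem::replace(&mut self.id_map, expansion_map);
        let result = f(self);
        self.id_map = prev;
        result
    }
}

fn main() {
    let mut collector = Collector { id_map: IdMap { file: "main.rs" } };
    println!("before: {}", collector.id_map.file);
    collector.with_expansion(IdMap { file: "expansion of m!()" }, |c| {
        println!("inside: {}", c.id_map.file);
    });
    println!("after:  {}", collector.id_map.file);
}
```

The `if let` plays the same defensive role as `never!` above: a call that did not actually produce a macro file is skipped instead of unwrapped.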
```diff
@@ -6,7 +6,7 @@
 
 use either::Either;
 use hir_expand::{attrs::collect_attrs, HirFileId};
-use syntax::ast;
+use syntax::{ast, AstPtr};
 
 use crate::{
     db::DefDatabase,
@@ -38,7 +38,7 @@ impl ChildBySource for TraitId {
 
         data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
             |(ast_id, call_id)| {
-                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
             },
         );
         data.items.iter().for_each(|&(_, item)| {
@@ -50,9 +50,10 @@ impl ChildBySource for TraitId {
 impl ChildBySource for ImplId {
     fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
         let data = db.impl_data(*self);
+        // FIXME: Macro calls
         data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
             |(ast_id, call_id)| {
-                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
             },
         );
         data.items.iter().for_each(|&item| {
@@ -80,7 +81,7 @@ impl ChildBySource for ItemScope {
             .for_each(|konst| insert_item_loc(db, res, file_id, konst, keys::CONST));
         self.attr_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
             |(ast_id, call_id)| {
-                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
             },
         );
         self.legacy_macros().for_each(|(_, ids)| {
@@ -88,7 +89,7 @@ impl ChildBySource for ItemScope {
                 if let MacroId::MacroRulesId(id) = id {
                     let loc = id.lookup(db);
                     if loc.id.file_id() == file_id {
-                        res[keys::MACRO_RULES].insert(loc.source(db).value, id);
+                        res[keys::MACRO_RULES].insert(loc.ast_ptr(db).value, id);
                     }
                 }
             })
@@ -100,12 +101,18 @@ impl ChildBySource for ItemScope {
                     if let Some((_, Either::Left(attr))) =
                         collect_attrs(&adt).nth(attr_id.ast_index())
                     {
-                        res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into()));
+                        res[keys::DERIVE_MACRO_CALL]
+                            .insert(AstPtr::new(&attr), (attr_id, call_id, calls.into()));
                     }
                 });
             },
         );
 
+        self.iter_macro_invoc().filter(|(id, _)| id.file_id == file_id).for_each(
+            |(ast_id, &call)| {
+                let ast = ast_id.to_ptr(db.upcast());
+                res[keys::MACRO_CALL].insert(ast, call);
+            },
+        );
         fn add_module_def(
             db: &dyn DefDatabase,
             map: &mut DynMap,
@@ -155,8 +162,8 @@ impl ChildBySource for VariantId {
         for (local_id, source) in arena_map.value.iter() {
             let id = FieldId { parent, local_id };
             match source.clone() {
-                Either::Left(source) => res[keys::TUPLE_FIELD].insert(source, id),
-                Either::Right(source) => res[keys::RECORD_FIELD].insert(source, id),
+                Either::Left(source) => res[keys::TUPLE_FIELD].insert(AstPtr::new(&source), id),
+                Either::Right(source) => res[keys::RECORD_FIELD].insert(AstPtr::new(&source), id),
             }
         }
     }
@@ -171,29 +178,30 @@ impl ChildBySource for EnumId {
 
         let tree = loc.id.item_tree(db);
         let ast_id_map = db.ast_id_map(loc.id.file_id());
-        let root = db.parse_or_expand(loc.id.file_id());
 
         db.enum_data(*self).variants.iter().for_each(|&(variant, _)| {
-            res[keys::ENUM_VARIANT].insert(
-                ast_id_map.get(tree[variant.lookup(db).id.value].ast_id).to_node(&root),
-                variant,
-            );
+            res[keys::ENUM_VARIANT]
+                .insert(ast_id_map.get(tree[variant.lookup(db).id.value].ast_id), variant);
         });
     }
 }
 
 impl ChildBySource for DefWithBodyId {
     fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
-        let body = db.body(*self);
+        let (body, sm) = db.body_with_source_map(*self);
         if let &DefWithBodyId::VariantId(v) = self {
             VariantId::EnumVariantId(v).child_by_source_to(db, res, file_id)
         }
 
+        sm.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(|(ast, &exp_id)| {
+            res[keys::MACRO_CALL].insert(ast.value, exp_id.macro_call_id);
+        });
+
         for (block, def_map) in body.blocks(db) {
             // All block expressions are merged into the same map, because they logically all add
             // inner items to the containing `DefWithBodyId`.
             def_map[DefMap::ROOT].scope.child_by_source_to(db, res, file_id);
-            res[keys::BLOCK].insert(block.lookup(db).ast_id.to_node(db.upcast()), block);
+            res[keys::BLOCK].insert(block.lookup(db).ast_id.to_ptr(db.upcast()), block);
         }
     }
 }
```
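The common thread in this file is that `child_by_source` now stores syntax pointers instead of full syntax nodes: `to_node(...)` and `source(...)` become `to_ptr(...)` and `ast_ptr(...)`, and nodes that are already at hand are wrapped in `AstPtr::new`. A pointer is a small, hashable handle that can be stored and compared without keeping a parse tree alive; resolving it back to a node is deferred until a consumer actually needs one. A toy standalone model of that trade-off (this `NodePtr` is a hypothetical simplification of `AstPtr`, using a plain byte range over a string):

```rust
// A minimal model of the AstPtr idea: instead of storing a full syntax node
// (which keeps a whole parse tree alive), store a small, hashable "pointer",
// here just a byte range, and resolve it back to text only on demand.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct NodePtr {
    start: usize,
    end: usize,
}

impl NodePtr {
    /// The equivalent of `to_node`: resolving needs the root (the source text
    /// here), which is exactly the step the diff avoids doing eagerly.
    fn to_node<'a>(&self, root: &'a str) -> &'a str {
        &root[self.start..self.end]
    }
}

fn main() {
    let source = "fn main() { println!(\"hi\"); }";
    // The "to_ptr" side: cheap, no tree required to store or hash it.
    let ptr = NodePtr { start: 12, end: 27 };

    use std::collections::HashMap;
    let mut map: HashMap<NodePtr, &str> = HashMap::new();
    map.insert(ptr, "macro call");

    // Only when we actually need the node do we touch the root.
    println!("{} -> {}", ptr.to_node(source), map[&ptr]);
}
```

The `EnumId` hunk shows the payoff directly: once variants are stored as pointers, the `db.parse_or_expand(...)` call that materialized the root node can be deleted.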
```diff
@@ -220,13 +228,17 @@ impl ChildBySource for GenericDefId {
         {
             let id = TypeOrConstParamId { parent: *self, local_id };
             match ast_param {
-                ast::TypeOrConstParam::Type(a) => res[keys::TYPE_PARAM].insert(a, id),
-                ast::TypeOrConstParam::Const(a) => res[keys::CONST_PARAM].insert(a, id),
+                ast::TypeOrConstParam::Type(a) => {
+                    res[keys::TYPE_PARAM].insert(AstPtr::new(&a), id)
+                }
+                ast::TypeOrConstParam::Const(a) => {
+                    res[keys::CONST_PARAM].insert(AstPtr::new(&a), id)
+                }
             }
         }
         for (local_id, ast_param) in lts_idx_iter.zip(generic_params_list.lifetime_params()) {
             let id = LifetimeParamId { parent: *self, local_id };
-            res[keys::LIFETIME_PARAM].insert(ast_param, id);
+            res[keys::LIFETIME_PARAM].insert(AstPtr::new(&ast_param), id);
         }
     }
 }
@@ -246,7 +258,7 @@ fn insert_item_loc<ID, N, Data>(
 {
     let loc = id.lookup(db);
     if loc.item_tree_id().file_id() == file_id {
-        res[key].insert(loc.source(db).value, id)
+        res[key].insert(loc.ast_ptr(db).value, id)
    }
 }
 
```
```diff
@@ -13,7 +13,7 @@ use crate::{
     TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId,
 };
 
-pub type Key<K, V> = crate::dyn_map::Key<K, V, AstPtrPolicy<K, V>>;
+pub type Key<K, V> = crate::dyn_map::Key<AstPtr<K>, V, AstPtrPolicy<K, V>>;
 
 pub const BLOCK: Key<ast::BlockExpr, BlockId> = Key::new();
 pub const FUNCTION: Key<ast::Fn, FunctionId> = Key::new();
@@ -39,6 +39,7 @@ pub const LIFETIME_PARAM: Key<ast::LifetimeParam, LifetimeParamId> = Key::new();
 pub const MACRO_RULES: Key<ast::MacroRules, MacroRulesId> = Key::new();
 pub const MACRO2: Key<ast::MacroDef, Macro2Id> = Key::new();
 pub const PROC_MACRO: Key<ast::Fn, ProcMacroId> = Key::new();
+pub const MACRO_CALL: Key<ast::MacroCall, MacroCallId> = Key::new();
 pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new();
 pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, MacroCallId, Box<[Option<MacroCallId>]>)> =
     Key::new();
@@ -54,18 +55,16 @@ pub struct AstPtrPolicy<AST, ID> {
 }
 
 impl<AST: AstNode + 'static, ID: 'static> Policy for AstPtrPolicy<AST, ID> {
-    type K = AST;
+    type K = AstPtr<AST>;
     type V = ID;
-    fn insert(map: &mut DynMap, key: AST, value: ID) {
-        let key = AstPtr::new(&key);
+    fn insert(map: &mut DynMap, key: AstPtr<AST>, value: ID) {
         map.map
             .entry::<FxHashMap<AstPtr<AST>, ID>>()
             .or_insert_with(Default::default)
             .insert(key, value);
     }
-    fn get<'a>(map: &'a DynMap, key: &AST) -> Option<&'a ID> {
-        let key = AstPtr::new(key);
-        map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(&key)
+    fn get<'a>(map: &'a DynMap, key: &AstPtr<AST>) -> Option<&'a ID> {
+        map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(key)
     }
     fn is_empty(map: &DynMap) -> bool {
         map.map.get::<FxHashMap<AstPtr<AST>, ID>>().map_or(true, |it| it.is_empty())
```
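These `keys` hunks carry the type-level side of the change: `Key<K, V>` is now a handle from `AstPtr<K>` to `V` rather than from the node type `K` itself, a `MACRO_CALL` key is added for the newly recorded macro calls, and `AstPtrPolicy::insert`/`get` accept pointers directly instead of constructing one on every call. Below is a simplified, self-contained model of the "typed key into a type-erased map" idea; `DynMap`, `Key`, and `Ptr` here are illustrative stand-ins, not the real `dyn_map` API:

```rust
use std::any::{Any, TypeId};
use std::collections::HashMap;
use std::hash::Hash;
use std::marker::PhantomData;

// Simplified model of the DynMap/Policy pair in this diff: one untyped store,
// with typed `Key<K, V>` handles that pick the right HashMap<K, V> out of it.
// After this commit the K handed to `insert`/`get` is already a pointer-like
// key (AstPtr<AST>), so the policy no longer has to build one itself.
#[derive(Default)]
struct DynMap {
    map: HashMap<TypeId, Box<dyn Any>>,
}

struct Key<K, V> {
    _marker: PhantomData<(K, V)>,
}

impl<K: Eq + Hash + 'static, V: 'static> Key<K, V> {
    const fn new() -> Self {
        Key { _marker: PhantomData }
    }

    fn insert(&self, map: &mut DynMap, key: K, value: V) {
        map.map
            .entry(TypeId::of::<HashMap<K, V>>())
            .or_insert_with(|| Box::new(HashMap::<K, V>::new()))
            .downcast_mut::<HashMap<K, V>>()
            .expect("entry was just created with this type")
            .insert(key, value);
    }

    fn get<'a>(&self, map: &'a DynMap, key: &K) -> Option<&'a V> {
        map.map
            .get(&TypeId::of::<HashMap<K, V>>())?
            .downcast_ref::<HashMap<K, V>>()?
            .get(key)
    }
}

fn main() {
    // Pointer-like key: small, Copy, hashable -- the role AstPtr plays.
    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct Ptr(u32);

    const CALLS: Key<Ptr, &str> = Key::new();

    let mut dyn_map = DynMap::default();
    CALLS.insert(&mut dyn_map, Ptr(7), "macro call id");
    assert_eq!(CALLS.get(&dyn_map, &Ptr(7)), Some(&"macro call id"));
}
```

The real policy keeps one `FxHashMap<AstPtr<AST>, ID>` per key inside the type-erased map, which is the same shape as the `TypeId`-indexed store above.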