internal: Don't kick off inference in Semantics::descend_into_macros_impl

Lukas Wirth 2021-12-20 13:19:48 +01:00
parent 2ca3834c9f
commit 37a87708ae
2 changed files with 48 additions and 13 deletions
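
In short: `descend_into_macros_impl` previously called `self.analyze(&parent)`, which eagerly ran type inference on the enclosing body even though macro descension only needs name-resolution scopes and expansion info. The diff threads an `infer_body: bool` through `analyze_impl` and adds a `new_for_body_no_infer` constructor that leaves the analyzer's `infer` field as `None`. Below is a minimal sketch of that pattern; the types and `run_inference` are illustrative stand-ins, not rust-analyzer's real API.

    // Sketch: the expensive analysis is only run when the caller asks for
    // it, and the cheap path records its absence with `None`.
    struct Analyzer {
        inference: Option<Vec<String>>, // `None` when built without inference
    }

    impl Analyzer {
        fn new(infer_body: bool) -> Analyzer {
            let inference = if infer_body { Some(run_inference()) } else { None };
            Analyzer { inference }
        }
    }

    // Stand-in for the costly inference query (`db.infer(def)` in the diff).
    fn run_inference() -> Vec<String> {
        vec!["i32".to_owned()]
    }

    fn main() {
        // Macro descension only needs scopes, so it takes the cheap path.
        assert!(Analyzer::new(false).inference.is_none());
        // Callers that resolve types still pay for inference.
        assert!(Analyzer::new(true).inference.is_some());
    }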

crates/hir/src/semantics.rs
@@ -528,7 +528,7 @@ impl<'db> SemanticsImpl<'db> {
         if first == last {
             self.descend_into_macros_impl(
                 first,
-                |InFile { value, .. }| {
+                &mut |InFile { value, .. }| {
                     if let Some(node) = value.ancestors().find_map(N::cast) {
                         res.push(node)
                     }
@@ -540,7 +540,7 @@ impl<'db> SemanticsImpl<'db> {
             let mut scratch: SmallVec<[_; 1]> = smallvec![];
             self.descend_into_macros_impl(
                 first,
-                |token| {
+                &mut |token| {
                     scratch.push(token);
                 },
                 false,
@@ -549,7 +549,7 @@ impl<'db> SemanticsImpl<'db> {
             let mut scratch = scratch.into_iter();
             self.descend_into_macros_impl(
                 last,
-                |InFile { value: last, file_id: last_fid }| {
+                &mut |InFile { value: last, file_id: last_fid }| {
                     if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
                         if first_fid == last_fid {
                             if let Some(p) = first.parent() {
@@ -574,20 +574,20 @@ impl<'db> SemanticsImpl<'db> {
     fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let mut res = smallvec![];
-        self.descend_into_macros_impl(token, |InFile { value, .. }| res.push(value), false);
+        self.descend_into_macros_impl(token, &mut |InFile { value, .. }| res.push(value), false);
         res
     }

     fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
         let mut res = token.clone();
-        self.descend_into_macros_impl(token, |InFile { value, .. }| res = value, true);
+        self.descend_into_macros_impl(token, &mut |InFile { value, .. }| res = value, true);
         res
     }

     fn descend_into_macros_impl(
         &self,
         token: SyntaxToken,
-        mut f: impl FnMut(InFile<SyntaxToken>),
+        f: &mut dyn FnMut(InFile<SyntaxToken>),
         single: bool,
     ) {
         let _p = profile::span("descend_into_macros");
@@ -595,7 +595,7 @@ impl<'db> SemanticsImpl<'db> {
             Some(it) => it,
             None => return,
         };
-        let sa = self.analyze(&parent);
+        let sa = self.analyze_no_infer(&parent);
         let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
         let mut cache = self.expansion_info_cache.borrow_mut();
         let mut mcache = self.macro_call_cache.borrow_mut();
@@ -927,14 +927,23 @@ impl<'db> SemanticsImpl<'db> {
     }

     fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
-        self.analyze_impl(node, None)
+        self.analyze_impl(node, None, true)
     }

     fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
-        self.analyze_impl(node, Some(offset))
+        self.analyze_impl(node, Some(offset), true)
     }

-    fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
+    fn analyze_no_infer(&self, node: &SyntaxNode) -> SourceAnalyzer {
+        self.analyze_impl(node, None, false)
+    }
+
+    fn analyze_impl(
+        &self,
+        node: &SyntaxNode,
+        offset: Option<TextSize>,
+        infer_body: bool,
+    ) -> SourceAnalyzer {
         let _p = profile::span("Semantics::analyze_impl");
         let node = self.find_file(node.clone());
         let node = node.as_ref();
@@ -946,7 +955,11 @@ impl<'db> SemanticsImpl<'db> {
         let resolver = match container {
             ChildContainer::DefWithBodyId(def) => {
-                return SourceAnalyzer::new_for_body(self.db, def, node, offset)
+                return if infer_body {
+                    SourceAnalyzer::new_for_body(self.db, def, node, offset)
+                } else {
+                    SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
+                }
             }
             ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
             ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
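
Alongside the inference change, the callback parameter of `descend_into_macros_impl` switches from a generic `mut f: impl FnMut(...)` to `f: &mut dyn FnMut(...)`, so each call site now passes `&mut |...|`. The commit doesn't state a motivation, but the usual trade-off is that a generic parameter is monomorphized once per distinct closure type, while a `dyn` parameter compiles to a single shared body called through a vtable. A small sketch of the two shapes:

    // Generic version: the compiler emits one copy of this function per
    // distinct closure type passed to it.
    fn visit_generic(mut f: impl FnMut(u32)) {
        for token in 0..3 {
            f(token);
        }
    }

    // Dynamic version: one compiled body shared by all callers; the
    // closure is invoked through a vtable instead.
    fn visit_dyn(f: &mut dyn FnMut(u32)) {
        for token in 0..3 {
            f(token);
        }
    }

    fn main() {
        let mut sum = 0;
        visit_generic(|t| sum += t);

        // Matches the new call sites in the diff: `&mut |...| ...`.
        let mut count = 0;
        visit_dyn(&mut |_| count += 1);

        assert_eq!((sum, count), (3, 3));
    }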

crates/hir/src/source_analyzer.rs
@@ -50,7 +50,7 @@ impl SourceAnalyzer {
     pub(crate) fn new_for_body(
         db: &dyn HirDatabase,
         def: DefWithBodyId,
-        node: InFile<&SyntaxNode>,
+        node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
         offset: Option<TextSize>,
     ) -> SourceAnalyzer {
         let (body, source_map) = db.body_with_source_map(def);
@@ -65,7 +65,29 @@ impl SourceAnalyzer {
             body: Some(body),
             body_source_map: Some(source_map),
             infer: Some(db.infer(def)),
-            file_id: node.file_id,
+            file_id,
+        }
+    }
+
+    pub(crate) fn new_for_body_no_infer(
+        db: &dyn HirDatabase,
+        def: DefWithBodyId,
+        node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
+        offset: Option<TextSize>,
+    ) -> SourceAnalyzer {
+        let (body, source_map) = db.body_with_source_map(def);
+        let scopes = db.expr_scopes(def);
+        let scope = match offset {
+            None => scope_for(&scopes, &source_map, node),
+            Some(offset) => scope_for_offset(db, &scopes, &source_map, node.with_value(offset)),
+        };
+        let resolver = resolver_for_scope(db.upcast(), def, scope);
+        SourceAnalyzer {
+            resolver,
+            body: Some(body),
+            body_source_map: Some(source_map),
+            infer: None,
+            file_id,
         }
     }
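
Both `new_for_body` and the new `new_for_body_no_infer` bind their `node` argument with an `@` pattern, `node @ InFile { file_id, .. }`, which keeps the whole value bound while also destructuring `file_id` out of it; that is what lets the struct literal use the shorthand `file_id,` instead of `file_id: node.file_id,`. A self-contained sketch with a hypothetical `InFile` stand-in:

    // Hypothetical stand-in for hir's `InFile<T>`.
    #[derive(Clone, Copy)]
    struct InFile<T> {
        file_id: u32,
        value: T,
    }

    // `node @ InFile { file_id, .. }` binds the whole argument as `node`
    // and extracts `file_id` in the same pattern.
    fn describe(node @ InFile { file_id, .. }: InFile<&str>) -> String {
        format!("file {file_id}: {}", node.value)
    }

    fn main() {
        let node = InFile { file_id: 7, value: "fn main() {}" };
        assert_eq!(describe(node), "file 7: fn main() {}");
    }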