Remove most of the duplication from Semantics{,Impl} via deref

Maybe Waffle 2023-09-13 22:02:28 +00:00
parent 089ae47ebe
commit a219dbda2b
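
The refactor below leans on `Deref`-based delegation: `Semantics` gains an `impl ops::Deref` with `Target = SemanticsImpl<'db>`, the methods on `SemanticsImpl` become `pub`, and the hand-written forwarding methods on `Semantics` are deleted because method calls on `&Semantics` auto-deref to `&SemanticsImpl`. Below is a minimal, self-contained sketch of the pattern using hypothetical `Engine`/`EngineImpl` types, not the rust-analyzer code itself:

```rust
use std::ops;

// Hypothetical stand-ins for rust-analyzer's Semantics / SemanticsImpl pair;
// the real types live in crates/hir/src/semantics.rs and also carry a
// database handle plus caches.
struct EngineImpl {
    answer: u32,
}

impl EngineImpl {
    // In the real change the inner methods are made `pub`; here everything is
    // in one module, so default visibility is enough for the sketch.
    fn compute(&self) -> u32 {
        self.answer
    }
}

struct Engine {
    imp: EngineImpl,
}

// Deref from the wrapper to the implementation type: a call like
// `engine.compute()` auto-derefs &Engine -> &EngineImpl, so the wrapper no
// longer needs a hand-written forwarding method per inner method.
impl ops::Deref for Engine {
    type Target = EngineImpl;

    fn deref(&self) -> &Self::Target {
        &self.imp
    }
}

fn main() {
    let engine = Engine { imp: EngineImpl { answer: 42 } };
    // Resolved through Deref; `Engine` itself defines no `compute`.
    assert_eq!(engine.compute(), 42);
}
```

Only the wrappers that still adapt their result type, such as `resolve_method_call` mapping through `Function::from`, remain on `Semantics` in the diff.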


@@ -127,165 +127,24 @@ impl<DB> fmt::Debug for Semantics<'_, DB> {
     }
 }
+impl<'db, DB> ops::Deref for Semantics<'db, DB> {
+    type Target = SemanticsImpl<'db>;
+    fn deref(&self) -> &Self::Target {
+        &self.imp
+    }
+}
 impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn new(db: &DB) -> Semantics<'_, DB> {
         let impl_ = SemanticsImpl::new(db);
         Semantics { db, imp: impl_ }
     }
-    pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
-        self.imp.parse(file_id)
-    }
-    pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
-        self.imp.parse_or_expand(file_id)
-    }
-    pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
-        self.imp.expand(macro_call)
-    }
-    /// If `item` has an attribute macro attached to it, expands it.
-    pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
-        self.imp.expand_attr_macro(item)
-    }
-    pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
-        self.imp.expand_derive_as_pseudo_attr_macro(attr)
-    }
-    pub fn resolve_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<Option<Macro>>> {
-        self.imp.resolve_derive_macro(derive)
-    }
-    pub fn expand_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<SyntaxNode>> {
-        self.imp.expand_derive_macro(derive)
-    }
-    pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
-        self.imp.is_attr_macro_call(item)
-    }
-    pub fn is_derive_annotated(&self, item: &ast::Adt) -> bool {
-        self.imp.is_derive_annotated(item)
-    }
-    /// Expand the macro call with a different token tree, mapping the `token_to_map` down into the
-    /// expansion. `token_to_map` should be a token from the `speculative args` node.
-    pub fn speculative_expand(
-        &self,
-        actual_macro_call: &ast::MacroCall,
-        speculative_args: &ast::TokenTree,
-        token_to_map: SyntaxToken,
-    ) -> Option<(SyntaxNode, SyntaxToken)> {
-        self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
-    }
-    /// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
-    /// expansion. `token_to_map` should be a token from the `speculative args` node.
-    pub fn speculative_expand_attr_macro(
-        &self,
-        actual_macro_call: &ast::Item,
-        speculative_args: &ast::Item,
-        token_to_map: SyntaxToken,
-    ) -> Option<(SyntaxNode, SyntaxToken)> {
-        self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
-    }
-    pub fn speculative_expand_derive_as_pseudo_attr_macro(
-        &self,
-        actual_macro_call: &ast::Attr,
-        speculative_args: &ast::Attr,
-        token_to_map: SyntaxToken,
-    ) -> Option<(SyntaxNode, SyntaxToken)> {
-        self.imp.speculative_expand_derive_as_pseudo_attr_macro(
-            actual_macro_call,
-            speculative_args,
-            token_to_map,
-        )
-    }
-    /// Descend the token into its macro call if it is part of one, returning the token in the
-    /// expansion that it is associated with. If `offset` points into the token's range, it will
-    /// be considered for the mapping in case of inline format args.
-    pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
-        self.imp.descend_into_macros_single(token, offset)
-    }
-    /// Descend the token into its macro call if it is part of one, returning the tokens in the
-    /// expansion that it is associated with. If `offset` points into the token's range, it will
-    /// be considered for the mapping in case of inline format args.
-    pub fn descend_into_macros(
-        &self,
-        token: SyntaxToken,
-        offset: TextSize,
-    ) -> SmallVec<[SyntaxToken; 1]> {
-        self.imp.descend_into_macros(token, offset)
-    }
-    /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.
-    ///
-    /// Returns the original non descended token if none of the mapped counterparts have the same text.
-    pub fn descend_into_macros_with_same_text(
-        &self,
-        token: SyntaxToken,
-        offset: TextSize,
-    ) -> SmallVec<[SyntaxToken; 1]> {
-        self.imp.descend_into_macros_with_same_text(token, offset)
-    }
-    pub fn descend_into_macros_with_kind_preference(
-        &self,
-        token: SyntaxToken,
-        offset: TextSize,
-    ) -> SyntaxToken {
-        self.imp.descend_into_macros_with_kind_preference(token, offset)
-    }
-    /// Maps a node down by mapping its first and last token down.
-    pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
-        self.imp.descend_node_into_attributes(node)
-    }
-    /// Search for a definition's source and cache its syntax tree
-    pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
-    where
-        Def::Ast: AstNode,
-    {
-        self.imp.source(def)
-    }
     pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
         self.imp.find_file(syntax_node).file_id
     }
-    /// Attempts to map the node out of macro expanded files returning the original file range.
-    /// If upmapping is not possible, this will fall back to the range of the macro call of the
-    /// macro file the node resides in.
-    pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
-        self.imp.original_range(node)
-    }
-    /// Attempts to map the node out of macro expanded files returning the original file range.
-    pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
-        self.imp.original_range_opt(node)
-    }
-    /// Attempts to map the node out of macro expanded files.
-    /// This only work for attribute expansions, as other ones do not have nodes as input.
-    pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
-        self.imp.original_ast_node(node)
-    }
-    /// Attempts to map the node out of macro expanded files.
-    /// This only work for attribute expansions, as other ones do not have nodes as input.
-    pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
-        self.imp.original_syntax_node(node)
-    }
-    pub fn diagnostics_display_range(&self, diagnostics: InFile<SyntaxNodePtr>) -> FileRange {
-        self.imp.diagnostics_display_range(diagnostics)
-    }
     pub fn token_ancestors_with_macros(
         &self,
         token: SyntaxToken,
@@ -293,19 +152,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
     }
-    /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
-    pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
-        self.imp.ancestors_with_macros(node)
-    }
-    pub fn ancestors_at_offset_with_macros(
-        &self,
-        node: &SyntaxNode,
-        offset: TextSize,
-    ) -> impl Iterator<Item = SyntaxNode> + '_ {
-        self.imp.ancestors_at_offset_with_macros(node, offset)
-    }
     /// Find an AstNode by offset inside SyntaxNode, if it is inside *Macrofile*,
     /// search up until it is of the target AstNode type
     pub fn find_node_at_offset_with_macros<N: AstNode>(
@@ -336,53 +182,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
     }
-    pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
-        self.imp.resolve_lifetime_param(lifetime)
-    }
-    pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
-        self.imp.resolve_label(lifetime)
-    }
-    pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
-        self.imp.resolve_type(ty)
-    }
-    pub fn resolve_trait(&self, trait_: &ast::Path) -> Option<Trait> {
-        self.imp.resolve_trait(trait_)
-    }
-    pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment>> {
-        self.imp.expr_adjustments(expr)
-    }
-    pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
-        self.imp.type_of_expr(expr)
-    }
-    pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
-        self.imp.type_of_pat(pat)
-    }
-    /// It also includes the changes that binding mode makes in the type. For example in
-    /// `let ref x @ Some(_) = None` the result of `type_of_pat` is `Option<T>` but the result
-    /// of this function is `&mut Option<T>`
-    pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type> {
-        self.imp.type_of_binding_in_pat(pat)
-    }
-    pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
-        self.imp.type_of_self(param)
-    }
-    pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
-        self.imp.pattern_adjustments(pat)
-    }
-    pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
-        self.imp.binding_mode_of_pat(pat)
-    }
     pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
         self.imp.resolve_method_call(call).map(Function::from)
     }
@@ -417,61 +216,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.resolve_try_expr(try_expr).map(Function::from)
     }
-    pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
-        self.imp.resolve_method_call_as_callable(call)
-    }
-    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
-        self.imp.resolve_field(field)
-    }
-    pub fn resolve_record_field(
-        &self,
-        field: &ast::RecordExprField,
-    ) -> Option<(Field, Option<Local>, Type)> {
-        self.imp.resolve_record_field(field)
-    }
-    pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> {
-        self.imp.resolve_record_pat_field(field)
-    }
-    pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
-        self.imp.resolve_macro_call(macro_call)
-    }
-    pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
-        self.imp.is_unsafe_macro_call(macro_call)
-    }
-    pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
-        self.imp.resolve_attr_macro_call(item)
-    }
-    pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
-        self.imp.resolve_path(path)
-    }
     pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
         self.imp.resolve_variant(record_lit).map(VariantDef::from)
     }
-    pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
-        self.imp.resolve_bind_pat_to_const(pat)
-    }
-    pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
-        self.imp.record_literal_missing_fields(literal)
-    }
-    pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
-        self.imp.record_pattern_missing_fields(pattern)
-    }
-    pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
-        self.imp.to_def(src)
-    }
     pub fn to_module_def(&self, file: FileId) -> Option<Module> {
         self.imp.to_module_def(file).next()
     }
@@ -479,39 +227,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
         self.imp.to_module_def(file)
     }
-    pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
-        self.imp.scope(node)
-    }
-    pub fn scope_at_offset(
-        &self,
-        node: &SyntaxNode,
-        offset: TextSize,
-    ) -> Option<SemanticsScope<'db>> {
-        self.imp.scope_at_offset(node, offset)
-    }
-    pub fn assert_contains_node(&self, node: &SyntaxNode) {
-        self.imp.assert_contains_node(node)
-    }
-    pub fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
-        self.imp.is_unsafe_method_call(method_call_expr)
-    }
-    pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
-        self.imp.is_unsafe_ref_expr(ref_expr)
-    }
-    pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
-        self.imp.is_unsafe_ident_pat(ident_pat)
-    }
-    /// Returns `true` if the `node` is inside an `unsafe` context.
-    pub fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
-        self.imp.is_inside_unsafe(expr)
-    }
 }
 impl<'db> SemanticsImpl<'db> {
@@ -525,32 +240,33 @@ impl<'db> SemanticsImpl<'db> {
         }
     }
-    fn parse(&self, file_id: FileId) -> ast::SourceFile {
+    pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
         let tree = self.db.parse(file_id).tree();
         self.cache(tree.syntax().clone(), file_id.into());
         tree
     }
-    fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
+    pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
         let node = self.db.parse_or_expand(file_id);
         self.cache(node.clone(), file_id);
         node
     }
-    fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+    pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
         let sa = self.analyze_no_infer(macro_call.syntax())?;
         let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
         let node = self.parse_or_expand(file_id);
         Some(node)
     }
-    fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
+    /// If `item` has an attribute macro attached to it, expands it.
+    pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
         let src = self.wrap_node_infile(item.clone());
         let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
         Some(self.parse_or_expand(macro_call_id.as_file()))
     }
-    fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
+    pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
         let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
         let src = self.wrap_node_infile(attr.clone());
         let call_id = self.with_ctx(|ctx| {
@@ -559,7 +275,7 @@ impl<'db> SemanticsImpl<'db> {
         Some(self.parse_or_expand(call_id.as_file()))
     }
-    fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
+    pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
         let calls = self.derive_macro_calls(attr)?;
         self.with_ctx(|ctx| {
             Some(
@@ -573,7 +289,7 @@ impl<'db> SemanticsImpl<'db> {
         })
     }
-    fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<SyntaxNode>> {
+    pub fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<SyntaxNode>> {
         let res: Vec<_> = self
             .derive_macro_calls(attr)?
             .into_iter()
@@ -598,19 +314,21 @@ impl<'db> SemanticsImpl<'db> {
         })
     }
-    fn is_derive_annotated(&self, adt: &ast::Adt) -> bool {
+    pub fn is_derive_annotated(&self, adt: &ast::Adt) -> bool {
         let file_id = self.find_file(adt.syntax()).file_id;
         let adt = InFile::new(file_id, adt);
         self.with_ctx(|ctx| ctx.has_derives(adt))
     }
-    fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
+    pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
         let file_id = self.find_file(item.syntax()).file_id;
         let src = InFile::new(file_id, item.clone());
         self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
     }
-    fn speculative_expand(
+    /// Expand the macro call with a different token tree, mapping the `token_to_map` down into the
+    /// expansion. `token_to_map` should be a token from the `speculative args` node.
+    pub fn speculative_expand(
         &self,
         actual_macro_call: &ast::MacroCall,
         speculative_args: &ast::TokenTree,
@@ -633,7 +351,9 @@ impl<'db> SemanticsImpl<'db> {
         )
     }
-    fn speculative_expand_attr(
+    /// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
+    /// expansion. `token_to_map` should be a token from the `speculative args` node.
+    pub fn speculative_expand_attr_macro(
         &self,
         actual_macro_call: &ast::Item,
         speculative_args: &ast::Item,
@@ -649,7 +369,7 @@ impl<'db> SemanticsImpl<'db> {
         )
     }
-    fn speculative_expand_derive_as_pseudo_attr_macro(
+    pub fn speculative_expand_derive_as_pseudo_attr_macro(
         &self,
         actual_macro_call: &ast::Attr,
         speculative_args: &ast::Attr,
@@ -668,8 +388,9 @@ impl<'db> SemanticsImpl<'db> {
         )
     }
-    // This might not be the correct way to do this, but it works for now
-    fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
+    /// Maps a node down by mapping its first and last token down.
+    pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
+        // This might not be the correct way to do this, but it works for now
         let mut res = smallvec![];
         let tokens = (|| {
             let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
@@ -723,7 +444,10 @@ impl<'db> SemanticsImpl<'db> {
         res
     }
-    fn descend_into_macros(
+    /// Descend the token into its macro call if it is part of one, returning the tokens in the
+    /// expansion that it is associated with. If `offset` points into the token's range, it will
+    /// be considered for the mapping in case of inline format args.
+    pub fn descend_into_macros(
        &self,
        token: SyntaxToken,
        offset: TextSize,
@@ -736,7 +460,10 @@ impl<'db> SemanticsImpl<'db> {
         res
     }
-    fn descend_into_macros_with_same_text(
+    /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.
+    ///
+    /// Returns the original non descended token if none of the mapped counterparts have the same text.
+    pub fn descend_into_macros_with_same_text(
        &self,
        token: SyntaxToken,
        offset: TextSize,
@@ -755,7 +482,7 @@ impl<'db> SemanticsImpl<'db> {
         res
     }
-    fn descend_into_macros_with_kind_preference(
+    pub fn descend_into_macros_with_kind_preference(
        &self,
        token: SyntaxToken,
        offset: TextSize,
@@ -785,7 +512,10 @@ impl<'db> SemanticsImpl<'db> {
         res.unwrap_or(token)
     }
-    fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
+    /// Descend the token into its macro call if it is part of one, returning the token in the
+    /// expansion that it is associated with. If `offset` points into the token's range, it will
+    /// be considered for the mapping in case of inline format args.
+    pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
         let mut res = token.clone();
         self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
             res = value;
@@ -995,17 +725,23 @@ impl<'db> SemanticsImpl<'db> {
         })
     }
-    fn original_range(&self, node: &SyntaxNode) -> FileRange {
+    /// Attempts to map the node out of macro expanded files returning the original file range.
+    /// If upmapping is not possible, this will fall back to the range of the macro call of the
+    /// macro file the node resides in.
+    pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
         let node = self.find_file(node);
         node.original_file_range(self.db.upcast())
     }
-    fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
+    /// Attempts to map the node out of macro expanded files returning the original file range.
+    pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
         let node = self.find_file(node);
         node.original_file_range_opt(self.db.upcast())
     }
-    fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
+    /// Attempts to map the node out of macro expanded files.
+    /// This only work for attribute expansions, as other ones do not have nodes as input.
+    pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
         self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
             |InFile { file_id, value }| {
                 self.cache(find_root(value.syntax()), file_id);
@@ -1014,7 +750,9 @@ impl<'db> SemanticsImpl<'db> {
         )
     }
-    fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
+    /// Attempts to map the node out of macro expanded files.
+    /// This only work for attribute expansions, as other ones do not have nodes as input.
+    pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
         let InFile { file_id, .. } = self.find_file(node);
         InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map(
             |InFile { file_id, value }| {
@@ -1024,7 +762,7 @@ impl<'db> SemanticsImpl<'db> {
         )
     }
-    fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
+    pub fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
         let root = self.parse_or_expand(src.file_id);
         let node = src.map(|it| it.to_node(&root));
         node.as_ref().original_file_range(self.db.upcast())
@@ -1037,7 +775,8 @@ impl<'db> SemanticsImpl<'db> {
         token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
     }
-    fn ancestors_with_macros(
+    /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
+    pub fn ancestors_with_macros(
        &self,
        node: SyntaxNode,
     ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
@@ -1055,7 +794,7 @@ impl<'db> SemanticsImpl<'db> {
         .map(|it| it.value)
     }
-    fn ancestors_at_offset_with_macros(
+    pub fn ancestors_at_offset_with_macros(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
@@ -1065,7 +804,7 @@ impl<'db> SemanticsImpl<'db> {
             .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
     }
-    fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
+    pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
         let text = lifetime.text();
         let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
             let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?;
@@ -1076,7 +815,7 @@ impl<'db> SemanticsImpl<'db> {
         ToDef::to_def(self, src)
     }
-    fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
+    pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
         let text = lifetime.text();
         let label = lifetime.syntax().ancestors().find_map(|syn| {
             let label = match_ast! {
@@ -1098,7 +837,7 @@ impl<'db> SemanticsImpl<'db> {
         ToDef::to_def(self, src)
     }
-    fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
+    pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
         let analyze = self.analyze(ty.syntax())?;
         let ctx = LowerCtx::with_file_id(self.db.upcast(), analyze.file_id);
         let ty = hir_ty::TyLoweringContext::new(
@@ -1110,7 +849,7 @@ impl<'db> SemanticsImpl<'db> {
         Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
     }
-    fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
+    pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
         let analyze = self.analyze(path.syntax())?;
         let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
         let ctx = LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
@@ -1121,7 +860,7 @@ impl<'db> SemanticsImpl<'db> {
         }
     }
-    fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment>> {
+    pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment>> {
         let mutability = |m| match m {
             hir_ty::Mutability::Not => Mutability::Shared,
             hir_ty::Mutability::Mut => Mutability::Mut,
@@ -1165,33 +904,36 @@ impl<'db> SemanticsImpl<'db> {
         })
     }
-    fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
+    pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
         self.analyze(expr.syntax())?
             .type_of_expr(self.db, expr)
             .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
     }
-    fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
+    pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
         self.analyze(pat.syntax())?
             .type_of_pat(self.db, pat)
             .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
     }
-    fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type> {
+    /// It also includes the changes that binding mode makes in the type. For example in
+    /// `let ref x @ Some(_) = None` the result of `type_of_pat` is `Option<T>` but the result
+    /// of this function is `&mut Option<T>`
+    pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type> {
         self.analyze(pat.syntax())?.type_of_binding_in_pat(self.db, pat)
     }
-    fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
+    pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
         self.analyze(param.syntax())?.type_of_self(self.db, param)
     }
-    fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+    pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
         self.analyze(pat.syntax())
             .and_then(|it| it.pattern_adjustments(self.db, pat))
             .unwrap_or_default()
     }
-    fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
+    pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
         self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
     }
@@ -1226,32 +968,32 @@ impl<'db> SemanticsImpl<'db> {
         self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr)
     }
-    fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
+    pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
         self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
     }
-    fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
+    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
         self.analyze(field.syntax())?.resolve_field(self.db, field)
     }
-    fn resolve_record_field(
+    pub fn resolve_record_field(
        &self,
        field: &ast::RecordExprField,
     ) -> Option<(Field, Option<Local>, Type)> {
         self.analyze(field.syntax())?.resolve_record_field(self.db, field)
     }
-    fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> {
+    pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> {
         self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
     }
-    fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
+    pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
         let sa = self.analyze(macro_call.syntax())?;
         let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
         sa.resolve_macro_call(self.db, macro_call)
     }
-    fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
+    pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
         let sa = match self.analyze(macro_call.syntax()) {
             Some(it) => it,
             None => return false,
@@ -1260,7 +1002,7 @@ impl<'db> SemanticsImpl<'db> {
         sa.is_unsafe_macro_call(self.db, macro_call)
     }
-    fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
+    pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
         let item_in_file = self.wrap_node_infile(item.clone());
         let id = self.with_ctx(|ctx| {
             let macro_call_id = ctx.item_to_macro_call(item_in_file)?;
@@ -1269,7 +1011,7 @@ impl<'db> SemanticsImpl<'db> {
         Some(Macro { id })
     }
-    fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
+    pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
         self.analyze(path.syntax())?.resolve_path(self.db, path)
     }
@@ -1277,17 +1019,17 @@ impl<'db> SemanticsImpl<'db> {
         self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
     }
-    fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
+    pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
         self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
     }
-    fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
+    pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
         self.analyze(literal.syntax())
             .and_then(|it| it.record_literal_missing_fields(self.db, literal))
             .unwrap_or_default()
     }
-    fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+    pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
         self.analyze(pattern.syntax())
             .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
             .unwrap_or_default()
@@ -1299,7 +1041,7 @@ impl<'db> SemanticsImpl<'db> {
         f(&mut ctx)
     }
-    fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
+    pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
         let src = self.find_file(src.syntax()).with_value(src).cloned();
         T::to_def(self, src)
     }
@@ -1308,7 +1050,7 @@ impl<'db> SemanticsImpl<'db> {
         self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from)
     }
-    fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
+    pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
         self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
             db: self.db,
             file_id,
@@ -1316,7 +1058,11 @@ impl<'db> SemanticsImpl<'db> {
         })
     }
-    fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> Option<SemanticsScope<'db>> {
+    pub fn scope_at_offset(
+        &self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> Option<SemanticsScope<'db>> {
         self.analyze_with_offset_no_infer(node, offset).map(
             |SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
                 db: self.db,
@@ -1326,7 +1072,8 @@ impl<'db> SemanticsImpl<'db> {
         )
     }
-    fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
+    /// Search for a definition's source and cache its syntax tree
+    pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
     where
         Def::Ast: AstNode,
     {
@@ -1391,7 +1138,7 @@ impl<'db> SemanticsImpl<'db> {
         assert!(prev == None || prev == Some(file_id))
     }
-    fn assert_contains_node(&self, node: &SyntaxNode) {
+    pub fn assert_contains_node(&self, node: &SyntaxNode) {
         self.find_file(node);
     }
@@ -1427,7 +1174,7 @@ impl<'db> SemanticsImpl<'db> {
         InFile::new(file_id, node)
     }
-    fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
+    pub fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
         method_call_expr
             .receiver()
             .and_then(|expr| {
@@ -1450,7 +1197,7 @@ impl<'db> SemanticsImpl<'db> {
             .unwrap_or(false)
     }
-    fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
+    pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
         ref_expr
             .expr()
             .and_then(|expr| {
@@ -1469,7 +1216,7 @@ impl<'db> SemanticsImpl<'db> {
         // more than it should with the current implementation.
     }
-    fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
+    pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
         if ident_pat.ref_token().is_none() {
             return false;
         }
@@ -1512,7 +1259,8 @@ impl<'db> SemanticsImpl<'db> {
             .unwrap_or(false)
     }
-    fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
+    /// Returns `true` if the `node` is inside an `unsafe` context.
+    pub fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
         let Some(enclosing_item) =
             expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast)
         else {