// NOTE(review): This source has been mangled by an extraction pass: the file is
// collapsed onto a few very long physical lines and most generic-argument lists
// (`<...>`) have been stripped. Because the first line begins with `//!`, every
// item on it (`mod source_to_def;`, all `use` declarations, the
// `CONTINUE_NO_BREAKS` const — presumably `ControlFlow<Infallible>` originally —
// and the opening of `pub enum PathResolution`) is swallowed by that doc
// comment. Recover the canonical text from version control rather than editing
// this file by hand.
//! See `Semantics`. mod source_to_def; use std::{ cell::RefCell, convert::Infallible, fmt, iter, mem, ops::{self, ControlFlow, Not}, }; use either::Either; use hir_def::{ hir::Expr, lower::LowerCtx, nameres::MacroSubNs, path::ModPath, resolver::{self, HasResolver, Resolver, TypeNs}, type_ref::Mutability, AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId, }; use hir_expand::{ attrs::collect_attrs, builtin::{BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, files::InRealFile, inert_attr_macro::find_builtin_attr_idx, name::AsName, FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt, }; use intern::Symbol; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; use span::{EditionedFileId, FileId}; use stdx::TupleExt; use syntax::{ algo::skip_trivia_token, ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _}, match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize, }; use crate::{ db::HirDatabase, semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, source_analyzer::{resolve_hir_path, SourceAnalyzer}, Access, Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam, Crate, DeriveHelper, Enum, Field, Function, HasSource, HirFileId, Impl, InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, Static, Struct, ToolModule, Trait, TraitAlias, TupleField, Type, TypeAlias, TypeParam, Union, Variant, VariantDef, }; const CONTINUE_NO_BREAKS: ControlFlow = ControlFlow::Continue(()); #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum PathResolution { /// An item Def(ModuleDef), /// A local binding (only value namespace) Local(Local), /// A type parameter TypeParam(TypeParam), /// A const parameter ConstParam(ConstParam), SelfType(Impl), BuiltinAttr(BuiltinAttr), ToolModule(ToolModule), 
// NOTE(review): continuation of `PathResolution` (its `DeriveHelper` variant and
// closing brace), followed by `in_type_ns`, which projects a path resolution
// into the type namespace (`Option` here was presumably `Option<TypeNs>` —
// confirm against version control); then `TypeInfo` (original vs. adjusted
// type of an expression/pattern), the `Semantics` façade over `SemanticsImpl`
// with its per-query caches (source-to-def, root-node→file, macro-call→
// expansion-file), and the start of a `fmt::Debug` impl whose string literal is
// split across the mangled line boundary below.
DeriveHelper(DeriveHelper), } impl PathResolution { pub(crate) fn in_type_ns(&self) -> Option { match self { PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())), PathResolution::Def(ModuleDef::BuiltinType(builtin)) => { Some(TypeNs::BuiltinType((*builtin).into())) } PathResolution::Def( ModuleDef::Const(_) | ModuleDef::Variant(_) | ModuleDef::Macro(_) | ModuleDef::Function(_) | ModuleDef::Module(_) | ModuleDef::Static(_) | ModuleDef::Trait(_) | ModuleDef::TraitAlias(_), ) => None, PathResolution::Def(ModuleDef::TypeAlias(alias)) => { Some(TypeNs::TypeAliasId((*alias).into())) } PathResolution::BuiltinAttr(_) | PathResolution::ToolModule(_) | PathResolution::Local(_) | PathResolution::DeriveHelper(_) | PathResolution::ConstParam(_) => None, PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())), PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())), } } } #[derive(Debug)] pub struct TypeInfo { /// The original type of the expression or pattern. pub original: Type, /// The adjusted type, if an adjustment happened. pub adjusted: Option, } impl TypeInfo { pub fn original(self) -> Type { self.original } pub fn has_adjustment(&self) -> bool { self.adjusted.is_some() } /// The adjusted type, or the original in case no adjustments occurred. pub fn adjusted(self) -> Type { self.adjusted.unwrap_or(self.original) } } /// Primary API to get semantic information, like types, from syntax trees. pub struct Semantics<'db, DB> { pub db: &'db DB, imp: SemanticsImpl<'db>, } pub struct SemanticsImpl<'db> { pub db: &'db dyn HirDatabase, s2d_cache: RefCell, /// Rootnode to HirFileId cache root_to_file_cache: RefCell>, /// MacroCall to its expansion's MacroFileId cache macro_call_cache: RefCell, MacroFileId>>, } impl fmt::Debug for Semantics<'_, DB> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Semantics {{ ... 
}}") } } impl<'db, DB> ops::Deref for Semantics<'db, DB> { type Target = SemanticsImpl<'db>; fn deref(&self) -> &Self::Target { &self.imp } }
// NOTE(review): the impl below is the public convenience layer over
// `SemanticsImpl`: `new`, file ↔ `HirFileId` lookup, offset-based node searches
// that descend into macro expansions, `resolve_*` helpers that map
// await/prefix/index/binary/try expressions to the trait functions they desugar
// to, and `to_*_def` conversions from AST nodes to HIR definitions. The
// return-type generics (`Option<Function>`, `Option<Module>`, …) were stripped
// by the extraction pass — the `.map(Function::from)` etc. calls show the
// intended inner types; confirm signatures against version control.
impl<'db, DB: HirDatabase> Semantics<'db, DB> { pub fn new(db: &DB) -> Semantics<'_, DB> { let impl_ = SemanticsImpl::new(db); Semantics { db, imp: impl_ } } pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId { self.imp.find_file(syntax_node).file_id } pub fn token_ancestors_with_macros( &self, token: SyntaxToken, ) -> impl Iterator + '_ { token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it)) } /// Find an AstNode by offset inside SyntaxNode, if it is inside *Macrofile*, /// search up until it is of the target AstNode type pub fn find_node_at_offset_with_macros( &self, node: &SyntaxNode, offset: TextSize, ) -> Option { self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast) } /// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*, /// descend it and find again // FIXME: Rethink this API pub fn find_node_at_offset_with_descend( &self, node: &SyntaxNode, offset: TextSize, ) -> Option { self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast) } /// Find an AstNode by offset inside SyntaxNode, if it is inside an attribute macro call, /// descend it and find again // FIXME: Rethink this API pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>( &'slf self, node: &SyntaxNode, offset: TextSize, ) -> impl Iterator + 'slf { self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast)) } pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option { self.imp.resolve_await_to_poll(await_expr).map(Function::from) } pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option { self.imp.resolve_prefix_expr(prefix_expr).map(Function::from) } pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option { self.imp.resolve_index_expr(index_expr).map(Function::from) } pub fn 
resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option { self.imp.resolve_bin_expr(bin_expr).map(Function::from) } pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option { self.imp.resolve_try_expr(try_expr).map(Function::from) } pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option { self.imp.resolve_variant(record_lit).map(VariantDef::from) } pub fn file_to_module_def(&self, file: impl Into) -> Option { self.imp.file_to_module_defs(file.into()).next() } pub fn file_to_module_defs(&self, file: impl Into) -> impl Iterator { self.imp.file_to_module_defs(file.into()) } pub fn to_adt_def(&self, a: &ast::Adt) -> Option { self.imp.to_def(a).map(Adt::from) } pub fn to_const_def(&self, c: &ast::Const) -> Option { self.imp.to_def(c).map(Const::from) } pub fn to_enum_def(&self, e: &ast::Enum) -> Option { self.imp.to_def(e).map(Enum::from) } pub fn to_enum_variant_def(&self, v: &ast::Variant) -> Option { self.imp.to_def(v).map(Variant::from) } pub fn to_fn_def(&self, f: &ast::Fn) -> Option { self.imp.to_def(f).map(Function::from) } pub fn to_impl_def(&self, i: &ast::Impl) -> Option { self.imp.to_def(i).map(Impl::from) } pub fn to_macro_def(&self, m: &ast::Macro) -> Option { self.imp.to_def(m).map(Macro::from) } pub fn to_module_def(&self, m: &ast::Module) -> Option { self.imp.to_def(m).map(Module::from) } pub fn to_static_def(&self, s: &ast::Static) -> Option { self.imp.to_def(s).map(Static::from) } pub fn to_struct_def(&self, s: &ast::Struct) -> Option { self.imp.to_def(s).map(Struct::from) } pub fn to_trait_alias_def(&self, t: &ast::TraitAlias) -> Option { self.imp.to_def(t).map(TraitAlias::from) } pub fn to_trait_def(&self, t: &ast::Trait) -> Option { self.imp.to_def(t).map(Trait::from) } pub fn to_type_alias_def(&self, t: &ast::TypeAlias) -> Option { self.imp.to_def(t).map(TypeAlias::from) } pub fn to_union_def(&self, u: &ast::Union) -> Option { self.imp.to_def(u).map(Union::from) } }
// NOTE(review): `SemanticsImpl` below is the actual implementation the façade
// delegates to; its `fn new` signature is split across the mangled physical
// line boundary that follows.
impl<'db> SemanticsImpl<'db> { fn new(db: &'db dyn 
// NOTE(review): this line continues the `fn new(db: &'db dyn HirDatabase)`
// signature begun on the previous physical line. It is followed by `parse`
// (parses a file and caches root→file-id), `attach_first_edition` (picks the
// edition from the owning crate), `parse_guess_edition` (falls back to the
// current edition), `parse_or_expand`, and the `expand` /
// `expand_allowed_builtins` macro-expansion entry points.
HirDatabase) -> Self { SemanticsImpl { db, s2d_cache: Default::default(), root_to_file_cache: Default::default(), macro_call_cache: Default::default(), } } pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile { let tree = self.db.parse(file_id).tree(); self.cache(tree.syntax().clone(), file_id.into()); tree } pub fn attach_first_edition(&self, file: FileId) -> Option { Some(EditionedFileId::new( file, self.file_to_module_defs(file).next()?.krate().edition(self.db), )) } pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile { let file_id = self .attach_first_edition(file_id) .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); let tree = self.db.parse(file_id).tree(); self.cache(tree.syntax().clone(), file_id.into()); tree } pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode { let node = self.db.parse_or_expand(file_id); self.cache(node.clone(), file_id); node } pub fn expand(&self, macro_call: &ast::MacroCall) -> Option { let sa = self.analyze_no_infer(macro_call.syntax())?; let macro_call = InFile::new(sa.file_id, macro_call); let file_id = if let Some(call) = ::to_def(self, macro_call) { call.as_macro_file() } else { sa.expand(self.db, macro_call)? }; let node = self.parse_or_expand(file_id.into()); Some(node) } /// Expands the macro if it isn't one of the built-in ones that expand to custom syntax or dummy /// expansions. pub fn expand_allowed_builtins(&self, macro_call: &ast::MacroCall) -> Option { let sa = self.analyze_no_infer(macro_call.syntax())?; let macro_call = InFile::new(sa.file_id, macro_call); let file_id = if let Some(call) = ::to_def(self, macro_call) { call.as_macro_file() } else { sa.expand(self.db, macro_call)? 
// NOTE(review): continuation of `expand_allowed_builtins`: built-in macros that
// expand to custom or dummy syntax (column!/file!/module_path!/asm!/
// log_syntax!/trace_macros!/format_args! variants and compile_error!) are
// filtered out as not useful to show to the user, then the expansion is parsed
// and cached. `expand_attr_macro`, `expand_derive_as_pseudo_attr_macro` and
// `resolve_derive_macro` follow; generic arguments in their signatures (e.g.
// `Option<SyntaxNode>`) were stripped by the extraction pass.
}; let macro_call = self.db.lookup_intern_macro_call(file_id.macro_call_id); let skip = matches!( macro_call.def.kind, hir_expand::MacroDefKind::BuiltIn( _, BuiltinFnLikeExpander::Column | BuiltinFnLikeExpander::File | BuiltinFnLikeExpander::ModulePath | BuiltinFnLikeExpander::Asm | BuiltinFnLikeExpander::GlobalAsm | BuiltinFnLikeExpander::LogSyntax | BuiltinFnLikeExpander::TraceMacros | BuiltinFnLikeExpander::FormatArgs | BuiltinFnLikeExpander::FormatArgsNl | BuiltinFnLikeExpander::ConstFormatArgs, ) | hir_expand::MacroDefKind::BuiltInEager(_, EagerExpander::CompileError) ); if skip { // these macros expand to custom builtin syntax and/or dummy things, no point in // showing these to the user return None; } let node = self.parse_or_expand(file_id.into()); Some(node) } /// If `item` has an attribute macro attached to it, expands it. pub fn expand_attr_macro(&self, item: &ast::Item) -> Option { let src = self.wrap_node_infile(item.clone()); let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?; Some(self.parse_or_expand(macro_call_id.as_file())) } pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option { let adt = attr.syntax().parent().and_then(ast::Adt::cast)?; let src = self.wrap_node_infile(attr.clone()); let call_id = self.with_ctx(|ctx| { ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it) })?; Some(self.parse_or_expand(call_id.as_file())) } pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option>> { let calls = self.derive_macro_calls(attr)?; self.with_ctx(|ctx| { Some( calls .into_iter() .map(|call| macro_call_to_macro_id(ctx, call?).map(|id| Macro { id })) .collect(), ) }) } pub fn expand_derive_macro(&self, attr: &ast::Attr) -> Option> { let res: Vec<_> = self .derive_macro_calls(attr)? 
// NOTE(review): continuation of `expand_derive_macro`'s iterator chain from the
// previous physical line; then `derive_macro_calls` (looks up the derive macro
// call ids recorded for an attribute on an ADT — presumably returning
// `Option<Vec<Option<MacroCallId>>>` before the generics were stripped),
// `is_derive_annotated`, `derive_helper` (resolves a derive-helper attribute on
// a struct/enum/union to all macros that register a helper of that name), and
// `is_attr_macro_call`. Confirm signatures against version control.
.into_iter() .flat_map(|call| { let file_id = call?.as_file(); let node = self.db.parse_or_expand(file_id); self.cache(node.clone(), file_id); Some(node) }) .collect(); Some(res) } fn derive_macro_calls(&self, attr: &ast::Attr) -> Option>> { let adt = attr.syntax().parent().and_then(ast::Adt::cast)?; let file_id = self.find_file(adt.syntax()).file_id; let adt = InFile::new(file_id, &adt); let src = InFile::new(file_id, attr.clone()); self.with_ctx(|ctx| { let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?; Some(res.to_vec()) }) } pub fn is_derive_annotated(&self, adt: &ast::Adt) -> bool { let file_id = self.find_file(adt.syntax()).file_id; let adt = InFile::new(file_id, adt); self.with_ctx(|ctx| ctx.has_derives(adt)) } pub fn derive_helper(&self, attr: &ast::Attr) -> Option> { let adt = attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it { ast::Item::Struct(it) => Some(ast::Adt::Struct(it)), ast::Item::Enum(it) => Some(ast::Adt::Enum(it)), ast::Item::Union(it) => Some(ast::Adt::Union(it)), _ => None, })?; let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name(); let sa = self.analyze_no_infer(adt.syntax())?; let id = self.db.ast_id_map(sa.file_id).ast_id(&adt); let res: Vec<_> = sa .resolver .def_map() .derive_helpers_in_scope(InFile::new(sa.file_id, id))? .iter() .filter(|&(name, _, _)| *name == attr_name) .map(|&(_, macro_, call)| (macro_.into(), call.as_macro_file())) .collect(); res.is_empty().not().then_some(res) } pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool { let file_id = self.find_file(item.syntax()).file_id; let src = InFile::new(file_id, item); self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some()) } /// Expand the macro call with a different token tree, mapping the `token_to_map` down into the /// expansion. `token_to_map` should be a token from the `speculative args` node. 
// NOTE(review): speculative (in-memory, non-committed) macro-expansion entry
// points. Each variant maps `token_to_map` from the hypothetical input into the
// produced expansion so IDE features (e.g. completion inside a macro) can track
// a token through the expansion without mutating the database.
pub fn speculative_expand( &self, actual_macro_call: &ast::MacroCall, speculative_args: &ast::TokenTree, token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, SyntaxToken)> { let SourceAnalyzer { file_id, resolver, .. } = self.analyze_no_infer(actual_macro_call.syntax())?; let macro_call = InFile::new(file_id, actual_macro_call); let krate = resolver.krate(); let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { resolver.resolve_path_as_macro_def(self.db.upcast(), path, Some(MacroSubNs::Bang)) })?; hir_expand::db::expand_speculative( self.db.upcast(), macro_call_id, speculative_args.syntax(), token_to_map, ) } pub fn speculative_expand_raw( &self, macro_file: MacroFileId, speculative_args: &SyntaxNode, token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, SyntaxToken)> { hir_expand::db::expand_speculative( self.db.upcast(), macro_file.macro_call_id, speculative_args, token_to_map, ) } /// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the /// expansion. `token_to_map` should be a token from the `speculative args` node. 
// NOTE(review): attribute-macro and pseudo-derive variants of speculative
// expansion, followed by `as_format_args_parts`, which descends a string token
// into macro expansions and collects the formatting segments of a
// `format_args!` (or `asm!`) template string; its stripped return generic was
// presumably `Option<Vec<(TextRange, Option<Either<..., InlineAsmOperand>>)>>`
// — confirm against version control.
pub fn speculative_expand_attr_macro( &self, actual_macro_call: &ast::Item, speculative_args: &ast::Item, token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, SyntaxToken)> { let macro_call = self.wrap_node_infile(actual_macro_call.clone()); let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?; hir_expand::db::expand_speculative( self.db.upcast(), macro_call_id, speculative_args.syntax(), token_to_map, ) } pub fn speculative_expand_derive_as_pseudo_attr_macro( &self, actual_macro_call: &ast::Attr, speculative_args: &ast::Attr, token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, SyntaxToken)> { let attr = self.wrap_node_infile(actual_macro_call.clone()); let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?; let macro_call_id = self.with_ctx(|ctx| { ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it) })?; hir_expand::db::expand_speculative( self.db.upcast(), macro_call_id, speculative_args.syntax(), token_to_map, ) } /// Retrieves all the formatting parts of the format_args! (or `asm!`) template string. pub fn as_format_args_parts( &self, string: &ast::String, ) -> Option>)>> { let quote = string.open_quote_text_range()?; let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?; self.descend_into_macros_breakable(token, |token| { (|| { let token = token.value; let string = ast::String::cast(token)?; let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; let parent = literal.parent()?; if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) { let source_analyzer = self.analyze_no_infer(format_args.syntax())?; let format_args = self.wrap_node_infile(format_args); let res = source_analyzer .as_format_args_parts(self.db, format_args.as_ref())? 
// NOTE(review): continuation of `as_format_args_parts` — part ranges are
// rebased by `quote.end()` because offsets inside the literal are relative to
// the string contents; `asm!` templates yield `InlineAsmOperand`s
// (`Either::Right`) instead of path resolutions (`Either::Left`). Then
// `check_for_format_args_template` and `resolve_offset_in_format_args` resolve
// the single template part at a given offset, and `descend_node_into_attributes`
// begins (it maps a whole node down by descending its first and last tokens).
.map(|(range, res)| (range + quote.end(), res.map(Either::Left))) .collect(); Some(res) } else { let asm = ast::AsmExpr::cast(parent)?; let source_analyzer = self.analyze_no_infer(asm.syntax())?; let asm = self.wrap_node_infile(asm); let (owner, (expr, asm_parts)) = source_analyzer.as_asm_parts(asm.as_ref())?; let res = asm_parts .iter() .map(|&(range, index)| { ( range + quote.end(), Some(Either::Right(InlineAsmOperand { owner, expr, index })), ) }) .collect(); Some(res) } })() .map_or(ControlFlow::Continue(()), ControlFlow::Break) }) } /// Retrieves the formatting part of the format_args! template string at the given offset. pub fn check_for_format_args_template( &self, original_token: SyntaxToken, offset: TextSize, ) -> Option<(TextRange, Option>)> { let original_string = ast::String::cast(original_token.clone())?; let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?; let quote = original_string.open_quote_text_range()?; self.descend_into_macros_breakable(original_token, |token| { (|| { let token = token.value; self.resolve_offset_in_format_args( ast::String::cast(token)?, offset.checked_sub(quote.end())?, ) .map(|(range, res)| (range + quote.end(), res)) })() .map_or(ControlFlow::Continue(()), ControlFlow::Break) }) } fn resolve_offset_in_format_args( &self, string: ast::String, offset: TextSize, ) -> Option<(TextRange, Option>)> { debug_assert!(offset <= string.syntax().text_range().len()); let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; let parent = literal.parent()?; if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) { let source_analyzer = &self.analyze_no_infer(format_args.syntax())?; let format_args = self.wrap_node_infile(format_args); source_analyzer .resolve_offset_in_format_args(self.db, format_args.as_ref(), offset) .map(|(range, res)| (range, res.map(Either::Left))) } else { let asm = ast::AsmExpr::cast(parent)?; let source_analyzer = 
// NOTE(review): the expression below completes the `let source_analyzer = ...`
// binding started at the end of the previous physical line (asm-template
// branch of `resolve_offset_in_format_args`).
&self.analyze_no_infer(asm.syntax())?; let asm = self.wrap_node_infile(asm); source_analyzer.resolve_offset_in_asm_template(asm.as_ref(), offset).map( |(owner, (expr, range, index))| { (range, Some(Either::Right(InlineAsmOperand { owner, expr, index }))) }, ) } } /// Maps a node down by mapping its first and last token down. pub fn descend_node_into_attributes(&self, node: N) -> SmallVec<[N; 1]> { // This might not be the correct way to do this, but it works for now let mut res = smallvec![]; let tokens = (|| { // FIXME: the trivia skipping should not be necessary let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?; let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?; Some((first, last)) })(); let (first, last) = match tokens { Some(it) => it, None => return res, }; let file = self.find_file(node.syntax()); let Some(file_id) = file.file_id.file_id() else { return res; }; if first == last { // node is just the token, so descend the token self.descend_into_macros_impl( InRealFile::new(file_id, first), &mut |InFile { value, .. 
// NOTE(review): closure body continuing `descend_node_into_attributes`: when
// `first == last` the single token is descended directly and the smallest
// same-range ancestor castable to `N` is collected; otherwise both edge tokens
// are descended separately and zipped pairwise, and the covering node is
// recovered only when both descended tokens land in the same expansion file.
}| { if let Some(node) = value .parent_ancestors() .take_while(|it| it.text_range() == value.text_range()) .find_map(N::cast) { res.push(node) } CONTINUE_NO_BREAKS }, ); } else { // Descend first and last token, then zip them to look for the node they belong to let mut scratch: SmallVec<[_; 1]> = smallvec![]; self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token| { scratch.push(token); CONTINUE_NO_BREAKS }); let mut scratch = scratch.into_iter(); self.descend_into_macros_impl( InRealFile::new(file_id, last), &mut |InFile { value: last, file_id: last_fid }| { if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() { if first_fid == last_fid { if let Some(p) = first.parent() { let range = first.text_range().cover(last.text_range()); let node = find_root(&p) .covering_element(range) .ancestors() .take_while(|it| it.text_range() == range) .find_map(N::cast); if let Some(node) = node { res.push(node); } } } } CONTINUE_NO_BREAKS }, ); } res } fn is_inside_macro_call(token: &SyntaxToken) -> bool { token.parent_ancestors().any(|ancestor| { if ast::MacroCall::can_cast(ancestor.kind()) { return true; } // Check if it is an item (only items can have macro attributes) that has a non-builtin attribute. 
// NOTE(review): remainder of `is_inside_macro_call` — an item attribute counts
// as a potential macro when it is `derive` or when its single-segment name is
// not a known builtin attribute (multi-segment paths conservatively return
// true). Then the `descend_into_macros*` family: `_exact_if_in_macro` descends
// only when inside a macro call, `_cb` invokes a callback per descended token,
// the plain variant collects all descendants (falling back to the input token),
// and `_breakable` allows the callback to short-circuit via `ControlFlow`.
// `CONTINUE_NO_BREAKS` marks callbacks that never break out of the descent.
let Some(item) = ast::Item::cast(ancestor) else { return false }; item.attrs().any(|attr| { let Some(meta) = attr.meta() else { return false }; let Some(path) = meta.path() else { return false }; let Some(attr_name) = path.as_single_name_ref() else { return true }; let attr_name = attr_name.text(); let attr_name = attr_name.as_str(); attr_name == "derive" || find_builtin_attr_idx(&Symbol::intern(attr_name)).is_none() }) }) } pub fn descend_into_macros_exact_if_in_macro( &self, token: SyntaxToken, ) -> SmallVec<[SyntaxToken; 1]> { if Self::is_inside_macro_call(&token) { self.descend_into_macros_exact(token) } else { smallvec![token] } } pub fn descend_into_macros_cb( &self, token: SyntaxToken, mut cb: impl FnMut(InFile), ) { if let Ok(token) = self.wrap_token_infile(token).into_real_file() { self.descend_into_macros_impl(token, &mut |t| { cb(t); CONTINUE_NO_BREAKS }); } } pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { let mut res = smallvec![]; if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() { self.descend_into_macros_impl(token, &mut |t| { res.push(t.value); CONTINUE_NO_BREAKS }); } if res.is_empty() { res.push(token); } res } pub fn descend_into_macros_breakable( &self, token: InRealFile, mut cb: impl FnMut(InFile) -> ControlFlow, ) -> Option { self.descend_into_macros_impl(token.clone(), &mut cb) } /// Descends the token into expansions, returning the tokens that matches the input /// token's [`SyntaxKind`] and text. 
// NOTE(review): `descend_into_macros_exact` / `_single_exact` keep only
// descended tokens whose kind and text match the input token (any identifier
// kind is treated as matching any other, to survive raw-ident/keyword
// remapping), falling back to the input token itself when nothing matches.
pub fn descend_into_macros_exact(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { let mut r = smallvec![]; let text = token.text(); let kind = token.kind(); self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }| { let mapped_kind = value.kind(); let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier(); let matches = (kind == mapped_kind || any_ident_match()) && text == value.text(); if matches { r.push(value); } }); if r.is_empty() { r.push(token); } r } /// Descends the token into expansions, returning the first token that matches the input /// token's [`SyntaxKind`] and text. pub fn descend_into_macros_single_exact(&self, token: SyntaxToken) -> SyntaxToken { let text = token.text(); let kind = token.kind(); if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() { self.descend_into_macros_breakable(token.clone(), |InFile { value, file_id: _ }| { let mapped_kind = value.kind(); let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier(); let matches = (kind == mapped_kind || any_ident_match()) && text == value.text(); if matches { ControlFlow::Break(value) } else { ControlFlow::Continue(()) } }) } else { None } .unwrap_or(token) } fn descend_into_macros_impl( &self, InRealFile { value: token, file_id }: InRealFile, f: &mut dyn FnMut(InFile) -> ControlFlow, ) -> Option { let _p = tracing::info_span!("descend_into_macros_impl").entered(); let (sa, span, file_id) = token .parent() .and_then(|parent| { self.analyze_impl(InRealFile::new(file_id, &parent).into(), None, false) }) .and_then(|sa| { let file_id = sa.file_id.file_id()?; Some(( sa, self.db.real_span_map(file_id).span_for_range(token.text_range()), HirFileId::from(file_id), )) })?; let mut m_cache = self.macro_call_cache.borrow_mut(); let def_map = sa.resolver.def_map(); // A stack of tokens to process, along with the file they came from // These are tracked to know which macro calls we still have to look into 
// NOTE(review): core worklist loop of `descend_into_macros_impl`. Each stack
// entry is (expansion file, tokens still to map down); `process_expansion_for_
// token` maps the original token's span into one expansion (memoizing the
// expansion info in the source-to-def cache) and requeues the produced tokens,
// while `filter_duplicates` drops queued tokens covered by an already-processed
// macro-call range, since they would map to the same expansion.
// the tokens themselves aren't that interesting as the span that is being used to map // things down never changes. let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])]; // Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| { let InMacroFile { file_id, value: mapped_tokens } = self.with_ctx(|ctx| { Some( ctx.cache .expansion_info_cache .entry(macro_file) .or_insert_with(|| { let exp_info = macro_file.expansion_info(self.db.upcast()); let InMacroFile { file_id, value } = exp_info.expanded(); self.cache(value, file_id.into()); exp_info }) .map_range_down(span)? .map(SmallVec::<[_; 2]>::from_iter), ) })?; // we have found a mapping for the token if the vec is non-empty let res = mapped_tokens.is_empty().not().then_some(()); // requeue the tokens we got from mapping our current token down stack.push((HirFileId::from(file_id), mapped_tokens)); res }; // Filters out all tokens that contain the given range (usually the macro call), any such // token is redundant as the corresponding macro call has already been processed let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| { tokens.retain(|t: &mut SyntaxToken| !range.contains_range(t.text_range())) }; while let Some((expansion, ref mut tokens)) = stack.pop() { while let Some(token) = tokens.pop() { let was_not_remapped = (|| { // First expand into attribute invocations let containing_attribute_macro_call = self.with_ctx(|ctx| { token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| { // Don't force populate the dyn cache for items that don't have an attribute anyways item.attrs().next()?; Some((ctx.item_to_macro_call(InFile::new(expansion, &item))?, item)) }) }); if let Some((call_id, item)) = containing_attribute_macro_call { let file_id = call_id.as_macro_file(); let attr_id = match self.db.lookup_intern_macro_call(call_id).kind { 
// NOTE(review): attribute-macro handling — text from the start of the invoking
// attribute to the end of the item is removed from the worklist so sibling
// tokens are not re-processed; the FIXME below documents that inner-attribute
// (`#![...]`) ranges are not yet handled precisely.
hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => { invoc_attr_index.ast_index() } _ => 0, }; // FIXME: here, the attribute's text range is used to strip away all // entries from the start of the attribute "list" up the invoking // attribute. But in // ``` // mod foo { // #![inner] // } // ``` // we don't wanna strip away stuff in the `mod foo {` range, that is // here if the id corresponds to an inner attribute we got strip all // text ranges of the outer ones, and then all of the inner ones up // to the invoking attribute so that the inbetween is ignored. let text_range = item.syntax().text_range(); let start = collect_attrs(&item) .nth(attr_id) .map(|attr| match attr.1 { Either::Left(it) => it.syntax().text_range().start(), Either::Right(it) => it.syntax().text_range().start(), }) .unwrap_or_else(|| text_range.start()); let text_range = TextRange::new(start, text_range.end()); filter_duplicates(tokens, text_range); return process_expansion_for_token(&mut stack, file_id); } // Then check for token trees, that means we are either in a function-like macro or // secondary attribute inputs let tt = token .parent_ancestors() .map_while(Either::::cast) .last()?; match tt { // function-like macro call Either::Left(tt) => { if tt.left_delimiter_token().map_or(false, |it| it == token) { return None; } if tt.right_delimiter_token().map_or(false, |it| it == token) { return None; } let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?; let mcall = InFile::new(expansion, macro_call); let file_id = match m_cache.get(&mcall) { Some(&it) => it, None => { let it = if let Some(call) = ::to_def( self, mcall.as_ref(), ) { call.as_macro_file() } else { sa.expand(self.db, mcall.as_ref())? 
// NOTE(review): function-like macro calls — delimiter tokens themselves are
// skipped, call→expansion-file resolutions are memoized in `macro_call_cache`,
// and eager argument expansions are also descended into. The `Either::Right`
// arm below handles `derive` invocations and derive-helper attributes on ADTs
// (or on a variant/field of one).
}; m_cache.insert(mcall, it); it } }; let text_range = tt.syntax().text_range(); filter_duplicates(tokens, text_range); process_expansion_for_token(&mut stack, file_id).or(file_id .eager_arg(self.db.upcast()) .and_then(|arg| { // also descend into eager expansions process_expansion_for_token(&mut stack, arg.as_macro_file()) })) } // derive or derive helper Either::Right(meta) => { // attribute we failed expansion for earlier, this might be a derive invocation // or derive helper attribute let attr = meta.parent_attr()?; let adt = match attr.syntax().parent().and_then(ast::Adt::cast) { Some(adt) => { // this might be a derive on an ADT let derive_call = self.with_ctx(|ctx| { // so try downmapping the token into the pseudo derive expansion // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works ctx.attr_to_derive_macro_call( InFile::new(expansion, &adt), InFile::new(expansion, attr.clone()), ) .map(|(_, call_id, _)| call_id) }); match derive_call { Some(call_id) => { // resolved to a derive let file_id = call_id.as_macro_file(); let text_range = attr.syntax().text_range(); // remove any other token in this macro input, all their mappings are the // same as this tokens.retain(|t| { !text_range.contains_range(t.text_range()) }); return process_expansion_for_token( &mut stack, file_id, ); } None => Some(adt), } } None => { // Otherwise this could be a derive helper on a variant or field attr.syntax().ancestors().find_map(ast::Item::cast).and_then( |it| match it { ast::Item::Struct(it) => Some(ast::Adt::Struct(it)), ast::Item::Enum(it) => Some(ast::Adt::Enum(it)), ast::Item::Union(it) => Some(ast::Adt::Union(it)), _ => None, }, ) } }?; if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(expansion, &adt))) { return None; } let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name(); // Not an attribute, nor a derive, so it's either an intert attribute or a derive helper // Try to resolve to a derive helper and downmap let id = 
// NOTE(review): derive-helper resolution — every registered helper with a
// matching name is expanded, since multiple derives may register the same
// helper name (the FIXME notes the callback should eventually run per helper
// too). Tokens that were never remapped anywhere are finally handed to `f` at
// their original site, and a `ControlFlow::Break` from `f` aborts the whole
// descent. `descend_node_at_offset` then k-merges the per-descendant ancestor
// streams so smaller (innermost) nodes are yielded first.
self.db.ast_id_map(expansion).ast_id(&adt); let helpers = def_map.derive_helpers_in_scope(InFile::new(expansion, id))?; if !helpers.is_empty() { let text_range = attr.syntax().text_range(); filter_duplicates(tokens, text_range); } let mut res = None; for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) { // as there may be multiple derives registering the same helper // name, we gotta make sure to call this for all of them! // FIXME: We need to call `f` for all of them as well though! res = res.or(process_expansion_for_token( &mut stack, derive.as_macro_file(), )); } res } } })() .is_none(); if was_not_remapped { if let ControlFlow::Break(b) = f(InFile::new(expansion, token)) { return Some(b); } } } } None } // Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop // traversing the inner iterator when it finds a node. // The outer iterator is over the tokens descendants // The inner iterator is the ancestors of a descendant fn descend_node_at_offset( &self, node: &SyntaxNode, offset: TextSize, ) -> impl Iterator + '_> + '_ { node.token_at_offset(offset) .map(move |token| self.descend_into_macros_exact(token)) .map(|descendants| { descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it)) }) // re-order the tokens from token_at_offset by returning the ancestors with the smaller first nodes first // See algo::ancestors_at_offset, which uses the same approach .kmerge_by(|left, right| { left.clone() .map(|node| node.text_range().len()) .lt(right.clone().map(|node| node.text_range().len())) }) } /// Attempts to map the node out of macro expanded files returning the original file range. /// If upmapping is not possible, this will fall back to the range of the macro call of the /// macro file the node resides in. 
// NOTE(review): range-mapping helpers: `original_range` falls back to the
// macro-call's range when upmapping out of an expansion is not possible,
// `original_range_opt` succeeds only when the mapped range lands in the root
// (real) file, and the `original_*_node*` variants only work for attribute
// expansions because other macro kinds take token trees — not syntax nodes —
// as input.
pub fn original_range(&self, node: &SyntaxNode) -> FileRange { let node = self.find_file(node); node.original_file_range_rooted(self.db.upcast()) } /// Attempts to map the node out of macro expanded files returning the original file range. pub fn original_range_opt(&self, node: &SyntaxNode) -> Option { let node = self.find_file(node); node.original_file_range_opt(self.db.upcast()) .filter(|(_, ctx)| ctx.is_root()) .map(TupleExt::head) .map(Into::into) } /// Attempts to map the node out of macro expanded files. /// This only work for attribute expansions, as other ones do not have nodes as input. pub fn original_ast_node(&self, node: N) -> Option { self.wrap_node_infile(node).original_ast_node_rooted(self.db.upcast()).map( |InRealFile { file_id, value }| { self.cache(find_root(value.syntax()), file_id.into()); value }, ) } /// Attempts to map the node out of macro expanded files. /// This only work for attribute expansions, as other ones do not have nodes as input. pub fn original_syntax_node_rooted(&self, node: &SyntaxNode) -> Option { let InFile { file_id, .. } = self.find_file(node); InFile::new(file_id, node).original_syntax_node_rooted(self.db.upcast()).map( |InRealFile { file_id, value }| { self.cache(find_root(&value), file_id.into()); value }, ) } pub fn diagnostics_display_range(&self, src: InFile) -> FileRange { let root = self.parse_or_expand(src.file_id); let node = src.map(|it| it.to_node(&root)); node.as_ref().original_file_range_rooted(self.db.upcast()) } fn token_ancestors_with_macros( &self, token: SyntaxToken, ) -> impl Iterator + Clone + '_ { token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent)) } /// Iterates the ancestors of the given node, climbing up macro expansions while doing so. 
// NOTE(review): `ancestors_with_macros` climbs from a node with no syntactic
// parent out of its expansion to the macro call site (via the cached expansion
// info); `ancestors_at_offset_with_macros` k-merges ancestor streams so
// smaller nodes come first; `resolve_lifetime_param` matches a lifetime against
// enclosing generic-param lists by text. The trailing `resolve_label` signature
// is truncated mid-declaration at this extraction chunk's edge (its return-type
// generic was stripped) — see version control for the full definition.
pub fn ancestors_with_macros( &self, node: SyntaxNode, ) -> impl Iterator + Clone + '_ { let node = self.find_file(&node); iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| { match value.parent() { Some(parent) => Some(InFile::new(file_id, parent)), None => { let macro_file = file_id.macro_file()?; self.with_ctx(|ctx| { let expansion_info = ctx .cache .expansion_info_cache .entry(macro_file) .or_insert_with(|| macro_file.expansion_info(self.db.upcast())); expansion_info.arg().map(|node| node?.parent()).transpose() }) } } }) .map(|it| it.value) } pub fn ancestors_at_offset_with_macros( &self, node: &SyntaxNode, offset: TextSize, ) -> impl Iterator + '_ { node.token_at_offset(offset) .map(|token| self.token_ancestors_with_macros(token)) .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) } pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option { let text = lifetime.text(); let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| { let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?; gpl.lifetime_params() .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text)) })?; let src = self.wrap_node_infile(lifetime_param); ToDef::to_def(self, src.as_ref()) } pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option