hygiene 2.0

Lukas Wirth 2023-11-17 19:07:31 +01:00
parent e36b3f7b8c
commit 05f375eae2
43 changed files with 758 additions and 458 deletions

View file

@@ -13,10 +13,9 @@ use triomphe::Arc;
use crate::{
db::ExpandDatabase,
hygiene::Hygiene,
mod_path::ModPath,
tt::{self, Subtree},
InFile,
InFile, SpanMap,
};
/// Syntactical attributes, without filtering of `cfg_attr`s.
@@ -44,7 +43,7 @@ impl RawAttrs {
db: &dyn ExpandDatabase,
span_anchor: SpanAnchor,
owner: &dyn ast::HasAttrs,
hygiene: &Hygiene,
hygiene: &SpanMap,
) -> Self {
let entries = collect_attrs(owner)
.filter_map(|(id, attr)| match attr {
@@ -69,8 +68,7 @@ impl RawAttrs {
span_anchor: SpanAnchor,
owner: InFile<&dyn ast::HasAttrs>,
) -> Self {
let hygiene = Hygiene::new(db, owner.file_id);
Self::new(db, span_anchor, owner.value, &hygiene)
Self::new(db, span_anchor, owner.value, &db.span_map(owner.file_id))
}
pub fn merge(&self, other: Self) -> Self {
@@ -135,9 +133,7 @@ impl RawAttrs {
delimiter: tt::Delimiter::unspecified(),
token_trees: attr.to_vec(),
};
// FIXME hygiene
let hygiene = Hygiene::new_unhygienic();
Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx))
Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
},
);
@@ -220,7 +216,7 @@ impl Attr {
db: &dyn ExpandDatabase,
span_anchor: SpanAnchor,
ast: ast::Meta,
hygiene: &Hygiene,
hygiene: &SpanMap,
id: AttrId,
) -> Option<Attr> {
let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?);
@@ -234,9 +230,7 @@ impl Attr {
// FIXME: We could also allocate ids for attributes and use the attribute itself as an anchor
let offset =
db.ast_id_map(span_anchor.file_id).get_raw(span_anchor.ast_id).text_range().start();
// FIXME: Spanmap
let tree =
syntax_node_to_token_tree(tt.syntax(), span_anchor, offset, &Default::default());
let tree = syntax_node_to_token_tree(tt.syntax(), span_anchor, offset, hygiene);
Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
} else {
None
@@ -244,18 +238,13 @@ impl Attr {
Some(Attr { id, path, input })
}
fn from_tt(
db: &dyn ExpandDatabase,
tt: &tt::Subtree,
hygiene: &Hygiene,
id: AttrId,
) -> Option<Attr> {
fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
// FIXME: Unnecessary roundtrip tt -> ast -> tt
let (parse, _map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
let ast = ast::Meta::cast(parse.syntax_node())?;
// FIXME: we discard spans here!
Self::from_src(db, SpanAnchor::DUMMY, ast, hygiene, id)
Self::from_src(db, SpanAnchor::DUMMY, ast, &SpanMap::default(), id)
}
pub fn path(&self) -> &ModPath {
@@ -295,9 +284,9 @@ impl Attr {
pub fn parse_path_comma_token_tree<'a>(
&'a self,
db: &'a dyn ExpandDatabase,
hygiene: &'a Hygiene,
) -> Option<impl Iterator<Item = ModPath> + 'a> {
let args = self.token_tree_value()?;
dbg!(args);
if args.delimiter.kind != DelimiterKind::Parenthesis {
return None;
@@ -309,12 +298,13 @@ impl Attr {
if tts.is_empty() {
return None;
}
// FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation here.
// FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
// here.
let subtree = tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
token_trees: tts.into_iter().cloned().collect(),
token_trees: tts.to_vec(),
};
let (parse, _) =
let (parse, span_map) =
mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
let meta = ast::Meta::cast(parse.syntax_node())?;
// Only simple paths are allowed.
@@ -323,7 +313,7 @@ impl Attr {
return None;
}
let path = meta.path()?;
ModPath::from_src(db, path, hygiene)
ModPath::from_src(db, path, &span_map)
});
Some(paths)
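
The net effect of this file's changes is that attribute collection no longer constructs a `Hygiene` value; callers hand over the file's `SpanMap` instead. A minimal sketch of the new call shape, assuming the `ExpandDatabase::span_map` query introduced later in this commit and a caller inside the same crate (`raw_attrs_of` and its arguments are placeholders, not part of the commit):

    // Hedged sketch: how a caller would now build RawAttrs for an item.
    fn raw_attrs_of(
        db: &dyn ExpandDatabase,
        span_anchor: SpanAnchor,
        owner: InFile<&dyn ast::HasAttrs>,
    ) -> RawAttrs {
        // Previously: `let hygiene = Hygiene::new(db, owner.file_id);`
        // Now the span map is fetched from the database instead.
        let span_map = db.span_map(owner.file_id);
        RawAttrs::new(db, span_anchor, owner.value, &span_map)
    }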

View file

@@ -1,6 +1,6 @@
//! Defines database & queries for macro expansion.
use ::tt::SyntaxContext;
use ::tt::{SpanAnchor as _, SyntaxContext};
use base_db::{
salsa,
span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
@@ -17,9 +17,10 @@ use triomphe::Arc;
use crate::{
ast_id_map::AstIdMap,
attrs::RawAttrs,
builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander,
hygiene::{self, HygieneFrame, SyntaxContextData},
hygiene::{self, SyntaxContextData, Transparency},
tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind,
MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander, SpanMap,
@@ -37,16 +38,35 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
/// Old-style `macro_rules` or the new macros 2.0
pub struct DeclarativeMacroExpander {
pub mac: mbe::DeclarativeMacro<base_db::span::SpanData>,
pub transparency: Transparency,
}
impl DeclarativeMacroExpander {
pub fn expand(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
pub fn expand(
&self,
db: &dyn ExpandDatabase,
tt: tt::Subtree,
call_id: MacroCallId,
) -> ExpandResult<tt::Subtree> {
match self.mac.err() {
Some(e) => ExpandResult::new(
tt::Subtree::empty(),
ExpandError::other(format!("invalid macro definition: {e}")),
),
None => self.mac.expand(&tt).map_err(Into::into),
None => self
.mac
.expand(&tt, |s| s.ctx = db.apply_mark(s.ctx, call_id, self.transparency))
.map_err(Into::into),
}
}
pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
match self.mac.err() {
Some(e) => ExpandResult::new(
tt::Subtree::empty(),
ExpandError::other(format!("invalid macro definition: {e}")),
),
None => self.mac.expand(&tt, |_| ()).map_err(Into::into),
}
}
}
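
The two expansion paths of `DeclarativeMacroExpander` now differ only in hygiene: `expand` needs the database and the interned `MacroCallId` so it can mark every produced span with the definition's transparency, while `expand_unhygienic` serves speculative expansion, where no real call is interned. A hedged usage sketch (all locals are placeholders for values a caller already has):

    // Sketch only; `krate`, `def_ast_id`, `call_id` and `input_tt` are placeholders.
    let expander = db.decl_macro_expander(krate, def_ast_id);

    // Real expansion: every output span's context is rewritten via
    // `db.apply_mark(ctx, call_id, expander.transparency)`.
    let hygienic = expander.expand(db, input_tt.clone(), call_id);

    // Speculative expansion: there is no MacroCallId to attribute marks to,
    // so output spans keep their original contexts.
    let speculative = expander.expand_unhygienic(input_tt);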
@@ -83,6 +103,9 @@ pub trait ExpandDatabase: SourceDatabase {
&self,
macro_file: MacroFile,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<SpanMap>)>;
// TODO: transparent?
#[salsa::transparent]
fn span_map(&self, file_id: HirFileId) -> Arc<SpanMap>;
/// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
/// reason why we use salsa at all.
@@ -97,8 +120,8 @@ pub trait ExpandDatabase: SourceDatabase {
#[salsa::invoke(hygiene::apply_mark)]
fn apply_mark(
&self,
ctxt: SyntaxContextData,
file_id: HirFileId,
ctxt: SyntaxContextId,
call_id: MacroCallId,
transparency: hygiene::Transparency,
) -> SyntaxContextId;
@@ -137,8 +160,13 @@ pub trait ExpandDatabase: SourceDatabase {
&self,
macro_call: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>>;
}
fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<SpanMap> {
match file_id.repr() {
HirFileIdRepr::FileId(_) => Arc::new(Default::default()),
HirFileIdRepr::MacroFile(m) => db.parse_macro_expansion(m).value.1,
}
}
/// This expands the given macro call, but with different arguments. This is
@@ -220,7 +248,9 @@ pub fn expand_speculative(
),
)
}
MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt)
}
MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, actual_macro_call, &tt).map_err(Into::into)
@@ -229,7 +259,9 @@ pub fn expand_speculative(
};
let expand_to = macro_expand_to(db, actual_macro_call);
let (node, rev_tmap) = token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to);
let (node, mut rev_tmap) =
token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to);
rev_tmap.real_file = false;
let syntax_node = node.syntax_node();
let token = rev_tmap
@@ -285,7 +317,8 @@ fn parse_macro_expansion(
tracing::debug!("expanded = {}", tt.as_debug_string());
tracing::debug!("kind = {:?}", expand_to);
let (parse, rev_token_map) = token_tree_to_syntax_node(db, &tt, expand_to);
let (parse, mut rev_token_map) = token_tree_to_syntax_node(db, &tt, expand_to);
rev_token_map.real_file = false;
ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
}
@@ -464,41 +497,70 @@ fn decl_macro_expander(
(parse.syntax_node(), map)
}
};
let mac = match id.to_ptr(db).to_node(&root) {
ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(
arg.syntax(),
SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() },
macro_rules.syntax().text_range().start(),
&map,
);
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
mac
}
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
},
ast::Macro::MacroDef(macro_def) => match macro_def.body() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(
arg.syntax(),
SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() },
macro_def.syntax().text_range().start(),
&map,
);
let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
mac
}
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
},
let transparency = |node| {
// ... would be nice to have the item tree here
let attrs =
RawAttrs::new(db, SpanAnchor::DUMMY, node, &Default::default()).filter(db, def_crate);
match &*attrs
.iter()
.find(|it| {
it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency")
})?
.token_tree_value()?
.token_trees
{
[tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
"transparent" => Some(Transparency::Transparent),
"semitransparent" => Some(Transparency::SemiTransparent),
"opaque" => Some(Transparency::Opaque),
_ => None,
},
_ => None,
}
};
Arc::new(DeclarativeMacroExpander { mac })
let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
ast::Macro::MacroRules(macro_rules) => (
match macro_rules.token_tree() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(
arg.syntax(),
SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() },
macro_rules.syntax().text_range().start(),
&map,
);
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
mac
}
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
},
transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
),
ast::Macro::MacroDef(macro_def) => (
match macro_def.body() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(
arg.syntax(),
SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() },
macro_def.syntax().text_range().start(),
&map,
);
let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
mac
}
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
},
transparency(&macro_def).unwrap_or(Transparency::Opaque),
),
};
Arc::new(DeclarativeMacroExpander { mac, transparency })
}
fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
@@ -514,12 +576,15 @@ fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
}
}
fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
fn macro_expand(
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
) -> ExpandResult<Arc<tt::Subtree>> {
let _p = profile::span("macro_expand");
let loc = db.lookup_intern_macro_call(id);
let loc = db.lookup_intern_macro_call(macro_call_id);
let ExpandResult { value: tt, mut err } = match loc.def.kind {
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id),
MacroDefKind::BuiltInDerive(expander, ..) => {
// FIXME: add firewall query for this?
let hir_file_id = loc.kind.file_id();
@@ -538,7 +603,7 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
let _t;
expander.expand(
db,
id,
macro_call_id,
&node,
match &map {
Some(map) => map,
@@ -554,7 +619,7 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
)
}
_ => {
let ValueResult { value, err } = db.macro_arg(id);
let ValueResult { value, err } = db.macro_arg(macro_call_id);
let Some(macro_arg) = value else {
return ExpandResult {
value: Arc::new(tt::Subtree {
@@ -570,9 +635,11 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
let arg = &*macro_arg;
match loc.def.kind {
MacroDefKind::Declarative(id) => {
db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id)
}
MacroDefKind::BuiltIn(it, _) => {
it.expand(db, macro_call_id, &arg).map_err(Into::into)
}
MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
// This might look a bit odd, but we do not expand the inputs to eager macros here.
// Eager macros inputs are expanded, well, eagerly when we collect the macro calls.
// That kind of expansion uses the ast id map of an eager macros input though which goes through
@@ -594,8 +661,10 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
}),
};
}
MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, macro_call_id, &arg).map_err(Into::into)
}
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, macro_call_id, &arg),
_ => unreachable!(),
}
}
@@ -653,10 +722,6 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
ExpandResult { value: Arc::new(tt), err }
}
fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
Arc::new(HygieneFrame::new(db, file_id))
}
fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
db.lookup_intern_macro_call(id).expand_to()
}

View file

@@ -19,7 +19,7 @@
//!
//! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
use base_db::{
span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
CrateId,
};
use rustc_hash::FxHashMap;
@@ -29,7 +29,6 @@ use triomphe::Arc;
use crate::{
ast::{self, AstNode},
db::ExpandDatabase,
hygiene::Hygiene,
mod_path::ModPath,
EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
MacroCallLoc, MacroDefId, MacroDefKind, SpanMap,
@@ -56,8 +55,10 @@ pub fn expand_eager_macro_input(
krate,
eager: None,
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
// FIXME
call_site: SyntaxContextId::ROOT,
});
let ExpandResult { value: (arg_exp, _arg_exp_map), err: parse_err } =
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
db.parse_macro_expansion(arg_id.as_macro_file());
// we need this map here as the expansion of the eager input fake file loses whitespace ...
// let mut ws_mapping = FxHashMap::default();
@@ -70,7 +71,7 @@ pub fn expand_eager_macro_input(
let ExpandResult { value: expanded_eager_input, err } = {
eager_macro_recur(
db,
&Hygiene::new(db, macro_call.file_id),
&arg_exp_map,
InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
krate,
resolver,
@@ -131,6 +132,8 @@ pub fn expand_eager_macro_input(
error: err.clone(),
})),
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
// FIXME
call_site: SyntaxContextId::ROOT,
};
ExpandResult { value: Some(db.intern_macro_call(loc)), err }
@@ -146,7 +149,13 @@ fn lazy_expand(
let expand_to = ExpandTo::from_call_site(&macro_call.value);
let ast_id = macro_call.with_value(ast_id);
let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to });
let id = def.as_lazy_macro(
db,
krate,
MacroCallKind::FnLike { ast_id, expand_to },
// FIXME
SyntaxContextId::ROOT,
);
let macro_file = id.as_macro_file();
db.parse_macro_expansion(macro_file)
@@ -155,7 +164,7 @@ fn lazy_expand(
fn eager_macro_recur(
db: &dyn ExpandDatabase,
hygiene: &Hygiene,
hygiene: &SpanMap,
curr: InFile<SyntaxNode>,
krate: CrateId,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
@@ -250,14 +259,13 @@ fn eager_macro_recur(
| MacroDefKind::BuiltInAttr(..)
| MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(..) => {
let ExpandResult { value: (parse, _tm), err } =
let ExpandResult { value: (parse, tm), err } =
lazy_expand(db, &def, curr.with_value(call.clone()), krate);
// replace macro inside
let hygiene = Hygiene::new(db, parse.file_id);
let ExpandResult { value, err: error } = eager_macro_recur(
db,
&hygiene,
&tm,
// FIXME: We discard parse errors here
parse.as_ref().map(|it| it.syntax_node()),
krate,

View file

@@ -2,32 +2,31 @@
//!
//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
//! this moment, this is horribly incomplete and handles only `$crate`.
use base_db::{span::SyntaxContextId, CrateId};
use either::Either;
use syntax::{
ast::{self},
TextRange,
};
use triomphe::Arc;
use base_db::span::{MacroCallId, SyntaxContextId};
use crate::{
db::ExpandDatabase,
name::{AsName, Name},
HirFileId, InFile,
};
use crate::db::ExpandDatabase;
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct SyntaxContextData {
// FIXME: This might only need to be Option<MacroCallId>?
outer_expn: HirFileId,
outer_transparency: Transparency,
parent: SyntaxContextId,
pub outer_expn: Option<MacroCallId>,
pub outer_transparency: Transparency,
pub parent: SyntaxContextId,
/// This context, but with all transparent and semi-transparent expansions filtered away.
opaque: SyntaxContextId,
pub opaque: SyntaxContextId,
/// This context, but with all transparent expansions filtered away.
opaque_and_semitransparent: SyntaxContextId,
/// Name of the crate to which `$crate` with this context would resolve.
dollar_crate_name: Name,
pub opaque_and_semitransparent: SyntaxContextId,
}
impl SyntaxContextData {
pub fn root() -> Self {
SyntaxContextData {
outer_expn: None,
outer_transparency: Transparency::Opaque,
parent: SyntaxContextId::ROOT,
opaque: SyntaxContextId::ROOT,
opaque_and_semitransparent: SyntaxContextId::ROOT,
}
}
}
/// A property of a macro expansion that determines how identifiers
@@ -50,12 +49,130 @@ pub enum Transparency {
}
pub(super) fn apply_mark(
_db: &dyn ExpandDatabase,
_ctxt: SyntaxContextData,
_file_id: HirFileId,
_transparency: Transparency,
db: &dyn ExpandDatabase,
ctxt: SyntaxContextId,
call_id: MacroCallId,
transparency: Transparency,
) -> SyntaxContextId {
_db.intern_syntax_context(_ctxt)
if transparency == Transparency::Opaque {
return apply_mark_internal(db, ctxt, Some(call_id), transparency);
}
let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site;
let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
call_site_ctxt.normalize_to_macros_2_0(db)
} else {
call_site_ctxt.normalize_to_macro_rules(db)
};
if call_site_ctxt.is_root(db) {
return apply_mark_internal(db, ctxt, Some(call_id), transparency);
}
// Otherwise, `expn_id` is a macros 1.0 definition and the call site is in a
// macros 2.0 expansion, i.e., a macros 1.0 invocation is in a macros 2.0 definition.
//
// In this case, the tokens from the macros 1.0 definition inherit the hygiene
// at their invocation. That is, we pretend that the macros 1.0 definition
// was defined at its invocation (i.e., inside the macros 2.0 definition)
// so that the macros 2.0 definition remains hygienic.
//
// See the example at `test/ui/hygiene/legacy_interaction.rs`.
for (call_id, transparency) in ctxt.marks(db) {
call_site_ctxt = apply_mark_internal(db, call_site_ctxt, call_id, transparency);
}
apply_mark_internal(db, call_site_ctxt, Some(call_id), transparency)
}
fn apply_mark_internal(
db: &dyn ExpandDatabase,
ctxt: SyntaxContextId,
call_id: Option<MacroCallId>,
transparency: Transparency,
) -> SyntaxContextId {
let syntax_context_data = db.lookup_intern_syntax_context(ctxt);
let mut opaque = syntax_context_data.opaque;
let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent;
if transparency >= Transparency::Opaque {
let parent = opaque;
let new_opaque = SyntaxContextId::SELF_REF;
// But we can't just grab the to be allocated ID either as that would not deduplicate
// things!
// So we need a new salsa store type here ...
opaque = db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque: new_opaque,
opaque_and_semitransparent: new_opaque,
});
}
if transparency >= Transparency::SemiTransparent {
let parent = opaque_and_semitransparent;
let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF;
opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque,
opaque_and_semitransparent: new_opaque_and_semitransparent,
});
}
let parent = ctxt;
db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque,
opaque_and_semitransparent,
})
}
pub trait SyntaxContextExt {
fn is_root(self, db: &dyn ExpandDatabase) -> bool;
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self;
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self;
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self;
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)>;
}
#[inline(always)]
fn handle_self_ref(p: SyntaxContextId, n: SyntaxContextId) -> SyntaxContextId {
match n {
SyntaxContextId::SELF_REF => p,
_ => n,
}
}
impl SyntaxContextExt for SyntaxContextId {
fn is_root(self, db: &dyn ExpandDatabase) -> bool {
db.lookup_intern_syntax_context(self).outer_expn.is_none()
}
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self {
handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque_and_semitransparent)
}
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self {
handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque)
}
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self {
db.lookup_intern_syntax_context(self).parent
}
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
let data = db.lookup_intern_syntax_context(self);
(data.outer_expn, data.outer_transparency)
}
fn marks(mut self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)> {
let mut marks = Vec::new();
while self != SyntaxContextId::ROOT {
marks.push(self.outer_mark(db));
self = self.parent_ctxt(db);
}
marks.reverse();
marks
}
}
// pub(super) fn with_ctxt_from_mark(db: &ExpandDatabase, file_id: HirFileId) {
@@ -64,50 +181,3 @@ pub(super) fn apply_mark(
// pub(super) fn with_call_site_ctxt(db: &ExpandDatabase, file_id: HirFileId) {
// self.with_ctxt_from_mark(expn_id, Transparency::Transparent)
// }
#[derive(Clone, Debug)]
pub struct Hygiene {}
impl Hygiene {
pub fn new(_: &dyn ExpandDatabase, _: HirFileId) -> Hygiene {
Hygiene {}
}
pub fn new_unhygienic() -> Hygiene {
Hygiene {}
}
// FIXME: this should just return name
pub fn name_ref_to_name(
&self,
_: &dyn ExpandDatabase,
name_ref: ast::NameRef,
) -> Either<Name, CrateId> {
Either::Left(name_ref.as_name())
}
pub fn local_inner_macros(&self, _: &dyn ExpandDatabase, _: ast::Path) -> Option<CrateId> {
None
}
}
#[derive(Clone, Debug)]
struct HygieneFrames(Arc<HygieneFrame>);
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct HygieneFrame {}
#[derive(Debug, Clone, PartialEq, Eq)]
struct HygieneInfo {}
impl HygieneInfo {
fn _map_ident_up(&self, _: &dyn ExpandDatabase, _: TextRange) -> Option<InFile<TextRange>> {
None
}
}
impl HygieneFrame {
pub(crate) fn new(_: &dyn ExpandDatabase, _: HirFileId) -> HygieneFrame {
HygieneFrame {}
}
}
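
With the placeholder `Hygiene`/`HygieneFrame` machinery deleted, a syntax context is now an interned chain of (macro call, transparency) marks. A minimal sketch of how that chain can be inspected with the `SyntaxContextExt` methods above (assumes the trait is in scope; `dump_marks` and `ctx` are placeholders, not part of the commit):

    // Walk a syntax context and print its marks.
    fn dump_marks(db: &dyn ExpandDatabase, ctx: SyntaxContextId) {
        // `marks` walks parent links up to the root and then reverses, so the
        // marks come out ordered from the one nearest the root to the most
        // recent expansion.
        for (call, transparency) in ctx.marks(db) {
            eprintln!("mark: {call:?} ({transparency:?})");
        }
        // The two normalizations pick a filtered view of the chain:
        let for_macro_rules = ctx.normalize_to_macro_rules(db); // keeps opaque + semi-transparent marks
        let for_macros_2_0 = ctx.normalize_to_macros_2_0(db); // keeps only opaque marks
        let _ = (for_macro_rules, for_macros_2_0);
    }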

View file

@@ -24,7 +24,10 @@ use triomphe::Arc;
use std::{fmt, hash::Hash, iter};
use base_db::{span::HirFileIdRepr, CrateId, FileId, FileRange, ProcMacroKind};
use base_db::{
span::{HirFileIdRepr, SyntaxContextId},
CrateId, FileId, FileRange, ProcMacroKind,
};
use either::Either;
use syntax::{
algo::{self, skip_trivia_token},
@@ -105,6 +108,7 @@ pub struct MacroCallLoc {
/// for the eager input macro file.
eager: Option<Box<EagerCallInfo>>,
pub kind: MacroCallKind,
pub call_site: SyntaxContextId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -330,8 +334,9 @@ impl MacroDefId {
db: &dyn db::ExpandDatabase,
krate: CrateId,
kind: MacroCallKind,
call_site: SyntaxContextId,
) -> MacroCallId {
db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind })
db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind, call_site })
}
pub fn ast_id(&self) -> Either<AstId<ast::Macro>, AstId<ast::Fn>> {

View file

@@ -7,11 +7,11 @@ use std::{
use crate::{
db::ExpandDatabase,
hygiene::Hygiene,
name::{known, Name},
hygiene::{SyntaxContextExt, Transparency},
name::{known, AsName, Name},
SpanMap,
};
use base_db::CrateId;
use either::Either;
use base_db::{span::SyntaxContextId, CrateId};
use smallvec::SmallVec;
use syntax::{ast, AstNode};
@@ -38,6 +38,7 @@ pub enum PathKind {
Crate,
/// Absolute path (::foo)
Abs,
// FIXME: Remove this
/// `$crate` from macro expansion
DollarCrate(CrateId),
}
@@ -46,7 +47,7 @@ impl ModPath {
pub fn from_src(
db: &dyn ExpandDatabase,
path: ast::Path,
hygiene: &Hygiene,
hygiene: &SpanMap,
) -> Option<ModPath> {
convert_path(db, None, path, hygiene)
}
@@ -193,7 +194,7 @@ fn convert_path(
db: &dyn ExpandDatabase,
prefix: Option<ModPath>,
path: ast::Path,
hygiene: &Hygiene,
hygiene: &SpanMap,
) -> Option<ModPath> {
let prefix = match path.qualifier() {
Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?),
@@ -203,23 +204,26 @@ fn convert_path(
let segment = path.segment()?;
let mut mod_path = match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
match hygiene.name_ref_to_name(db, name_ref) {
Either::Left(name) => {
// no type args in use
let mut res = prefix.unwrap_or_else(|| {
ModPath::from_kind(
segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
)
});
res.segments.push(name);
res
}
Either::Right(crate_id) => {
return Some(ModPath::from_segments(
PathKind::DollarCrate(crate_id),
iter::empty(),
))
if name_ref.text() == "$crate" {
if prefix.is_some() {
return None;
}
resolve_crate_root(
db,
hygiene
.span_for_range(name_ref.syntax().text_range())
.map_or(SyntaxContextId::ROOT, |s| s.ctx),
)
.map(PathKind::DollarCrate)
.map(ModPath::from_kind)?
} else {
let mut res = prefix.unwrap_or_else(|| {
ModPath::from_kind(
segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
)
});
res.segments.push(name_ref.as_name());
res
}
}
ast::PathSegmentKind::SelfTypeKw => {
@@ -261,8 +265,15 @@ fn convert_path(
// We follow what it did anyway :)
if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
if let Some(crate_id) = hygiene.local_inner_macros(db, path) {
mod_path.kind = PathKind::DollarCrate(crate_id);
let syn_ctx = hygiene
.span_for_range(segment.syntax().text_range())
.map_or(SyntaxContextId::ROOT, |s| s.ctx);
if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn {
if db.lookup_intern_macro_call(macro_call_id).def.local_inner {
if let Some(crate_root) = resolve_crate_root(db, syn_ctx) {
mod_path.kind = PathKind::DollarCrate(crate_root);
}
}
}
}
}
@@ -270,6 +281,40 @@ fn convert_path(
Some(mod_path)
}
pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> {
// When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
// we don't want to pretend that the `macro_rules!` definition is in the `macro`
// as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks.
// FIXME: This is only a guess and it doesn't work correctly for `macro_rules!`
// definitions actually produced by `macro` and `macro` definitions produced by
// `macro_rules!`, but at least such configurations are not stable yet.
ctxt = ctxt.normalize_to_macro_rules(db);
let mut iter = ctxt.marks(db).into_iter().rev().peekable();
let mut result_mark = None;
// Find the last opaque mark from the end if it exists.
while let Some(&(mark, transparency)) = iter.peek() {
if transparency == Transparency::Opaque {
result_mark = Some(mark);
iter.next();
} else {
break;
}
}
// Then find the last semi-transparent mark from the end if it exists.
for (mark, transparency) in iter {
if transparency == Transparency::SemiTransparent {
result_mark = Some(mark);
} else {
break;
}
}
match result_mark {
Some(Some(call)) => Some(db.lookup_intern_macro_call(call.into()).def.krate),
Some(None) | None => None,
}
}
pub use crate::name as __name;
#[macro_export]
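
The `Either<Name, CrateId>` detour through `Hygiene::name_ref_to_name` is gone: `convert_path` now recognises the `$crate` token directly and asks `resolve_crate_root` for the defining crate by walking the token's syntax-context marks. For reference, the plain-Rust situation this has to handle (independent of rust-analyzer internals; `helper_macro` and `helper` are illustrative names):

    // In the expansion of `helper_macro!`, `$crate` must name the crate that
    // defines the macro, not the crate that invokes it.
    #[macro_export]
    macro_rules! helper_macro {
        () => {
            $crate::helper()
        };
    }

    pub fn helper() {}

Inside rust-analyzer this now works by mapping the `$crate` token's range through the `SpanMap` to a `SyntaxContextId` and passing it to `resolve_crate_root`, which returns the defining crate's id, or `None` when no opaque or semi-transparent mark is found.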

View file

@@ -470,6 +470,7 @@ pub mod known {
pub const SELF_TYPE: super::Name = super::Name::new_inline("Self");
pub const STATIC_LIFETIME: super::Name = super::Name::new_inline("'static");
pub const DOLLAR_CRATE: super::Name = super::Name::new_inline("$crate");
#[macro_export]
macro_rules! name {