9970: feat: Implement attribute input token mapping, fix attribute item token mapping r=Veykril a=Veykril

![image](https://user-images.githubusercontent.com/3757771/130328577-4c1ad72c-51b1-47c3-8d3d-3242ec44a355.png)

The token mapping for items with attributes was partially overwritten by the attribute's non-item input, because an attribute has two different inputs: the annotated item and the direct attribute input.
This PR gives attributes a second TokenMap for the direct input. The direct input's token IDs are now shifted past the item input's maximum ID (we may want to swap this, see below), similar to what we do for `macro_rules!`/`macro` definitions. When mapping down, we then have to figure out whether a token lies inside the direct attribute input or inside the item input and pick the appropriate map, which can be done with a few token-range comparisons.
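Conceptually, the down-mapping picks one of the two token maps by range containment. A minimal sketch of that decision with hypothetical, simplified types (the real logic lives in `ExpansionInfo::map_token_down` below):

```rust
use std::ops::Range;

/// Which of an attribute macro's two inputs a source offset belongs to.
/// Hypothetical types for illustration -- not the rust-analyzer API.
enum InputKind {
    /// Inside the `#[attr(...)]` token tree itself.
    DirectAttrInput,
    /// Inside the item the attribute is attached to.
    ItemInput,
}

/// The direct input lies within the item's text range, so it must be
/// tested first; everything else in the item maps via the item's TokenMap.
fn classify(offset: u32, attr_input: &Range<u32>, item: &Range<u32>) -> Option<InputKind> {
    if attr_input.contains(&offset) {
        Some(InputKind::DirectAttrInput)
    } else if item.contains(&offset) {
        Some(InputKind::ItemInput)
    } else {
        None
    }
}
```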

Fixes https://github.com/rust-analyzer/rust-analyzer/issues/9867

Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
bors[bot] 2021-08-27 19:30:36 +00:00 committed by GitHub
commit 97409e5fc8
9 changed files with 196 additions and 88 deletions

```diff
@@ -474,7 +474,7 @@ impl<'db> SemanticsImpl<'db> {
                     .entry(file_id)
                     .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
                     .as_ref()?
-                    .map_token_down(token.as_ref())?;
+                    .map_token_down(self.db.upcast(), None, token.as_ref())?;
 
                 if let Some(parent) = token.value.parent() {
                     self.cache(find_root(&parent), token.file_id);
@@ -483,24 +483,21 @@ impl<'db> SemanticsImpl<'db> {
                     return Some(token);
                 },
                 ast::Item(item) => {
-                    match self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item))) {
-                        Some(call_id) => {
-                            let file_id = call_id.as_file();
-                            let token = self
-                                .expansion_info_cache
-                                .borrow_mut()
-                                .entry(file_id)
-                                .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
-                                .as_ref()?
-                                .map_token_down(token.as_ref())?;
+                    if let Some(call_id) = self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item.clone()))) {
+                        let file_id = call_id.as_file();
+                        let token = self
+                            .expansion_info_cache
+                            .borrow_mut()
+                            .entry(file_id)
+                            .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
+                            .as_ref()?
+                            .map_token_down(self.db.upcast(), Some(item), token.as_ref())?;
 
-                            if let Some(parent) = token.value.parent() {
-                                self.cache(find_root(&parent), token.file_id);
-                            }
+                        if let Some(parent) = token.value.parent() {
+                            self.cache(find_root(&parent), token.file_id);
+                        }
 
-                            return Some(token);
-                        }
-                        None => {}
+                        return Some(token);
                     }
                 },
                 _ => {}
@@ -512,7 +509,6 @@ impl<'db> SemanticsImpl<'db> {
             })
             .last()
             .unwrap();
-
         token.value
     }
```

```diff
@@ -2,7 +2,9 @@
 use std::{
     convert::{TryFrom, TryInto},
-    fmt, ops,
+    fmt,
+    hash::Hash,
+    ops,
     sync::Arc,
 };
@@ -158,7 +160,7 @@ impl RawAttrs {
         }
         let subtree = match attr.input.as_deref() {
-            Some(AttrInput::TokenTree(it)) => it,
+            Some(AttrInput::TokenTree(it, _)) => it,
             _ => return smallvec![attr.clone()],
         };
@@ -258,7 +260,7 @@ impl Attrs {
     pub fn docs(&self) -> Option<Documentation> {
         let docs = self.by_key("doc").attrs().flat_map(|attr| match attr.input.as_deref()? {
             AttrInput::Literal(s) => Some(s),
-            AttrInput::TokenTree(_) => None,
+            AttrInput::TokenTree(..) => None,
         });
         let indent = docs
             .clone()
@@ -463,7 +465,7 @@ impl AttrsWithOwner {
         // FIXME: code duplication in `docs` above
         let docs = self.by_key("doc").attrs().flat_map(|attr| match attr.input.as_deref()? {
             AttrInput::Literal(s) => Some((s, attr.id)),
-            AttrInput::TokenTree(_) => None,
+            AttrInput::TokenTree(..) => None,
         });
         let indent = docs
             .clone()
@@ -652,14 +654,14 @@ pub enum AttrInput {
     /// `#[attr = "string"]`
     Literal(SmolStr),
     /// `#[attr(subtree)]`
-    TokenTree(Subtree),
+    TokenTree(tt::Subtree, mbe::TokenMap),
 }
 
 impl fmt::Display for AttrInput {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
-            AttrInput::TokenTree(subtree) => subtree.fmt(f),
+            AttrInput::TokenTree(subtree, _) => subtree.fmt(f),
         }
     }
 }
@@ -679,7 +681,8 @@ impl Attr {
            };
            Some(Interned::new(AttrInput::Literal(value)))
        } else if let Some(tt) = ast.token_tree() {
-           Some(Interned::new(AttrInput::TokenTree(syntax_node_to_token_tree(tt.syntax()).0)))
+           let (tree, map) = syntax_node_to_token_tree(tt.syntax());
+           Some(Interned::new(AttrInput::TokenTree(tree, map)))
        } else {
            None
        };
@@ -709,7 +712,7 @@ impl Attr {
         }
 
         match self.input.as_deref() {
-            Some(AttrInput::TokenTree(args)) => {
+            Some(AttrInput::TokenTree(args, _)) => {
                 let mut counter = 0;
                 let paths = args
                     .token_trees
@@ -756,7 +759,7 @@ pub struct AttrQuery<'a> {
 impl<'a> AttrQuery<'a> {
     pub fn tt_values(self) -> impl Iterator<Item = &'a Subtree> {
         self.attrs().filter_map(|attr| match attr.input.as_deref()? {
-            AttrInput::TokenTree(it) => Some(it),
+            AttrInput::TokenTree(it, _) => Some(it),
             _ => None,
         })
     }
```

```diff
@@ -786,13 +786,13 @@ fn attr_macro_as_call_id(
         .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
     let mut arg = match &macro_attr.input {
         Some(input) => match &**input {
-            attr::AttrInput::Literal(_) => tt::Subtree::default(),
-            attr::AttrInput::TokenTree(tt) => tt.clone(),
+            attr::AttrInput::Literal(_) => Default::default(),
+            attr::AttrInput::TokenTree(tt, map) => (tt.clone(), map.clone()),
         },
-        None => tt::Subtree::default(),
+        None => Default::default(),
     };
 
     // The parentheses are always disposed here.
-    arg.delimiter = None;
+    arg.0.delimiter = None;
 
     let res = def.as_lazy_macro(
         db.upcast(),
```

```diff
@@ -289,7 +289,7 @@ impl DefCollector<'_> {
                 || *attr_name == hir_expand::name![register_tool]
             {
                 match attr.input.as_deref() {
-                    Some(AttrInput::TokenTree(subtree)) => match &*subtree.token_trees {
+                    Some(AttrInput::TokenTree(subtree, _)) => match &*subtree.token_trees {
                         [tt::TokenTree::Leaf(tt::Leaf::Ident(name))] => name.as_name(),
                         _ => continue,
                     },
```

```diff
@@ -223,7 +223,7 @@ fn parse_macro_expansion(
         Ok(it) => it,
         Err(err) => {
             log::debug!(
-                "failed to parse expanstion to {:?} = {}",
+                "failed to parse expansion to {:?} = {}",
                 fragment_kind,
                 tt.as_debug_string()
             );
@@ -386,11 +386,15 @@ fn expand_proc_macro(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<tt::
     };
 
     let attr_arg = match &loc.kind {
-        MacroCallKind::Attr { attr_args, .. } => Some(attr_args),
+        MacroCallKind::Attr { attr_args, .. } => {
+            let mut attr_args = attr_args.0.clone();
+            mbe::Shift::new(&macro_arg.0).shift_all(&mut attr_args);
+            Some(attr_args)
+        }
         _ => None,
     };
 
-    expander.expand(db, loc.krate, &macro_arg.0, attr_arg)
+    expander.expand(db, loc.krate, &macro_arg.0, attr_arg.as_ref())
 }
 
 fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool {
```
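The shift in `expand_proc_macro` above is what keeps the two ID spaces disjoint: if the item input's IDs occupy `0..N`, every ID from the attribute's direct input is moved to `N..`. A standalone sketch of the arithmetic with made-up numbers (mirroring `mbe::Shift`, whose real definition operates on `tt::TokenId`):

```rust
// Simplified stand-in for `mbe::Shift`; the numbers below are invented.
#[derive(Clone, Copy)]
struct Shift(u32);

impl Shift {
    fn shift(self, id: u32) -> u32 { id + self.0 }
    fn unshift(self, id: u32) -> Option<u32> { id.checked_sub(self.0) }
}

fn main() {
    // Suppose the item input produced token IDs 0..=41, so the shift is 42.
    let shift = Shift(42);
    // An ID from the attribute's direct input moves into the upper range...
    assert_eq!(shift.shift(3), 45);
    // ...and is recognized again when mapping back up:
    assert_eq!(shift.unshift(45), Some(3));
    // IDs below the shift belong to the item input.
    assert_eq!(shift.unshift(7), None);
}
```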

```diff
@@ -9,12 +9,15 @@ use db::TokenExpander;
 use either::Either;
 use mbe::Origin;
 use parser::SyntaxKind;
-use syntax::{ast, AstNode, SyntaxNode, TextRange, TextSize};
+use syntax::{
+    ast::{self, AttrsOwner},
+    AstNode, SyntaxNode, TextRange, TextSize,
+};
 
 use crate::{
     db::{self, AstDatabase},
     name::{AsName, Name},
-    HirFileId, HirFileIdRepr, InFile, MacroCallLoc, MacroDefKind, MacroFile,
+    HirFileId, HirFileIdRepr, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
 };
@@ -121,11 +124,12 @@ impl HygieneFrames {
 #[derive(Debug, Clone, PartialEq, Eq)]
 struct HygieneInfo {
     file: MacroFile,
-    /// The `macro_rules!` arguments.
-    def_start: Option<InFile<TextSize>>,
+    /// The start offset of the `macro_rules!` arguments or attribute input.
+    attr_input_or_mac_def_start: Option<InFile<TextSize>>,
 
     macro_def: Arc<TokenExpander>,
     macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
+    macro_arg_shift: mbe::Shift,
     exp_map: Arc<mbe::TokenMap>,
 }
@@ -136,22 +140,34 @@ impl HygieneInfo {
         token: TextRange,
     ) -> Option<(InFile<TextRange>, Origin)> {
         let token_id = self.exp_map.token_by_range(token)?;
+        let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
 
-        let (token_id, origin) = self.macro_def.map_id_up(token_id);
-        let (token_map, tt) = match origin {
-            mbe::Origin::Call => {
-                let call_id = self.file.macro_call_id;
-                let loc: MacroCallLoc = db.lookup_intern_macro(call_id);
-                let arg_start = loc.kind.arg(db)?.text_range().start();
-                (&self.macro_arg.1, InFile::new(loc.kind.file_id(), arg_start))
-            }
-            mbe::Origin::Def => match (&*self.macro_def, self.def_start) {
-                (
-                    TokenExpander::MacroDef { def_site_token_map, .. }
-                    | TokenExpander::MacroRules { def_site_token_map, .. },
-                    Some(tt),
-                ) => (def_site_token_map, tt),
-                _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
+        let loc = db.lookup_intern_macro(self.file.macro_call_id);
+
+        let (token_map, tt) = match &loc.kind {
+            MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) {
+                Some(unshifted) => {
+                    token_id = unshifted;
+                    (&attr_args.1, self.attr_input_or_mac_def_start?)
+                }
+                None => (
+                    &self.macro_arg.1,
+                    InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
+                ),
+            },
+            _ => match origin {
+                mbe::Origin::Call => (
+                    &self.macro_arg.1,
+                    InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
+                ),
+                mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def_start) {
+                    (
+                        TokenExpander::MacroDef { def_site_token_map, .. }
+                        | TokenExpander::MacroRules { def_site_token_map, .. },
+                        Some(tt),
+                    ) => (def_site_token_map, *tt),
+                    _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
+                },
             },
         };
@@ -165,19 +181,34 @@ fn make_hygiene_info(
     macro_file: MacroFile,
     loc: &MacroCallLoc,
 ) -> Option<HygieneInfo> {
-    let def_offset = loc.def.ast_id().left().and_then(|id| {
+    let def = loc.def.ast_id().left().and_then(|id| {
         let def_tt = match id.to_node(db) {
-            ast::Macro::MacroRules(mac) => mac.token_tree()?.syntax().text_range().start(),
-            ast::Macro::MacroDef(mac) => mac.body()?.syntax().text_range().start(),
+            ast::Macro::MacroRules(mac) => mac.token_tree()?,
+            ast::Macro::MacroDef(mac) => mac.body()?,
         };
         Some(InFile::new(id.file_id, def_tt))
     });
+    let attr_input_or_mac_def = def.or_else(|| match loc.kind {
+        MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+            let tt = ast_id.to_node(db).attrs().nth(invoc_attr_index as usize)?.token_tree()?;
+            Some(InFile::new(ast_id.file_id, tt))
+        }
+        _ => None,
+    });
 
     let macro_def = db.macro_def(loc.def)?;
     let (_, exp_map) = db.parse_macro_expansion(macro_file).value?;
     let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
 
-    Some(HygieneInfo { file: macro_file, def_start: def_offset, macro_arg, macro_def, exp_map })
+    Some(HygieneInfo {
+        file: macro_file,
+        attr_input_or_mac_def_start: attr_input_or_mac_def
+            .map(|it| it.map(|tt| tt.syntax().text_range().start())),
+        macro_arg_shift: mbe::Shift::new(&macro_arg.0),
+        macro_arg,
+        macro_def,
+        exp_map,
+    })
 }
 
 impl HygieneFrame {
@@ -214,7 +245,7 @@ impl HygieneFrame {
             Some(it) => it,
         };
 
-        let def_site = info.def_start.map(|it| db.hygiene_frame(it.file_id));
+        let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
         let call_site = Some(db.hygiene_frame(calling_file));
 
         HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site }
```

```diff
@@ -27,7 +27,7 @@ use std::{hash::Hash, iter, sync::Arc};
 use base_db::{impl_intern_key, salsa, CrateId, FileId, FileRange};
 use syntax::{
     algo::skip_trivia_token,
-    ast::{self, AstNode},
+    ast::{self, AstNode, AttrsOwner},
     Direction, SyntaxNode, SyntaxToken, TextRange, TextSize,
 };
@@ -36,6 +36,7 @@ use crate::{
     builtin_attr::BuiltinAttrExpander,
     builtin_derive::BuiltinDeriveExpander,
     builtin_macro::{BuiltinFnLikeExpander, EagerExpander},
+    db::TokenExpander,
     proc_macro::ProcMacroExpander,
 };
@@ -132,6 +133,17 @@ impl HirFileId {
             };
             Some(InFile::new(id.file_id, def_tt))
         });
+        let attr_input_or_mac_def = def.or_else(|| match loc.kind {
+            MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+                let tt = ast_id
+                    .to_node(db)
+                    .attrs()
+                    .nth(invoc_attr_index as usize)?
+                    .token_tree()?;
+                Some(InFile::new(ast_id.file_id, tt))
+            }
+            _ => None,
+        });
 
         let macro_def = db.macro_def(loc.def)?;
         let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?;
@@ -140,7 +152,8 @@ impl HirFileId {
         Some(ExpansionInfo {
             expanded: InFile::new(self, parse.syntax_node()),
             arg: InFile::new(loc.kind.file_id(), arg_tt),
-            def,
+            attr_input_or_mac_def,
+            macro_arg_shift: mbe::Shift::new(&macro_arg.0),
             macro_arg,
             macro_def,
             exp_map,
@@ -270,7 +283,7 @@ pub enum MacroCallKind {
     Attr {
         ast_id: AstId<ast::Item>,
         attr_name: String,
-        attr_args: tt::Subtree,
+        attr_args: (tt::Subtree, mbe::TokenMap),
         /// Syntactical index of the invoking `#[attribute]`.
         ///
         /// Outer attributes are counted first, then inner attributes. This does not support
@@ -335,11 +348,12 @@ impl MacroCallId {
 pub struct ExpansionInfo {
     expanded: InFile<SyntaxNode>,
     arg: InFile<SyntaxNode>,
-    /// The `macro_rules!` arguments.
-    def: Option<InFile<ast::TokenTree>>,
+    /// The `macro_rules!` arguments or attribute input.
+    attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
 
-    macro_def: Arc<db::TokenExpander>,
+    macro_def: Arc<TokenExpander>,
     macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
+    macro_arg_shift: mbe::Shift,
     exp_map: Arc<mbe::TokenMap>,
 }
@@ -350,11 +364,53 @@ impl ExpansionInfo {
         Some(self.arg.with_value(self.arg.value.parent()?))
     }
 
-    pub fn map_token_down(&self, token: InFile<&SyntaxToken>) -> Option<InFile<SyntaxToken>> {
+    pub fn map_token_down(
+        &self,
+        db: &dyn db::AstDatabase,
+        item: Option<ast::Item>,
+        token: InFile<&SyntaxToken>,
+    ) -> Option<InFile<SyntaxToken>> {
         assert_eq!(token.file_id, self.arg.file_id);
-        let range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
-        let token_id = self.macro_arg.1.token_by_range(range)?;
-        let token_id = self.macro_def.map_id_down(token_id);
+
+        let token_id = if let Some(item) = item {
+            let call_id = match self.expanded.file_id.0 {
+                HirFileIdRepr::FileId(_) => return None,
+                HirFileIdRepr::MacroFile(macro_file) => macro_file.macro_call_id,
+            };
+            let loc = db.lookup_intern_macro(call_id);
+
+            let token_range = token.value.text_range();
+            match &loc.kind {
+                MacroCallKind::Attr { attr_args, invoc_attr_index, .. } => {
+                    let attr = item.attrs().nth(*invoc_attr_index as usize)?;
+                    match attr.token_tree() {
+                        Some(token_tree)
+                            if token_tree.syntax().text_range().contains_range(token_range) =>
+                        {
+                            let attr_input_start =
+                                token_tree.left_delimiter_token()?.text_range().start();
+                            let range = token.value.text_range().checked_sub(attr_input_start)?;
+                            let token_id =
+                                self.macro_arg_shift.shift(attr_args.1.token_by_range(range)?);
+                            Some(token_id)
+                        }
+                        _ => None,
+                    }
+                }
+                _ => None,
+            }
+        } else {
+            None
+        };
+
+        let token_id = match token_id {
+            Some(token_id) => token_id,
+            None => {
+                let range =
+                    token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
+                let token_id = self.macro_arg.1.token_by_range(range)?;
+                self.macro_def.map_id_down(token_id)
+            }
+        };
 
         let range = self.exp_map.range_by_token(token_id, token.value.kind())?;
@@ -365,20 +421,36 @@ impl ExpansionInfo {
     pub fn map_token_up(
         &self,
+        db: &dyn db::AstDatabase,
         token: InFile<&SyntaxToken>,
     ) -> Option<(InFile<SyntaxToken>, Origin)> {
         let token_id = self.exp_map.token_by_range(token.value.text_range())?;
+        let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
 
-        let (token_id, origin) = self.macro_def.map_id_up(token_id);
-        let (token_map, tt) = match origin {
-            mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
-            mbe::Origin::Def => match (&*self.macro_def, self.def.as_ref()) {
-                (
-                    db::TokenExpander::MacroRules { def_site_token_map, .. }
-                    | db::TokenExpander::MacroDef { def_site_token_map, .. },
-                    Some(tt),
-                ) => (def_site_token_map, tt.syntax().cloned()),
-                _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
+        let call_id = match self.expanded.file_id.0 {
+            HirFileIdRepr::FileId(_) => return None,
+            HirFileIdRepr::MacroFile(macro_file) => macro_file.macro_call_id,
+        };
+        let loc = db.lookup_intern_macro(call_id);
+
+        let (token_map, tt) = match &loc.kind {
+            MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) {
+                Some(unshifted) => {
+                    token_id = unshifted;
+                    (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
+                }
+                None => (&self.macro_arg.1, self.arg.clone()),
+            },
+            _ => match origin {
+                mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
+                mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def) {
+                    (
+                        TokenExpander::MacroRules { def_site_token_map, .. }
+                        | TokenExpander::MacroDef { def_site_token_map, .. },
+                        Some(tt),
+                    ) => (def_site_token_map, tt.syntax().cloned()),
+                    _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
+                },
             },
         };
@@ -532,7 +604,7 @@ fn ascend_call_token(
     expansion: &ExpansionInfo,
     token: InFile<SyntaxToken>,
 ) -> Option<InFile<SyntaxToken>> {
-    let (mapped, origin) = expansion.map_token_up(token.as_ref())?;
+    let (mapped, origin) = expansion.map_token_up(db, token.as_ref())?;
     if origin != Origin::Call {
         return None;
     }
```

```diff
@@ -97,11 +97,11 @@ struct Rule {
     rhs: MetaTemplate,
 }
 
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-struct Shift(u32);
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct Shift(u32);
 
 impl Shift {
-    fn new(tt: &tt::Subtree) -> Shift {
+    pub fn new(tt: &tt::Subtree) -> Shift {
         // Note that TokenId is started from zero,
         // We have to add 1 to prevent duplication.
         let value = max_id(tt).map_or(0, |it| it + 1);
@@ -134,7 +134,7 @@ impl Shift {
     }
 
     /// Shift given TokenTree token id
-    fn shift_all(self, tt: &mut tt::Subtree) {
+    pub fn shift_all(self, tt: &mut tt::Subtree) {
         for t in &mut tt.token_trees {
             match t {
                 tt::TokenTree::Leaf(leaf) => match leaf {
@@ -152,14 +152,14 @@ impl Shift {
         }
     }
 
-    fn shift(self, id: tt::TokenId) -> tt::TokenId {
+    pub fn shift(self, id: tt::TokenId) -> tt::TokenId {
         if id == tt::TokenId::unspecified() {
             return id;
         }
         tt::TokenId(id.0 + self.0)
     }
 
-    fn unshift(self, id: tt::TokenId) -> Option<tt::TokenId> {
+    pub fn unshift(self, id: tt::TokenId) -> Option<tt::TokenId> {
         id.0.checked_sub(self.0).map(tt::TokenId)
     }
 }
```
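With `Shift` and its methods now public, code outside `mbe` (like `expand_proc_macro` and the `ExpansionInfo` mapping above) can partition ID spaces itself. A sketch of the intended call pattern, assuming the workspace's `mbe` and `tt` crates; the helper name is hypothetical:

```rust
/// Offset every token ID in the attribute's direct input past the item
/// input's maximum, and return the shift for later `unshift` queries.
fn shift_attr_input(item_input: &tt::Subtree, attr_input: &mut tt::Subtree) -> mbe::Shift {
    let shift = mbe::Shift::new(item_input); // max ID in `item_input` + 1
    shift.shift_all(attr_input);             // move the direct input's IDs upward
    shift
}
```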

```diff
@@ -1,9 +1,11 @@
 //! Mapping between `TokenId`s and the token's position in macro definitions or inputs.
 
+use std::hash::Hash;
+
 use parser::{SyntaxKind, T};
 use syntax::{TextRange, TextSize};
 
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
 enum TokenTextRange {
     Token(TextRange),
     Delimiter(TextRange),
@@ -25,7 +27,7 @@ impl TokenTextRange {
 }
 
 /// Maps `tt::TokenId` to the relative range of the original token.
-#[derive(Debug, PartialEq, Eq, Clone, Default)]
+#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)]
 pub struct TokenMap {
     /// Maps `tt::TokenId` to the *relative* source range.
     entries: Vec<(tt::TokenId, TokenTextRange)>,
```