Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-11-03 05:13:35 +00:00)
Merge pull request #18327 from ChayimFriedman2/flat-tt
Store token trees in contiguous `Vec` instead of as a tree
Commit b6910ed1b2
50 changed files with 2356 additions and 2286 deletions
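
For readers skimming the diff, the core idea of the change is to keep every token of a macro token tree in one contiguous buffer, with a subtree represented as a marker that records how many of the following entries it spans, instead of nesting vectors of children. The sketch below is a minimal, self-contained illustration of that storage scheme; the names (`Entry`, `FlatTree`, `top_level`) are invented for the example and are not rust-analyzer's actual `tt::TopSubtree`/`TtIter` API.

// Illustrative sketch only (not rust-analyzer's real types): a token tree stored
// flat in one Vec, where a subtree marker records how many following entries
// belong to it, so no nested allocations are needed.

#[derive(Debug)]
enum Entry {
    Ident(String),
    Punct(char),
    // `len` counts the entries that follow and belong to this subtree,
    // including any nested subtree markers and their contents.
    SubtreeStart { delimiter: char, len: usize },
}

#[derive(Debug)]
struct FlatTree {
    entries: Vec<Entry>,
}

impl FlatTree {
    /// Walk the top level only, skipping the contents of a subtree in O(1)
    /// by jumping `len + 1` entries past its marker.
    fn top_level(&self) -> impl Iterator<Item = &Entry> {
        let mut i = 0;
        std::iter::from_fn(move || {
            let entry = self.entries.get(i)?;
            i += match entry {
                Entry::SubtreeStart { len, .. } => *len + 1,
                _ => 1,
            };
            Some(entry)
        })
    }
}

fn main() {
    // Encodes `foo ( a , b ) bar`: the parenthesized subtree spans 3 entries.
    let tree = FlatTree {
        entries: vec![
            Entry::Ident("foo".into()),
            Entry::SubtreeStart { delimiter: '(', len: 3 },
            Entry::Ident("a".into()),
            Entry::Punct(','),
            Entry::Ident("b".into()),
            Entry::Ident("bar".into()),
        ],
    };
    // Prints foo, the `(` subtree marker, then bar; a, `,` and b are skipped.
    for entry in tree.top_level() {
        println!("{entry:?}");
    }
}

Iterating one nesting level is then just index arithmetic over the flat buffer, which is broadly how the `TtIter`/`TtElement`-style iteration used throughout the diff below can hand out sub-iterators without owning nested vectors.
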
Cargo.lock (generated): 1 change

@@ -1389,6 +1389,7 @@ version = "0.0.0"
 dependencies = [
 "proc-macro-api",
 "proc-macro-srv",
+"tt",
 ]

 [[package]]

@@ -45,8 +45,8 @@ impl From<CfgAtom> for CfgExpr {

 impl CfgExpr {
 #[cfg(feature = "tt")]
-pub fn parse<S>(tt: &tt::Subtree<S>) -> CfgExpr {
-next_cfg_expr(&mut tt.token_trees.iter()).unwrap_or(CfgExpr::Invalid)
+pub fn parse<S: Copy>(tt: &tt::TopSubtree<S>) -> CfgExpr {
+next_cfg_expr(&mut tt.iter()).unwrap_or(CfgExpr::Invalid)
 }

 /// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates.
@@ -66,19 +66,19 @@ impl CfgExpr {
 }

 #[cfg(feature = "tt")]
-fn next_cfg_expr<S>(it: &mut std::slice::Iter<'_, tt::TokenTree<S>>) -> Option<CfgExpr> {
+fn next_cfg_expr<S: Copy>(it: &mut tt::iter::TtIter<'_, S>) -> Option<CfgExpr> {
 use intern::sym;
+use tt::iter::TtElement;

 let name = match it.next() {
 None => return None,
-Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.sym.clone(),
+Some(TtElement::Leaf(tt::Leaf::Ident(ident))) => ident.sym.clone(),
 Some(_) => return Some(CfgExpr::Invalid),
 };

-// Peek
-let ret = match it.as_slice().first() {
-Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => {
-match it.as_slice().get(1) {
+let ret = match it.peek() {
+Some(TtElement::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => {
+match it.remaining().flat_tokens().get(1) {
 Some(tt::TokenTree::Leaf(tt::Leaf::Literal(literal))) => {
 it.next();
 it.next();
@@ -87,9 +87,8 @@ fn next_cfg_expr<S>(it: &mut std::slice::Iter<'_, tt::TokenTree<S>>) -> Option<C
 _ => return Some(CfgExpr::Invalid),
 }
 }
-Some(tt::TokenTree::Subtree(subtree)) => {
+Some(TtElement::Subtree(_, mut sub_it)) => {
 it.next();
-let mut sub_it = subtree.token_trees.iter();
 let mut subs = std::iter::from_fn(|| next_cfg_expr(&mut sub_it));
 match name {
 s if s == sym::all => CfgExpr::All(subs.collect()),
@@ -104,7 +103,7 @@ fn next_cfg_expr<S>(it: &mut std::slice::Iter<'_, tt::TokenTree<S>>) -> Option<C
 };

 // Eat comma separator
-if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = it.as_slice().first() {
+if let Some(TtElement::Leaf(tt::Leaf::Punct(punct))) = it.peek() {
 if punct.char == ',' {
 it.next();
 }

@@ -1,6 +1,6 @@
 //! A higher level attributes based on TokenTree, with also some shortcuts.

-use std::{borrow::Cow, hash::Hash, ops, slice};
+use std::{borrow::Cow, hash::Hash, ops};

 use base_db::CrateId;
 use cfg::{CfgExpr, CfgOptions};
@@ -17,6 +17,7 @@ use syntax::{
 AstPtr,
 };
 use triomphe::Arc;
+use tt::iter::{TtElement, TtIter};

 use crate::{
 db::DefDatabase,
@@ -154,15 +155,15 @@ impl Attrs {

 pub fn has_doc_hidden(&self) -> bool {
 self.by_key(&sym::doc).tt_values().any(|tt| {
-tt.delimiter.kind == DelimiterKind::Parenthesis &&
-matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::hidden)
+tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis &&
+matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::hidden)
 })
 }

 pub fn has_doc_notable_trait(&self) -> bool {
 self.by_key(&sym::doc).tt_values().any(|tt| {
-tt.delimiter.kind == DelimiterKind::Parenthesis &&
-matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::notable_trait)
+tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis &&
+matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::notable_trait)
 })
 }


@@ -245,8 +246,8 @@ impl From<DocAtom> for DocExpr {
 }

 impl DocExpr {
-fn parse<S>(tt: &tt::Subtree<S>) -> DocExpr {
-next_doc_expr(&mut tt.token_trees.iter()).unwrap_or(DocExpr::Invalid)
+fn parse<S: Copy>(tt: &tt::TopSubtree<S>) -> DocExpr {
+next_doc_expr(tt.iter()).unwrap_or(DocExpr::Invalid)
 }

 pub fn aliases(&self) -> &[Symbol] {
@@ -260,32 +261,29 @@ impl DocExpr {
 }
 }

-fn next_doc_expr<S>(it: &mut slice::Iter<'_, tt::TokenTree<S>>) -> Option<DocExpr> {
+fn next_doc_expr<S: Copy>(mut it: TtIter<'_, S>) -> Option<DocExpr> {
 let name = match it.next() {
 None => return None,
-Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.sym.clone(),
+Some(TtElement::Leaf(tt::Leaf::Ident(ident))) => ident.sym.clone(),
 Some(_) => return Some(DocExpr::Invalid),
 };

 // Peek
-let ret = match it.as_slice().first() {
-Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => {
-match it.as_slice().get(1) {
-Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
+let ret = match it.peek() {
+Some(TtElement::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => {
+it.next();
+match it.next() {
+Some(TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
 symbol: text,
 kind: tt::LitKind::Str,
 ..
-}))) => {
-it.next();
-it.next();
-DocAtom::KeyValue { key: name, value: text.clone() }.into()
-}
+}))) => DocAtom::KeyValue { key: name, value: text.clone() }.into(),
 _ => return Some(DocExpr::Invalid),
 }
 }
-Some(tt::TokenTree::Subtree(subtree)) => {
+Some(TtElement::Subtree(_, subtree_iter)) => {
 it.next();
-let subs = parse_comma_sep(subtree);
+let subs = parse_comma_sep(subtree_iter);
 match &name {
 s if *s == sym::alias => DocExpr::Alias(subs),
 _ => DocExpr::Invalid,
@@ -293,25 +291,13 @@ fn next_doc_expr<S>(it: &mut slice::Iter<'_, tt::TokenTree<S>>) -> Option<DocExp
 }
 _ => DocAtom::Flag(name).into(),
 };

-// Eat comma separator
-if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = it.as_slice().first() {
-if punct.char == ',' {
-it.next();
-}
-}
 Some(ret)
 }

-fn parse_comma_sep<S>(subtree: &tt::Subtree<S>) -> Vec<Symbol> {
-subtree
-.token_trees
-.iter()
-.filter_map(|tt| match tt {
-tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
-kind: tt::LitKind::Str,
-symbol,
-..
+fn parse_comma_sep<S>(iter: TtIter<'_, S>) -> Vec<Symbol> {
+iter.filter_map(|tt| match tt {
+TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
+kind: tt::LitKind::Str, symbol, ..
 })) => Some(symbol.clone()),
 _ => None,
 })
@@ -563,7 +549,7 @@ pub struct AttrQuery<'attr> {
 }

 impl<'attr> AttrQuery<'attr> {
-pub fn tt_values(self) -> impl Iterator<Item = &'attr crate::tt::Subtree> {
+pub fn tt_values(self) -> impl Iterator<Item = &'attr crate::tt::TopSubtree> {
 self.attrs().filter_map(|attr| attr.token_tree_value())
 }


@@ -596,12 +582,12 @@ impl<'attr> AttrQuery<'attr> {
 /// ```
 pub fn find_string_value_in_tt(self, key: &'attr Symbol) -> Option<&'attr str> {
 self.tt_values().find_map(|tt| {
-let name = tt.token_trees.iter()
-.skip_while(|tt| !matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym, ..} )) if *sym == *key))
+let name = tt.iter()
+.skip_while(|tt| !matches!(tt, TtElement::Leaf(tt::Leaf::Ident(tt::Ident { sym, ..} )) if *sym == *key))
 .nth(2);

 match name {
-Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal{ symbol: text, kind: tt::LitKind::Str | tt::LitKind::StrRaw(_) , ..}))) => Some(text.as_str()),
+Some(TtElement::Leaf(tt::Leaf::Literal(tt::Literal{ symbol: text, kind: tt::LitKind::Str | tt::LitKind::StrRaw(_) , ..}))) => Some(text.as_str()),
 _ => None
 }
 })

@@ -11,6 +11,7 @@ use la_arena::{Idx, RawIdx};
 use smallvec::SmallVec;
 use syntax::{ast, Parse};
 use triomphe::Arc;
+use tt::iter::TtElement;

 use crate::{
 db::DefDatabase,
@@ -156,20 +157,21 @@ impl FunctionData {
 }
 }

-fn parse_rustc_legacy_const_generics(tt: &crate::tt::Subtree) -> Box<[u32]> {
+fn parse_rustc_legacy_const_generics(tt: &crate::tt::TopSubtree) -> Box<[u32]> {
 let mut indices = Vec::new();
-for args in tt.token_trees.chunks(2) {
-match &args[0] {
-tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => match lit.symbol.as_str().parse() {
+let mut iter = tt.iter();
+while let (Some(first), second) = (iter.next(), iter.next()) {
+match first {
+TtElement::Leaf(tt::Leaf::Literal(lit)) => match lit.symbol.as_str().parse() {
 Ok(index) => indices.push(index),
 Err(_) => break,
 },
 _ => break,
 }

-if let Some(comma) = args.get(1) {
+if let Some(comma) = second {
 match comma {
-tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.char == ',' => {}
+TtElement::Leaf(tt::Leaf::Punct(punct)) if punct.char == ',' => {}
 _ => break,
 }
 }
@@ -267,8 +269,8 @@ impl TraitData {
 attrs.by_key(&sym::rustc_skip_array_during_method_dispatch).exists();
 let mut skip_boxed_slice_during_method_dispatch = false;
 for tt in attrs.by_key(&sym::rustc_skip_during_method_dispatch).tt_values() {
-for tt in tt.token_trees.iter() {
-if let crate::tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = tt {
+for tt in tt.iter() {
+if let tt::iter::TtElement::Leaf(tt::Leaf::Ident(ident)) = tt {
 skip_array_during_method_dispatch |= ident.sym == sym::array;
 skip_boxed_slice_during_method_dispatch |= ident.sym == sym::boxed_slice;
 }
@@ -421,7 +423,7 @@ impl Macro2Data {
 .by_key(&sym::rustc_builtin_macro)
 .tt_values()
 .next()
-.and_then(|attr| parse_macro_name_and_helper_attrs(&attr.token_trees))
+.and_then(parse_macro_name_and_helper_attrs)
 .map(|(_, helpers)| helpers);

 Arc::new(Macro2Data {

@@ -10,6 +10,7 @@ use intern::sym;
 use la_arena::Arena;
 use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
 use triomphe::Arc;
+use tt::iter::TtElement;

 use crate::{
 builtin_type::{BuiltinInt, BuiltinUint},
@@ -20,7 +21,7 @@ use crate::{
 },
 lang_item::LangItem,
 nameres::diagnostics::{DefDiagnostic, DefDiagnostics},
-tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree},
+tt::{Delimiter, DelimiterKind, Leaf, TopSubtree},
 type_ref::{TypeRefId, TypesMap},
 visibility::RawVisibility,
 EnumId, EnumVariantId, LocalFieldId, LocalModuleId, Lookup, StructId, UnionId, VariantId,
@@ -95,8 +96,8 @@ fn repr_from_value(
 item_tree.attrs(db, krate, of).by_key(&sym::repr).tt_values().find_map(parse_repr_tt)
 }

-fn parse_repr_tt(tt: &Subtree) -> Option<ReprOptions> {
-match tt.delimiter {
+fn parse_repr_tt(tt: &TopSubtree) -> Option<ReprOptions> {
+match tt.top_subtree().delimiter {
 Delimiter { kind: DelimiterKind::Parenthesis, .. } => {}
 _ => return None,
 }
@@ -106,14 +107,14 @@ fn parse_repr_tt(tt: &Subtree) -> Option<ReprOptions> {
 let mut max_align: Option<Align> = None;
 let mut min_pack: Option<Align> = None;

-let mut tts = tt.token_trees.iter().peekable();
+let mut tts = tt.iter();
 while let Some(tt) = tts.next() {
-if let TokenTree::Leaf(Leaf::Ident(ident)) = tt {
+if let TtElement::Leaf(Leaf::Ident(ident)) = tt {
 flags.insert(match &ident.sym {
 s if *s == sym::packed => {
-let pack = if let Some(TokenTree::Subtree(tt)) = tts.peek() {
+let pack = if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
 tts.next();
-if let Some(TokenTree::Leaf(Leaf::Literal(lit))) = tt.token_trees.first() {
+if let Some(TtElement::Leaf(Leaf::Literal(lit))) = tt_iter.next() {
 lit.symbol.as_str().parse().unwrap_or_default()
 } else {
 0
@@ -127,9 +128,9 @@ fn parse_repr_tt(tt: &Subtree) -> Option<ReprOptions> {
 ReprFlags::empty()
 }
 s if *s == sym::align => {
-if let Some(TokenTree::Subtree(tt)) = tts.peek() {
+if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
 tts.next();
-if let Some(TokenTree::Leaf(Leaf::Literal(lit))) = tt.token_trees.first() {
+if let Some(TtElement::Leaf(Leaf::Literal(lit))) = tt_iter.next() {
 if let Ok(align) = lit.symbol.as_str().parse() {
 let align = Align::from_bytes(align).ok();
 max_align = max_align.max(align);

@@ -21,6 +21,7 @@ use crate::{
 item_tree::{AttrOwner, ItemTree, ItemTreeSourceMaps},
 lang_item::{self, LangItem, LangItemTarget, LangItems},
 nameres::{diagnostics::DefDiagnostics, DefMap},
+tt,
 type_ref::TypesSourceMap,
 visibility::{self, Visibility},
 AttrDefId, BlockId, BlockLoc, ConstBlockId, ConstBlockLoc, ConstId, ConstLoc, DefWithBodyId,
@@ -294,14 +295,14 @@ fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool {
 // This is a `cfg_attr`; check if it could possibly expand to `no_std`.
 // Syntax is: `#[cfg_attr(condition(cfg, style), attr0, attr1, <...>)]`
 let tt = match attr.token_tree_value() {
-Some(tt) => &tt.token_trees,
+Some(tt) => tt.token_trees(),
 None => continue,
 };

 let segments =
-tt.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ','));
+tt.split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(p)) if p.char == ','));
 for output in segments.skip(1) {
-match output {
+match output.flat_tokens() {
 [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::no_std => {
 return true
 }

@@ -40,7 +40,7 @@ use crate::{
 resolver::HasResolver,
 src::HasSource,
 test_db::TestDB,
-tt::Subtree,
+tt::TopSubtree,
 AdtId, AsMacroCall, Lookup, ModuleDefId,
 };

@@ -313,14 +313,14 @@ struct IdentityWhenValidProcMacroExpander;
 impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
 fn expand(
 &self,
-subtree: &Subtree,
-_: Option<&Subtree>,
+subtree: &TopSubtree,
+_: Option<&TopSubtree>,
 _: &base_db::Env,
 _: Span,
 _: Span,
 _: Span,
 _: Option<String>,
-) -> Result<Subtree, ProcMacroExpansionError> {
+) -> Result<TopSubtree, ProcMacroExpansionError> {
 let (parse, _) = syntax_bridge::token_tree_to_syntax_node(
 subtree,
 syntax_bridge::TopEntryPoint::MacroItems,

@@ -110,8 +110,8 @@ pub(super) fn attr_macro_as_call_id(
 ) -> MacroCallId {
 let arg = match macro_attr.input.as_deref() {
 Some(AttrInput::TokenTree(tt)) => {
-let mut tt = tt.as_ref().clone();
-tt.delimiter.kind = tt::DelimiterKind::Invisible;
+let mut tt = tt.clone();
+tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Invisible;
 Some(tt)
 }

@@ -2214,8 +2214,8 @@ impl ModCollector<'_, '_> {

 let is_export = export_attr.exists();
 let local_inner = if is_export {
-export_attr.tt_values().flat_map(|it| it.token_trees.iter()).any(|it| match it {
-tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => ident.sym == sym::local_inner_macros,
+export_attr.tt_values().flat_map(|it| it.iter()).any(|it| match it {
+tt::TtElement::Leaf(tt::Leaf::Ident(ident)) => ident.sym == sym::local_inner_macros,
 _ => false,
 })
 } else {
@@ -2234,7 +2234,7 @@ impl ModCollector<'_, '_> {
 None => {
 let explicit_name =
 attrs.by_key(&sym::rustc_builtin_macro).tt_values().next().and_then(|tt| {
-match tt.token_trees.first() {
+match tt.token_trees().flat_tokens().first() {
 Some(tt::TokenTree::Leaf(tt::Leaf::Ident(name))) => Some(name),
 _ => None,
 }
@@ -2304,9 +2304,7 @@ impl ModCollector<'_, '_> {
 // NOTE: The item *may* have both `#[rustc_builtin_macro]` and `#[proc_macro_derive]`,
 // in which case rustc ignores the helper attributes from the latter, but it
 // "doesn't make sense in practice" (see rust-lang/rust#87027).
-if let Some((name, helpers)) =
-parse_macro_name_and_helper_attrs(&attr.token_trees)
-{
+if let Some((name, helpers)) = parse_macro_name_and_helper_attrs(attr) {
 // NOTE: rustc overrides the name if the macro name if it's different from the
 // macro name, but we assume it isn't as there's no such case yet. FIXME if
 // the following assertion fails.

@@ -4,7 +4,7 @@ use hir_expand::name::{AsName, Name};
 use intern::sym;

 use crate::attr::Attrs;
-use crate::tt::{Leaf, TokenTree};
+use crate::tt::{Leaf, TokenTree, TopSubtree, TtElement};

 #[derive(Debug, PartialEq, Eq)]
 pub struct ProcMacroDef {
@@ -38,7 +38,7 @@ impl Attrs {
 Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Attr })
 } else if self.by_key(&sym::proc_macro_derive).exists() {
 let derive = self.by_key(&sym::proc_macro_derive).tt_values().next()?;
-let def = parse_macro_name_and_helper_attrs(&derive.token_trees)
+let def = parse_macro_name_and_helper_attrs(derive)
 .map(|(name, helpers)| ProcMacroDef { name, kind: ProcMacroKind::Derive { helpers } });

 if def.is_none() {
@@ -55,8 +55,8 @@ impl Attrs {
 // This fn is intended for `#[proc_macro_derive(..)]` and `#[rustc_builtin_macro(..)]`, which have
 // the same structure.
 #[rustfmt::skip]
-pub(crate) fn parse_macro_name_and_helper_attrs(tt: &[TokenTree]) -> Option<(Name, Box<[Name]>)> {
-match tt {
+pub(crate) fn parse_macro_name_and_helper_attrs(tt: &TopSubtree) -> Option<(Name, Box<[Name]>)> {
+match tt.token_trees().flat_tokens() {
 // `#[proc_macro_derive(Trait)]`
 // `#[rustc_builtin_macro(Trait)]`
 [TokenTree::Leaf(Leaf::Ident(trait_name))] => Some((trait_name.as_name(), Box::new([]))),
@@ -67,17 +67,18 @@ pub(crate) fn parse_macro_name_and_helper_attrs(tt: &[TokenTree]) -> Option<(Nam
 TokenTree::Leaf(Leaf::Ident(trait_name)),
 TokenTree::Leaf(Leaf::Punct(comma)),
 TokenTree::Leaf(Leaf::Ident(attributes)),
-TokenTree::Subtree(helpers)
+TokenTree::Subtree(_),
+..
 ] if comma.char == ',' && attributes.sym == sym::attributes =>
 {
+let helpers = tt::TokenTreesView::new(&tt.token_trees().flat_tokens()[3..]).try_into_subtree()?;
 let helpers = helpers
-.token_trees
 .iter()
 .filter(
-|tt| !matches!(tt, TokenTree::Leaf(Leaf::Punct(comma)) if comma.char == ','),
+|tt| !matches!(tt, TtElement::Leaf(Leaf::Punct(comma)) if comma.char == ','),
 )
 .map(|tt| match tt {
-TokenTree::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()),
+TtElement::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()),
 _ => None,
 })
 .collect::<Option<Box<[_]>>>()?;

@@ -19,7 +19,7 @@ use crate::{
 db::ExpandDatabase,
 mod_path::ModPath,
 span_map::SpanMapRef,
-tt::{self, token_to_literal, Subtree},
+tt::{self, token_to_literal, TopSubtree},
 InFile,
 };

@@ -152,7 +152,7 @@ impl RawAttrs {
 );

 let cfg_options = &crate_graph[krate].cfg_options;
-let cfg = Subtree { delimiter: subtree.delimiter, token_trees: Box::from(cfg) };
+let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg);
 let cfg = CfgExpr::parse(&cfg);
 if cfg_options.check(&cfg) == Some(false) {
 smallvec![]
@@ -219,7 +219,7 @@ pub enum AttrInput {
 /// `#[attr = "string"]`
 Literal(tt::Literal),
 /// `#[attr(subtree)]`
-TokenTree(Box<tt::Subtree>),
+TokenTree(tt::TopSubtree),
 }

 impl fmt::Display for AttrInput {
@@ -254,46 +254,59 @@ impl Attr {
 span,
 DocCommentDesugarMode::ProcMacro,
 );
-Some(Box::new(AttrInput::TokenTree(Box::new(tree))))
+Some(Box::new(AttrInput::TokenTree(tree)))
 } else {
 None
 };
 Some(Attr { id, path, input, ctxt: span.ctx })
 }

-fn from_tt(db: &dyn ExpandDatabase, mut tt: &[tt::TokenTree], id: AttrId) -> Option<Attr> {
-if matches!(tt,
+fn from_tt(
+db: &dyn ExpandDatabase,
+mut tt: tt::TokenTreesView<'_>,
+id: AttrId,
+) -> Option<Attr> {
+if matches!(tt.flat_tokens(),
 [tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym, .. })), ..]
 if *sym == sym::unsafe_
 ) {
-match tt.get(1) {
-Some(tt::TokenTree::Subtree(subtree)) => tt = &subtree.token_trees,
+match tt.iter().nth(1) {
+Some(tt::TtElement::Subtree(_, iter)) => tt = iter.remaining(),
 _ => return None,
 }
 }
-let first = &tt.first()?;
+let first = tt.flat_tokens().first()?;
 let ctxt = first.first_span().ctx;
-let path_end = tt
-.iter()
-.position(|tt| {
-!matches!(
+let (path, input) = {
+let mut iter = tt.iter();
+let start = iter.savepoint();
+let mut input = tt::TokenTreesView::new(&[]);
+let mut path = iter.from_savepoint(start);
+let mut path_split_savepoint = iter.savepoint();
+while let Some(tt) = iter.next() {
+path = iter.from_savepoint(start);
+if !matches!(
 tt,
-tt::TokenTree::Leaf(
+tt::TtElement::Leaf(
 tt::Leaf::Punct(tt::Punct { char: ':' | '$', .. }) | tt::Leaf::Ident(_),
 )
-)
-})
-.unwrap_or(tt.len());
+) {
+input = path_split_savepoint.remaining();
+break;
+}
+path_split_savepoint = iter.savepoint();
+}
+(path, input)
+};

-let (path, input) = tt.split_at(path_end);
 let path = Interned::new(ModPath::from_tt(db, path)?);

-let input = match input.first() {
-Some(tt::TokenTree::Subtree(tree)) => {
-Some(Box::new(AttrInput::TokenTree(Box::new(tree.clone()))))
+let input = match (input.flat_tokens().first(), input.try_into_subtree()) {
+(_, Some(tree)) => {
+Some(Box::new(AttrInput::TokenTree(tt::TopSubtree::from_subtree(tree))))
 }
-Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))) => {
-let input = match input.get(1) {
+(Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))), _) => {
+let input = match input.flat_tokens().get(1) {
 Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => {
 Some(Box::new(AttrInput::Literal(lit.clone())))
 }
@@ -352,7 +365,7 @@ impl Attr {
 /// #[path(ident)]
 pub fn single_ident_value(&self) -> Option<&tt::Ident> {
 match self.input.as_deref()? {
-AttrInput::TokenTree(tt) => match &*tt.token_trees {
+AttrInput::TokenTree(tt) => match tt.token_trees().flat_tokens() {
 [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
 _ => None,
 },
@@ -361,7 +374,7 @@ impl Attr {
 }

 /// #[path TokenTree]
-pub fn token_tree_value(&self) -> Option<&Subtree> {
+pub fn token_tree_value(&self) -> Option<&TopSubtree> {
 match self.input.as_deref()? {
 AttrInput::TokenTree(tt) => Some(tt),
 _ => None,
@@ -375,14 +388,14 @@ impl Attr {
 ) -> Option<impl Iterator<Item = (ModPath, Span)> + 'a> {
 let args = self.token_tree_value()?;

-if args.delimiter.kind != DelimiterKind::Parenthesis {
+if args.top_subtree().delimiter.kind != DelimiterKind::Parenthesis {
 return None;
 }
 let paths = args
-.token_trees
-.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
+.token_trees()
+.split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
 .filter_map(move |tts| {
-let span = tts.first()?.first_span();
+let span = tts.flat_tokens().first()?.first_span();
 Some((ModPath::from_tt(db, tts)?, span))
 });

@@ -467,11 +480,11 @@ fn inner_attributes(
 // Input subtree is: `(cfg, $(attr),+)`
 // Split it up into a `cfg` subtree and the `attr` subtrees.
 fn parse_cfg_attr_input(
-subtree: &Subtree,
-) -> Option<(&[tt::TokenTree], impl Iterator<Item = &[tt::TokenTree]>)> {
+subtree: &TopSubtree,
+) -> Option<(tt::TokenTreesView<'_>, impl Iterator<Item = tt::TokenTreesView<'_>>)> {
 let mut parts = subtree
-.token_trees
-.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))));
+.token_trees()
+.split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))));
 let cfg = parts.next()?;
 Some((cfg, parts.filter(|it| !it.is_empty())))
 }

@@ -14,7 +14,7 @@ macro_rules! register_builtin {
 }

 impl BuiltinAttrExpander {
-pub fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::Subtree, Span) -> ExpandResult<tt::Subtree> {
+pub fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::TopSubtree, Span) -> ExpandResult<tt::TopSubtree> {
 match *self {
 $( BuiltinAttrExpander::$variant => $expand, )*
 }
@@ -36,9 +36,9 @@ impl BuiltinAttrExpander {
 &self,
 db: &dyn ExpandDatabase,
 id: MacroCallId,
-tt: &tt::Subtree,
+tt: &tt::TopSubtree,
 span: Span,
-) -> ExpandResult<tt::Subtree> {
+) -> ExpandResult<tt::TopSubtree> {
 self.expander()(db, id, tt, span)
 }

@@ -75,18 +75,18 @@ pub fn find_builtin_attr(ident: &name::Name) -> Option<BuiltinAttrExpander> {
 fn dummy_attr_expand(
 _db: &dyn ExpandDatabase,
 _id: MacroCallId,
-tt: &tt::Subtree,
+tt: &tt::TopSubtree,
 _span: Span,
-) -> ExpandResult<tt::Subtree> {
+) -> ExpandResult<tt::TopSubtree> {
 ExpandResult::ok(tt.clone())
 }

 fn dummy_gate_test_expand(
 _db: &dyn ExpandDatabase,
 _id: MacroCallId,
-tt: &tt::Subtree,
+tt: &tt::TopSubtree,
 span: Span,
-) -> ExpandResult<tt::Subtree> {
+) -> ExpandResult<tt::TopSubtree> {
 let result = quote::quote! { span=>
 #[cfg(test)]
 #tt
@@ -118,47 +118,41 @@ fn dummy_gate_test_expand(
 fn derive_expand(
 db: &dyn ExpandDatabase,
 id: MacroCallId,
-tt: &tt::Subtree,
+tt: &tt::TopSubtree,
 span: Span,
-) -> ExpandResult<tt::Subtree> {
+) -> ExpandResult<tt::TopSubtree> {
 let loc = db.lookup_intern_macro_call(id);
 let derives = match &loc.kind {
 MacroCallKind::Attr { attr_args: Some(attr_args), .. } if loc.def.is_attribute_derive() => {
 attr_args
 }
 _ => {
-return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan { open: span, close: span }))
+return ExpandResult::ok(tt::TopSubtree::empty(tt::DelimSpan {
+open: span,
+close: span,
+}))
 }
 };
 pseudo_derive_attr_expansion(tt, derives, span)
 }

 pub fn pseudo_derive_attr_expansion(
-_: &tt::Subtree,
-args: &tt::Subtree,
+_: &tt::TopSubtree,
+args: &tt::TopSubtree,
 call_site: Span,
-) -> ExpandResult<tt::Subtree> {
-let mk_leaf = |char| {
-tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
-char,
-spacing: tt::Spacing::Alone,
-span: call_site,
-}))
-};
+) -> ExpandResult<tt::TopSubtree> {
+let mk_leaf =
+|char| tt::Leaf::Punct(tt::Punct { char, spacing: tt::Spacing::Alone, span: call_site });

-let mut token_trees = Vec::new();
-for tt in args
-.token_trees
-.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))))
-{
-token_trees.push(mk_leaf('#'));
-token_trees.push(mk_leaf('!'));
-token_trees.push(mk_leaf('['));
-token_trees.extend(tt.iter().cloned());
-token_trees.push(mk_leaf(']'));
+let mut token_trees = tt::TopSubtreeBuilder::new(args.top_subtree().delimiter);
+let iter = args.token_trees().split(|tt| {
+matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. })))
+});
+for tts in iter {
+token_trees.extend([mk_leaf('#'), mk_leaf('!')]);
+token_trees.open(tt::DelimiterKind::Bracket, call_site);
+token_trees.extend_with_tt(tts);
+token_trees.close(call_site);
 }
-ExpandResult::ok(tt::Subtree {
-delimiter: args.delimiter,
-token_trees: token_trees.into_boxed_slice(),
-})
+ExpandResult::ok(token_trees.build())
 }

@@ -28,7 +28,7 @@ macro_rules! register_builtin {
 }

 impl BuiltinDeriveExpander {
-pub fn expander(&self) -> fn(Span, &tt::Subtree) -> ExpandResult<tt::Subtree> {
+pub fn expander(&self) -> fn(Span, &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
 match *self {
 $( BuiltinDeriveExpander::$trait => $expand, )*
 }
@@ -50,9 +50,9 @@ impl BuiltinDeriveExpander {
 &self,
 db: &dyn ExpandDatabase,
 id: MacroCallId,
-tt: &tt::Subtree,
+tt: &tt::TopSubtree,
 span: Span,
-) -> ExpandResult<tt::Subtree> {
+) -> ExpandResult<tt::TopSubtree> {
 let span = span_with_def_site_ctxt(db, span, id);
 self.expander()(span, tt)
 }
@@ -85,7 +85,7 @@ fn tuple_field_iterator(span: Span, n: usize) -> impl Iterator<Item = tt::Ident>
 }

 impl VariantShape {
-fn as_pattern(&self, path: tt::Subtree, span: Span) -> tt::Subtree {
+fn as_pattern(&self, path: tt::TopSubtree, span: Span) -> tt::TopSubtree {
 self.as_pattern_map(path, span, |it| quote!(span => #it))
 }

@@ -99,10 +99,10 @@ impl VariantShape {

 fn as_pattern_map(
 &self,
-path: tt::Subtree,
+path: tt::TopSubtree,
 span: Span,
-field_map: impl Fn(&tt::Ident) -> tt::Subtree,
-) -> tt::Subtree {
+field_map: impl Fn(&tt::Ident) -> tt::TopSubtree,
+) -> tt::TopSubtree {
 match self {
 VariantShape::Struct(fields) => {
 let fields = fields.iter().map(|it| {
@@ -154,7 +154,7 @@ enum AdtShape {
 }

 impl AdtShape {
-fn as_pattern(&self, span: Span, name: &tt::Ident) -> Vec<tt::Subtree> {
+fn as_pattern(&self, span: Span, name: &tt::Ident) -> Vec<tt::TopSubtree> {
 self.as_pattern_map(name, |it| quote!(span =>#it), span)
 }

@@ -176,9 +176,9 @@ impl AdtShape {
 fn as_pattern_map(
 &self,
 name: &tt::Ident,
-field_map: impl Fn(&tt::Ident) -> tt::Subtree,
+field_map: impl Fn(&tt::Ident) -> tt::TopSubtree,
 span: Span,
-) -> Vec<tt::Subtree> {
+) -> Vec<tt::TopSubtree> {
 match self {
 AdtShape::Struct(s) => {
 vec![s.as_pattern_map(quote! {span => #name }, span, field_map)]
@@ -203,12 +203,12 @@ struct BasicAdtInfo {
 /// first field is the name, and
 /// second field is `Some(ty)` if it's a const param of type `ty`, `None` if it's a type param.
 /// third fields is where bounds, if any
-param_types: Vec<(tt::Subtree, Option<tt::Subtree>, Option<tt::Subtree>)>,
-where_clause: Vec<tt::Subtree>,
-associated_types: Vec<tt::Subtree>,
+param_types: Vec<(tt::TopSubtree, Option<tt::TopSubtree>, Option<tt::TopSubtree>)>,
+where_clause: Vec<tt::TopSubtree>,
+associated_types: Vec<tt::TopSubtree>,
 }

-fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandError> {
+fn parse_adt(tt: &tt::TopSubtree, call_site: Span) -> Result<BasicAdtInfo, ExpandError> {
 let (parsed, tm) = &syntax_bridge::token_tree_to_syntax_node(
 tt,
 syntax_bridge::TopEntryPoint::MacroItems,
@@ -276,7 +276,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
 )
 }
 None => {
-tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
+tt::TopSubtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
 }
 }
 };
@@ -303,7 +303,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
 )
 })
 .unwrap_or_else(|| {
-tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
+tt::TopSubtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
 });
 Some(ty)
 } else {
@@ -413,15 +413,15 @@ fn name_to_token(
 /// therefore does not get bound by the derived trait.
 fn expand_simple_derive(
 invoc_span: Span,
-tt: &tt::Subtree,
-trait_path: tt::Subtree,
-make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
-) -> ExpandResult<tt::Subtree> {
+tt: &tt::TopSubtree,
+trait_path: tt::TopSubtree,
+make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::TopSubtree,
+) -> ExpandResult<tt::TopSubtree> {
 let info = match parse_adt(tt, invoc_span) {
 Ok(info) => info,
 Err(e) => {
 return ExpandResult::new(
-tt::Subtree::empty(tt::DelimSpan { open: invoc_span, close: invoc_span }),
+tt::TopSubtree::empty(tt::DelimSpan { open: invoc_span, close: invoc_span }),
 e,
 )
 }
@@ -460,12 +460,12 @@ fn expand_simple_derive(
 ExpandResult::ok(expanded)
 }

-fn copy_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
+fn copy_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
 let krate = dollar_crate(span);
 expand_simple_derive(span, tt, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
 }

-fn clone_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
+fn clone_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
 let krate = dollar_crate(span);
 expand_simple_derive(span, tt, quote! {span => #krate::clone::Clone }, |adt| {
 if matches!(adt.shape, AdtShape::Union) {
@@ -505,18 +505,18 @@ fn clone_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
 }

 /// This function exists since `quote! {span => => }` doesn't work.
-fn fat_arrow(span: Span) -> tt::Subtree {
+fn fat_arrow(span: Span) -> tt::TopSubtree {
 let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span };
 quote! {span => #eq> }
 }

 /// This function exists since `quote! {span => && }` doesn't work.
-fn and_and(span: Span) -> tt::Subtree {
+fn and_and(span: Span) -> tt::TopSubtree {
 let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span };
 quote! {span => #and& }
 }

-fn default_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
+fn default_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
 let krate = &dollar_crate(span);
 expand_simple_derive(span, tt, quote! {span => #krate::default::Default }, |adt| {
 let body = match &adt.shape {
@@ -555,7 +555,7 @@ fn default_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
 })
 }

-fn debug_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
+fn debug_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
 let krate = &dollar_crate(span);
 expand_simple_derive(span, tt, quote! {span => #krate::fmt::Debug }, |adt| {
 let for_variant = |name: String, v: &VariantShape| match v {
@@ -627,7 +627,7 @@ fn debug_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
 })
 }

-fn hash_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
+fn hash_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
 let krate = &dollar_crate(span);
 expand_simple_derive(span, tt, quote! {span => #krate::hash::Hash }, |adt| {
 if matches!(adt.shape, AdtShape::Union) {
@@ -674,12 +674,12 @@ fn hash_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
 })
 }

-fn eq_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
+fn eq_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
 let krate = dollar_crate(span);
 expand_simple_derive(span, tt, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
 }

-fn partial_eq_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
+fn partial_eq_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
 let krate = dollar_crate(span);
 expand_simple_derive(span, tt, quote! {span => #krate::cmp::PartialEq }, |adt| {
 if matches!(adt.shape, AdtShape::Union) {
@@ -731,7 +731,7 @@ fn self_and_other_patterns(
 adt: &BasicAdtInfo,
 name: &tt::Ident,
 span: Span,
-) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
+) -> (Vec<tt::TopSubtree>, Vec<tt::TopSubtree>) {
 let self_patterns = adt.shape.as_pattern_map(
 name,
 |it| {
@@ -751,16 +751,16 @@ fn self_and_other_patterns(
 (self_patterns, other_patterns)
 }

-fn ord_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
+fn ord_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
 let krate = &dollar_crate(span);
 expand_simple_derive(span, tt, quote! {span => #krate::cmp::Ord }, |adt| {
 fn compare(
 krate: &tt::Ident,
-left: tt::Subtree,
-right: tt::Subtree,
-rest: tt::Subtree,
+left: tt::TopSubtree,
+right: tt::TopSubtree,
+rest: tt::TopSubtree,
 span: Span,
-) -> tt::Subtree {
+) -> tt::TopSubtree {
 let fat_arrow1 = fat_arrow(span);
 let fat_arrow2 = fat_arrow(span);
 quote! {span =>
@@ -809,16 +809,16 @@ fn ord_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
 })
 }

-fn partial_ord_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
+fn partial_ord_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
 let krate = &dollar_crate(span);
 expand_simple_derive(span, tt, quote! {span => #krate::cmp::PartialOrd }, |adt| {
 fn compare(
 krate: &tt::Ident,
-left: tt::Subtree,
-right: tt::Subtree,
-rest: tt::Subtree,
+left: tt::TopSubtree,
+right: tt::TopSubtree,
+rest: tt::TopSubtree,
 span: Span,
-) -> tt::Subtree {
+) -> tt::TopSubtree {
 let fat_arrow1 = fat_arrow(span);
 let fat_arrow2 = fat_arrow(span);
 quote! {span =>

|
|
@ -14,12 +14,12 @@ use syntax::{
|
||||||
use syntax_bridge::syntax_node_to_token_tree;
|
use syntax_bridge::syntax_node_to_token_tree;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
builtin::quote::{dollar_crate, quote},
|
builtin::quote::{dollar_crate, quote, WithDelimiter},
|
||||||
db::ExpandDatabase,
|
db::ExpandDatabase,
|
||||||
hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt},
|
hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt},
|
||||||
name,
|
name,
|
||||||
span_map::SpanMap,
|
span_map::SpanMap,
|
||||||
tt::{self, DelimSpan},
|
tt::{self, DelimSpan, TtElement, TtIter},
|
||||||
ExpandError, ExpandResult, HirFileIdExt, Lookup as _, MacroCallId,
|
ExpandError, ExpandResult, HirFileIdExt, Lookup as _, MacroCallId,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
@ -36,7 +36,7 @@ macro_rules! register_builtin {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl BuiltinFnLikeExpander {
|
impl BuiltinFnLikeExpander {
|
||||||
fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::Subtree, Span) -> ExpandResult<tt::Subtree> {
|
fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::TopSubtree, Span) -> ExpandResult<tt::TopSubtree> {
|
||||||
match *self {
|
match *self {
|
||||||
$( BuiltinFnLikeExpander::$kind => $expand, )*
|
$( BuiltinFnLikeExpander::$kind => $expand, )*
|
||||||
}
|
}
|
||||||
|
|
@ -44,7 +44,7 @@ macro_rules! register_builtin {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl EagerExpander {
|
impl EagerExpander {
|
||||||
fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::Subtree, Span) -> ExpandResult<tt::Subtree> {
|
fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::TopSubtree, Span) -> ExpandResult<tt::TopSubtree> {
|
||||||
match *self {
|
match *self {
|
||||||
$( EagerExpander::$e_kind => $e_expand, )*
|
$( EagerExpander::$e_kind => $e_expand, )*
|
||||||
}
|
}
|
||||||
|
|
@ -66,9 +66,9 @@ impl BuiltinFnLikeExpander {
|
||||||
&self,
|
&self,
|
||||||
db: &dyn ExpandDatabase,
|
db: &dyn ExpandDatabase,
|
||||||
id: MacroCallId,
|
id: MacroCallId,
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
let span = span_with_def_site_ctxt(db, span, id);
|
let span = span_with_def_site_ctxt(db, span, id);
|
||||||
self.expander()(db, id, tt, span)
|
self.expander()(db, id, tt, span)
|
||||||
}
|
}
|
||||||
|
|
@ -83,9 +83,9 @@ impl EagerExpander {
|
||||||
&self,
|
&self,
|
||||||
db: &dyn ExpandDatabase,
|
db: &dyn ExpandDatabase,
|
||||||
id: MacroCallId,
|
id: MacroCallId,
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
let span = span_with_def_site_ctxt(db, span, id);
|
let span = span_with_def_site_ctxt(db, span, id);
|
||||||
self.expander()(db, id, tt, span)
|
self.expander()(db, id, tt, span)
|
||||||
}
|
}
|
||||||
|
|
@@ -146,24 +146,16 @@ register_builtin! {
     (option_env, OptionEnv) => option_env_expand
 }
 
-fn mk_pound(span: Span) -> tt::Subtree {
-    crate::builtin::quote::IntoTt::to_subtree(
-        vec![crate::tt::Leaf::Punct(crate::tt::Punct {
-            char: '#',
-            spacing: crate::tt::Spacing::Alone,
-            span,
-        })
-        .into()],
-        span,
-    )
+fn mk_pound(span: Span) -> tt::Leaf {
+    crate::tt::Leaf::Punct(crate::tt::Punct { char: '#', spacing: crate::tt::Spacing::Alone, span })
 }
 
 fn module_path_expand(
     _db: &dyn ExpandDatabase,
     _id: MacroCallId,
-    _tt: &tt::Subtree,
+    _tt: &tt::TopSubtree,
     span: Span,
-) -> ExpandResult<tt::Subtree> {
+) -> ExpandResult<tt::TopSubtree> {
     // Just return a dummy result.
     ExpandResult::ok(quote! {span =>
         "module::path"
@@ -173,48 +165,48 @@ fn module_path_expand(
 fn line_expand(
     _db: &dyn ExpandDatabase,
     _id: MacroCallId,
-    _tt: &tt::Subtree,
+    _tt: &tt::TopSubtree,
     span: Span,
-) -> ExpandResult<tt::Subtree> {
+) -> ExpandResult<tt::TopSubtree> {
     // dummy implementation for type-checking purposes
     // Note that `line!` and `column!` will never be implemented properly, as they are by definition
     // not incremental
-    ExpandResult::ok(tt::Subtree {
-        delimiter: tt::Delimiter::invisible_spanned(span),
-        token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
+    ExpandResult::ok(tt::TopSubtree::invisible_from_leaves(
+        span,
+        [tt::Leaf::Literal(tt::Literal {
             symbol: sym::INTEGER_0.clone(),
             span,
             kind: tt::LitKind::Integer,
             suffix: Some(sym::u32.clone()),
-        }))]),
-    })
+        })],
+    ))
 }
 
 fn log_syntax_expand(
     _db: &dyn ExpandDatabase,
     _id: MacroCallId,
-    _tt: &tt::Subtree,
+    _tt: &tt::TopSubtree,
     span: Span,
-) -> ExpandResult<tt::Subtree> {
+) -> ExpandResult<tt::TopSubtree> {
     ExpandResult::ok(quote! {span =>})
 }
 
 fn trace_macros_expand(
     _db: &dyn ExpandDatabase,
     _id: MacroCallId,
-    _tt: &tt::Subtree,
+    _tt: &tt::TopSubtree,
     span: Span,
-) -> ExpandResult<tt::Subtree> {
+) -> ExpandResult<tt::TopSubtree> {
     ExpandResult::ok(quote! {span =>})
 }
 
 fn stringify_expand(
     _db: &dyn ExpandDatabase,
     _id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &tt::TopSubtree,
     span: Span,
-) -> ExpandResult<tt::Subtree> {
-    let pretty = ::tt::pretty(&tt.token_trees);
+) -> ExpandResult<tt::TopSubtree> {
+    let pretty = ::tt::pretty(tt.token_trees().flat_tokens());
 
     let expanded = quote! {span =>
         #pretty
@@ -226,39 +218,35 @@ fn stringify_expand(
 fn assert_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    tt: &tt::Subtree,
+    tt: &tt::TopSubtree,
     span: Span,
-) -> ExpandResult<tt::Subtree> {
+) -> ExpandResult<tt::TopSubtree> {
     let call_site_span = span_with_call_site_ctxt(db, span, id);
 
-    let mut iter = ::tt::iter::TtIter::new(tt);
+    let mut iter = tt.iter();
 
     let cond = expect_fragment(
         &mut iter,
         parser::PrefixEntryPoint::Expr,
         db.crate_graph()[id.lookup(db).krate].edition,
-        tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close },
+        tt.top_subtree().delimiter.delim_span(),
     );
     _ = iter.expect_char(',');
-    let rest = iter.as_slice();
+    let rest = iter.remaining();
 
     let dollar_crate = dollar_crate(span);
-    let expanded = match cond.value {
-        Some(cond) => {
-            let panic_args = rest.iter().cloned();
+    let panic_args = rest.iter();
     let mac = if use_panic_2021(db, span) {
         quote! {call_site_span => #dollar_crate::panic::panic_2021!(##panic_args) }
     } else {
         quote! {call_site_span => #dollar_crate::panic!(##panic_args) }
     };
-            quote! {call_site_span =>{
-                if !(#cond) {
+    let value = cond.value;
+    let expanded = quote! {call_site_span =>{
+        if !(#value) {
             #mac;
         }
-            }}
-        }
-        None => quote! {call_site_span =>{}},
-    };
+    }};
 
     match cond.err {
         Some(err) => ExpandResult::new(expanded, err.into()),
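`assert_expand` above now drives the input through `tt.iter()`, `iter.expect_char(',')` and `iter.remaining()`, all of which appear in this diff. The snippet below models that cursor discipline over a plain token list with stand-in types (not the real `TtIter`), to show the "take the condition, skip the comma, forward the rest to panic!" flow.

// Minimal cursor over a flat token list, modelling the calls used above.
struct Cursor<'a> {
    tokens: &'a [&'a str],
    pos: usize,
}

impl<'a> Cursor<'a> {
    fn new(tokens: &'a [&'a str]) -> Self {
        Cursor { tokens, pos: 0 }
    }
    fn next(&mut self) -> Option<&'a str> {
        let t = self.tokens.get(self.pos).copied();
        if t.is_some() {
            self.pos += 1;
        }
        t
    }
    // Like `iter.expect_char(',')`: consume the token only if it matches.
    fn expect(&mut self, want: &str) -> Result<(), ()> {
        if self.tokens.get(self.pos).copied() == Some(want) {
            self.pos += 1;
            Ok(())
        } else {
            Err(())
        }
    }
    // Like `iter.remaining()`: everything not yet consumed.
    fn remaining(&self) -> &'a [&'a str] {
        &self.tokens[self.pos..]
    }
}

fn main() {
    // assert!(cond, "msg", arg): take the condition, skip the comma,
    // forward the rest to panic!.
    let mut it = Cursor::new(&["cond", ",", "\"msg\"", ",", "arg"]);
    let cond = it.next().unwrap();
    let _ = it.expect(",");
    let panic_args = it.remaining();
    println!("cond = {cond}, panic args = {panic_args:?}");
}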
@ -269,9 +257,9 @@ fn assert_expand(
|
||||||
fn file_expand(
|
fn file_expand(
|
||||||
_db: &dyn ExpandDatabase,
|
_db: &dyn ExpandDatabase,
|
||||||
_id: MacroCallId,
|
_id: MacroCallId,
|
||||||
_tt: &tt::Subtree,
|
_tt: &tt::TopSubtree,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
// FIXME: RA purposefully lacks knowledge of absolute file names
|
// FIXME: RA purposefully lacks knowledge of absolute file names
|
||||||
// so just return "".
|
// so just return "".
|
||||||
let file_name = "file";
|
let file_name = "file";
|
||||||
|
|
@ -286,12 +274,12 @@ fn file_expand(
|
||||||
fn format_args_expand(
|
fn format_args_expand(
|
||||||
_db: &dyn ExpandDatabase,
|
_db: &dyn ExpandDatabase,
|
||||||
_id: MacroCallId,
|
_id: MacroCallId,
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
let pound = mk_pound(span);
|
let pound = mk_pound(span);
|
||||||
let mut tt = tt.clone();
|
let mut tt = tt.clone();
|
||||||
tt.delimiter.kind = tt::DelimiterKind::Parenthesis;
|
tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Parenthesis;
|
||||||
ExpandResult::ok(quote! {span =>
|
ExpandResult::ok(quote! {span =>
|
||||||
builtin #pound format_args #tt
|
builtin #pound format_args #tt
|
||||||
})
|
})
|
||||||
|
|
@ -300,17 +288,17 @@ fn format_args_expand(
|
||||||
fn format_args_nl_expand(
|
fn format_args_nl_expand(
|
||||||
_db: &dyn ExpandDatabase,
|
_db: &dyn ExpandDatabase,
|
||||||
_id: MacroCallId,
|
_id: MacroCallId,
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
let pound = mk_pound(span);
|
let pound = mk_pound(span);
|
||||||
let mut tt = tt.clone();
|
let mut tt = tt.clone();
|
||||||
tt.delimiter.kind = tt::DelimiterKind::Parenthesis;
|
tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Parenthesis;
|
||||||
if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
|
if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
|
||||||
symbol: text,
|
symbol: text,
|
||||||
kind: tt::LitKind::Str,
|
kind: tt::LitKind::Str,
|
||||||
..
|
..
|
||||||
}))) = tt.token_trees.first_mut()
|
}))) = tt.0.get_mut(1)
|
||||||
{
|
{
|
||||||
*text = Symbol::intern(&format_smolstr!("{}\\n", text.as_str()));
|
*text = Symbol::intern(&format_smolstr!("{}\\n", text.as_str()));
|
||||||
}
|
}
|
||||||
|
|
@ -322,11 +310,11 @@ fn format_args_nl_expand(
|
||||||
fn asm_expand(
|
fn asm_expand(
|
||||||
_db: &dyn ExpandDatabase,
|
_db: &dyn ExpandDatabase,
|
||||||
_id: MacroCallId,
|
_id: MacroCallId,
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
let mut tt = tt.clone();
|
let mut tt = tt.clone();
|
||||||
tt.delimiter.kind = tt::DelimiterKind::Parenthesis;
|
tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Parenthesis;
|
||||||
let pound = mk_pound(span);
|
let pound = mk_pound(span);
|
||||||
let expanded = quote! {span =>
|
let expanded = quote! {span =>
|
||||||
builtin #pound asm #tt
|
builtin #pound asm #tt
|
||||||
|
|
@ -337,9 +325,9 @@ fn asm_expand(
|
||||||
fn cfg_expand(
|
fn cfg_expand(
|
||||||
db: &dyn ExpandDatabase,
|
db: &dyn ExpandDatabase,
|
||||||
id: MacroCallId,
|
id: MacroCallId,
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
let loc = db.lookup_intern_macro_call(id);
|
let loc = db.lookup_intern_macro_call(id);
|
||||||
let expr = CfgExpr::parse(tt);
|
let expr = CfgExpr::parse(tt);
|
||||||
let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false);
|
let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false);
|
||||||
|
|
@ -350,9 +338,9 @@ fn cfg_expand(
|
||||||
fn panic_expand(
|
fn panic_expand(
|
||||||
db: &dyn ExpandDatabase,
|
db: &dyn ExpandDatabase,
|
||||||
id: MacroCallId,
|
id: MacroCallId,
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
let dollar_crate = dollar_crate(span);
|
let dollar_crate = dollar_crate(span);
|
||||||
let call_site_span = span_with_call_site_ctxt(db, span, id);
|
let call_site_span = span_with_call_site_ctxt(db, span, id);
|
||||||
|
|
||||||
|
|
@@ -362,19 +350,18 @@ fn panic_expand(
         sym::panic_2015.clone()
     };
 
-    // Expand to a macro call `$crate::panic::panic_{edition}`
-    let mut call = quote!(call_site_span =>#dollar_crate::panic::#mac!);
 
     // Pass the original arguments
-    let mut subtree = tt.clone();
-    subtree.delimiter = tt::Delimiter {
+    let subtree = WithDelimiter {
+        delimiter: tt::Delimiter {
             open: call_site_span,
             close: call_site_span,
             kind: tt::DelimiterKind::Parenthesis,
+        },
+        token_trees: tt.token_trees(),
     };
 
-    // FIXME(slow): quote! have a way to expand to builder to make this a vec!
-    call.push(tt::TokenTree::Subtree(subtree));
+    // Expand to a macro call `$crate::panic::panic_{edition}`
+    let call = quote!(call_site_span =>#dollar_crate::panic::#mac! #subtree);
 
     ExpandResult::ok(call)
 }
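`panic_expand` above no longer clones the argument subtree just to change its delimiter; it wraps a borrowed view in `WithDelimiter` and lets `quote!` splice it. The sketch below models that idea with plain strings (stand-in types and field names, not the real `tt` structs): the wrapper only records the delimiter and a view of the arguments, and the delimiters are emitted when the output is assembled.

// Sketch of the `WithDelimiter` idea: re-wrap an argument token stream in
// parentheses while splicing it into new output, without first building an
// intermediate subtree value.
struct WithDelimiter<'a> {
    open: char,
    close: char,
    tokens: &'a [String], // a borrowed view of already-flattened tokens
}

impl WithDelimiter<'_> {
    // Append `(`, the borrowed tokens, and `)` to the output being built.
    fn write_into(&self, out: &mut Vec<String>) {
        out.push(self.open.to_string());
        out.extend(self.tokens.iter().cloned());
        out.push(self.close.to_string());
    }
}

fn main() {
    let args = vec!["\"oh no: {}\"".to_string(), ",".to_string(), "err".to_string()];
    let wrapped = WithDelimiter { open: '(', close: ')', tokens: &args };

    // `$crate::panic::panic_2021!` followed by the re-parenthesised arguments.
    let mut call = vec!["$crate::panic::panic_2021!".to_string()];
    wrapped.write_into(&mut call);
    println!("{}", call.join(" "));
}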
@ -382,9 +369,9 @@ fn panic_expand(
|
||||||
fn unreachable_expand(
|
fn unreachable_expand(
|
||||||
db: &dyn ExpandDatabase,
|
db: &dyn ExpandDatabase,
|
||||||
id: MacroCallId,
|
id: MacroCallId,
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
let dollar_crate = dollar_crate(span);
|
let dollar_crate = dollar_crate(span);
|
||||||
let call_site_span = span_with_call_site_ctxt(db, span, id);
|
let call_site_span = span_with_call_site_ctxt(db, span, id);
|
||||||
|
|
||||||
|
|
@ -394,19 +381,16 @@ fn unreachable_expand(
|
||||||
sym::unreachable_2015.clone()
|
sym::unreachable_2015.clone()
|
||||||
};
|
};
|
||||||
|
|
||||||
// Expand to a macro call `$crate::panic::panic_{edition}`
|
|
||||||
let mut call = quote!(call_site_span =>#dollar_crate::panic::#mac!);
|
|
||||||
|
|
||||||
// Pass the original arguments
|
// Pass the original arguments
|
||||||
let mut subtree = tt.clone();
|
let mut subtree = tt.clone();
|
||||||
subtree.delimiter = tt::Delimiter {
|
*subtree.top_subtree_delimiter_mut() = tt::Delimiter {
|
||||||
open: call_site_span,
|
open: call_site_span,
|
||||||
close: call_site_span,
|
close: call_site_span,
|
||||||
kind: tt::DelimiterKind::Parenthesis,
|
kind: tt::DelimiterKind::Parenthesis,
|
||||||
};
|
};
|
||||||
|
|
||||||
// FIXME(slow): quote! have a way to expand to builder to make this a vec!
|
// Expand to a macro call `$crate::panic::panic_{edition}`
|
||||||
call.push(tt::TokenTree::Subtree(subtree));
|
let call = quote!(call_site_span =>#dollar_crate::panic::#mac! #subtree);
|
||||||
|
|
||||||
ExpandResult::ok(call)
|
ExpandResult::ok(call)
|
||||||
}
|
}
|
||||||
|
|
@ -436,11 +420,11 @@ fn use_panic_2021(db: &dyn ExpandDatabase, span: Span) -> bool {
|
||||||
fn compile_error_expand(
|
fn compile_error_expand(
|
||||||
_db: &dyn ExpandDatabase,
|
_db: &dyn ExpandDatabase,
|
||||||
_id: MacroCallId,
|
_id: MacroCallId,
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
let err = match &*tt.token_trees {
|
let err = match &*tt.0 {
|
||||||
[tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
|
[_, tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
|
||||||
symbol: text,
|
symbol: text,
|
||||||
span: _,
|
span: _,
|
||||||
kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
|
kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
|
||||||
|
|
@ -455,9 +439,9 @@ fn compile_error_expand(
|
||||||
fn concat_expand(
|
fn concat_expand(
|
||||||
_db: &dyn ExpandDatabase,
|
_db: &dyn ExpandDatabase,
|
||||||
_arg_id: MacroCallId,
|
_arg_id: MacroCallId,
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
call_site: Span,
|
call_site: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
let mut err = None;
|
let mut err = None;
|
||||||
let mut text = String::new();
|
let mut text = String::new();
|
||||||
let mut span: Option<Span> = None;
|
let mut span: Option<Span> = None;
|
||||||
|
|
@@ -466,19 +450,19 @@ fn concat_expand(
         Some(_) => (),
         None => span = Some(s),
     };
-    for (i, mut t) in tt.token_trees.iter().enumerate() {
+    for (i, mut t) in tt.iter().enumerate() {
         // FIXME: hack on top of a hack: `$e:expr` captures get surrounded in parentheses
         // to ensure the right parsing order, so skip the parentheses here. Ideally we'd
         // implement rustc's model. cc https://github.com/rust-lang/rust-analyzer/pull/10623
-        if let tt::TokenTree::Subtree(tt::Subtree { delimiter: delim, token_trees }) = t {
-            if let [tt] = &**token_trees {
-                if delim.kind == tt::DelimiterKind::Parenthesis {
-                    t = tt;
+        if let TtElement::Subtree(subtree, subtree_iter) = &t {
+            if let [tt::TokenTree::Leaf(tt)] = subtree_iter.remaining().flat_tokens() {
+                if subtree.delimiter.kind == tt::DelimiterKind::Parenthesis {
+                    t = TtElement::Leaf(tt);
                 }
             }
         }
         match t {
-            tt::TokenTree::Leaf(tt::Leaf::Literal(it)) if i % 2 == 0 => {
+            TtElement::Leaf(tt::Leaf::Literal(it)) if i % 2 == 0 => {
                 // concat works with string and char literals, so remove any quotes.
                 // It also works with integer, float and boolean literals, so just use the rest
                 // as-is.
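The `concat_expand` loop above now matches on `TtElement::Leaf` and `TtElement::Subtree(subtree, subtree_iter)` rather than owned `TokenTree` nodes: a subtree element carries its header plus a view of its body in the same flat buffer, which is what makes the "unwrap a parenthesised `$e:expr` capture" check cheap. A self-contained model with stand-in types (not the real iterator):

#[derive(Debug)]
enum Token {
    Leaf(&'static str),
    Subtree { delim: char, len: usize },
}

#[derive(Debug)]
enum Element<'a> {
    Leaf(&'a Token),
    Subtree { header: &'a Token, body: &'a [Token] },
}

// Yield top-level elements; a subtree element exposes its body as a slice.
fn top_level(tokens: &[Token]) -> Vec<Element<'_>> {
    let mut out = Vec::new();
    let mut i = 0;
    while i < tokens.len() {
        match tokens[i] {
            Token::Leaf(_) => {
                out.push(Element::Leaf(&tokens[i]));
                i += 1;
            }
            Token::Subtree { len, .. } => {
                out.push(Element::Subtree { header: &tokens[i], body: &tokens[i + 1..i + 1 + len] });
                i += 1 + len;
            }
        }
    }
    out
}

fn main() {
    // concat!("a", ("b")) — the parenthesised capture arrives as a subtree.
    let tokens = [
        Token::Leaf("\"a\""),
        Token::Leaf(","),
        Token::Subtree { delim: '(', len: 1 },
        Token::Leaf("\"b\""),
    ];
    for el in top_level(&tokens) {
        match el {
            // A parenthesised capture with exactly one leaf can be treated as that leaf.
            Element::Subtree { header: Token::Subtree { delim: '(', .. }, body: [leaf] } => {
                println!("unwrapped: {leaf:?}");
            }
            other => println!("{other:?}"),
        }
    }
}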
@ -511,28 +495,28 @@ fn concat_expand(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// handle boolean literals
|
// handle boolean literals
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Ident(id))
|
TtElement::Leaf(tt::Leaf::Ident(id))
|
||||||
if i % 2 == 0 && (id.sym == sym::true_ || id.sym == sym::false_) =>
|
if i % 2 == 0 && (id.sym == sym::true_ || id.sym == sym::false_) =>
|
||||||
{
|
{
|
||||||
text.push_str(id.sym.as_str());
|
text.push_str(id.sym.as_str());
|
||||||
record_span(id.span);
|
record_span(id.span);
|
||||||
}
|
}
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
|
TtElement::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
|
||||||
_ => {
|
_ => {
|
||||||
err.get_or_insert(ExpandError::other(call_site, "unexpected token"));
|
err.get_or_insert(ExpandError::other(call_site, "unexpected token"));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let span = span.unwrap_or(tt.delimiter.open);
|
let span = span.unwrap_or_else(|| tt.top_subtree().delimiter.open);
|
||||||
ExpandResult { value: quote!(span =>#text), err }
|
ExpandResult { value: quote!(span =>#text), err }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn concat_bytes_expand(
|
fn concat_bytes_expand(
|
||||||
_db: &dyn ExpandDatabase,
|
_db: &dyn ExpandDatabase,
|
||||||
_arg_id: MacroCallId,
|
_arg_id: MacroCallId,
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
call_site: Span,
|
call_site: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
let mut bytes = String::new();
|
let mut bytes = String::new();
|
||||||
let mut err = None;
|
let mut err = None;
|
||||||
let mut span: Option<Span> = None;
|
let mut span: Option<Span> = None;
|
||||||
|
|
@ -541,9 +525,9 @@ fn concat_bytes_expand(
|
||||||
Some(_) => (),
|
Some(_) => (),
|
||||||
None => span = Some(s),
|
None => span = Some(s),
|
||||||
};
|
};
|
||||||
for (i, t) in tt.token_trees.iter().enumerate() {
|
for (i, t) in tt.iter().enumerate() {
|
||||||
match t {
|
match t {
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
|
TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
|
||||||
symbol: text,
|
symbol: text,
|
||||||
span,
|
span,
|
||||||
kind,
|
kind,
|
||||||
|
|
@ -570,10 +554,12 @@ fn concat_bytes_expand(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
|
TtElement::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
|
||||||
tt::TokenTree::Subtree(tree) if tree.delimiter.kind == tt::DelimiterKind::Bracket => {
|
TtElement::Subtree(tree, tree_iter)
|
||||||
|
if tree.delimiter.kind == tt::DelimiterKind::Bracket =>
|
||||||
|
{
|
||||||
if let Err(e) =
|
if let Err(e) =
|
||||||
concat_bytes_expand_subtree(tree, &mut bytes, &mut record_span, call_site)
|
concat_bytes_expand_subtree(tree_iter, &mut bytes, &mut record_span, call_site)
|
||||||
{
|
{
|
||||||
err.get_or_insert(e);
|
err.get_or_insert(e);
|
||||||
break;
|
break;
|
||||||
|
|
@ -585,31 +571,30 @@ fn concat_bytes_expand(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let span = span.unwrap_or(tt.delimiter.open);
|
let span = span.unwrap_or(tt.top_subtree().delimiter.open);
|
||||||
ExpandResult {
|
ExpandResult {
|
||||||
value: tt::Subtree {
|
value: tt::TopSubtree::invisible_from_leaves(
|
||||||
delimiter: tt::Delimiter::invisible_spanned(span),
|
span,
|
||||||
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
|
[tt::Leaf::Literal(tt::Literal {
|
||||||
symbol: Symbol::intern(&bytes),
|
symbol: Symbol::intern(&bytes),
|
||||||
span,
|
span,
|
||||||
kind: tt::LitKind::ByteStr,
|
kind: tt::LitKind::ByteStr,
|
||||||
suffix: None,
|
suffix: None,
|
||||||
}))]
|
})],
|
||||||
.into(),
|
),
|
||||||
},
|
|
||||||
err,
|
err,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn concat_bytes_expand_subtree(
|
fn concat_bytes_expand_subtree(
|
||||||
tree: &tt::Subtree,
|
tree_iter: TtIter<'_>,
|
||||||
bytes: &mut String,
|
bytes: &mut String,
|
||||||
mut record_span: impl FnMut(Span),
|
mut record_span: impl FnMut(Span),
|
||||||
err_span: Span,
|
err_span: Span,
|
||||||
) -> Result<(), ExpandError> {
|
) -> Result<(), ExpandError> {
|
||||||
for (ti, tt) in tree.token_trees.iter().enumerate() {
|
for (ti, tt) in tree_iter.enumerate() {
|
||||||
match tt {
|
match tt {
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
|
TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
|
||||||
symbol: text,
|
symbol: text,
|
||||||
span,
|
span,
|
||||||
kind: tt::LitKind::Byte,
|
kind: tt::LitKind::Byte,
|
||||||
|
|
@ -620,7 +605,7 @@ fn concat_bytes_expand_subtree(
|
||||||
}
|
}
|
||||||
record_span(*span);
|
record_span(*span);
|
||||||
}
|
}
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
|
TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
|
||||||
symbol: text,
|
symbol: text,
|
||||||
span,
|
span,
|
||||||
kind: tt::LitKind::Integer,
|
kind: tt::LitKind::Integer,
|
||||||
|
|
@ -631,7 +616,7 @@ fn concat_bytes_expand_subtree(
|
||||||
bytes.extend(b.escape_ascii().filter_map(|it| char::from_u32(it as u32)));
|
bytes.extend(b.escape_ascii().filter_map(|it| char::from_u32(it as u32)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if ti % 2 == 1 && punct.char == ',' => (),
|
TtElement::Leaf(tt::Leaf::Punct(punct)) if ti % 2 == 1 && punct.char == ',' => (),
|
||||||
_ => {
|
_ => {
|
||||||
return Err(ExpandError::other(err_span, "unexpected token"));
|
return Err(ExpandError::other(err_span, "unexpected token"));
|
||||||
}
|
}
|
||||||
|
|
@ -643,17 +628,17 @@ fn concat_bytes_expand_subtree(
|
||||||
fn concat_idents_expand(
|
fn concat_idents_expand(
|
||||||
_db: &dyn ExpandDatabase,
|
_db: &dyn ExpandDatabase,
|
||||||
_arg_id: MacroCallId,
|
_arg_id: MacroCallId,
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
let mut err = None;
|
let mut err = None;
|
||||||
let mut ident = String::new();
|
let mut ident = String::new();
|
||||||
for (i, t) in tt.token_trees.iter().enumerate() {
|
for (i, t) in tt.iter().enumerate() {
|
||||||
match t {
|
match t {
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => {
|
TtElement::Leaf(tt::Leaf::Ident(id)) => {
|
||||||
ident.push_str(id.sym.as_str());
|
ident.push_str(id.sym.as_str());
|
||||||
}
|
}
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
|
TtElement::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
|
||||||
_ => {
|
_ => {
|
||||||
err.get_or_insert(ExpandError::other(span, "unexpected token"));
|
err.get_or_insert(ExpandError::other(span, "unexpected token"));
|
||||||
}
|
}
|
||||||
|
|
@@ -685,18 +670,19 @@ fn relative_file(
     }
 }
 
-fn parse_string(tt: &tt::Subtree) -> Result<(Symbol, Span), ExpandError> {
-    tt.token_trees
-        .first()
-        .ok_or(tt.delimiter.open.cover(tt.delimiter.close))
+fn parse_string(tt: &tt::TopSubtree) -> Result<(Symbol, Span), ExpandError> {
+    let delimiter = tt.top_subtree().delimiter;
+    tt.iter()
+        .next()
+        .ok_or(delimiter.open.cover(delimiter.close))
         .and_then(|tt| match tt {
-            tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
+            TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
                 symbol: text,
                 span,
                 kind: tt::LitKind::Str,
                 suffix: _,
             })) => Ok((unescape_str(text), *span)),
-            tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
+            TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
                 symbol: text,
                 span,
                 kind: tt::LitKind::StrRaw(_),
@ -705,15 +691,19 @@ fn parse_string(tt: &tt::Subtree) -> Result<(Symbol, Span), ExpandError> {
|
||||||
// FIXME: We wrap expression fragments in parentheses which can break this expectation
|
// FIXME: We wrap expression fragments in parentheses which can break this expectation
|
||||||
// here
|
// here
|
||||||
// Remove this once we handle none delims correctly
|
// Remove this once we handle none delims correctly
|
||||||
tt::TokenTree::Subtree(tt) if tt.delimiter.kind == DelimiterKind::Parenthesis => {
|
TtElement::Subtree(tt, mut tt_iter)
|
||||||
tt.token_trees.first().and_then(|tt| match tt {
|
if tt.delimiter.kind == DelimiterKind::Parenthesis =>
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
|
{
|
||||||
|
tt_iter
|
||||||
|
.next()
|
||||||
|
.and_then(|tt| match tt {
|
||||||
|
TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
|
||||||
symbol: text,
|
symbol: text,
|
||||||
span,
|
span,
|
||||||
kind: tt::LitKind::Str,
|
kind: tt::LitKind::Str,
|
||||||
suffix: _,
|
suffix: _,
|
||||||
})) => Some((unescape_str(text), *span)),
|
})) => Some((unescape_str(text), *span)),
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
|
TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
|
||||||
symbol: text,
|
symbol: text,
|
||||||
span,
|
span,
|
||||||
kind: tt::LitKind::StrRaw(_),
|
kind: tt::LitKind::StrRaw(_),
|
||||||
|
|
@ -721,10 +711,10 @@ fn parse_string(tt: &tt::Subtree) -> Result<(Symbol, Span), ExpandError> {
|
||||||
})) => Some((text.clone(), *span)),
|
})) => Some((text.clone(), *span)),
|
||||||
_ => None,
|
_ => None,
|
||||||
})
|
})
|
||||||
|
.ok_or(delimiter.open.cover(delimiter.close))
|
||||||
}
|
}
|
||||||
.ok_or(tt.delimiter.open.cover(tt.delimiter.close)),
|
TtElement::Leaf(l) => Err(*l.span()),
|
||||||
::tt::TokenTree::Leaf(l) => Err(*l.span()),
|
TtElement::Subtree(tt, _) => Err(tt.delimiter.open.cover(tt.delimiter.close)),
|
||||||
::tt::TokenTree::Subtree(tt) => Err(tt.delimiter.open.cover(tt.delimiter.close)),
|
|
||||||
})
|
})
|
||||||
.map_err(|span| ExpandError::other(span, "expected string literal"))
|
.map_err(|span| ExpandError::other(span, "expected string literal"))
|
||||||
}
|
}
|
||||||
|
|
@ -732,13 +722,16 @@ fn parse_string(tt: &tt::Subtree) -> Result<(Symbol, Span), ExpandError> {
|
||||||
fn include_expand(
|
fn include_expand(
|
||||||
db: &dyn ExpandDatabase,
|
db: &dyn ExpandDatabase,
|
||||||
arg_id: MacroCallId,
|
arg_id: MacroCallId,
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
let file_id = match include_input_to_file_id(db, arg_id, tt) {
|
let file_id = match include_input_to_file_id(db, arg_id, tt) {
|
||||||
Ok(it) => it,
|
Ok(it) => it,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
|
return ExpandResult::new(
|
||||||
|
tt::TopSubtree::empty(DelimSpan { open: span, close: span }),
|
||||||
|
e,
|
||||||
|
)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
let span_map = db.real_span_map(file_id);
|
let span_map = db.real_span_map(file_id);
|
||||||
|
|
@ -754,7 +747,7 @@ fn include_expand(
|
||||||
pub fn include_input_to_file_id(
|
pub fn include_input_to_file_id(
|
||||||
db: &dyn ExpandDatabase,
|
db: &dyn ExpandDatabase,
|
||||||
arg_id: MacroCallId,
|
arg_id: MacroCallId,
|
||||||
arg: &tt::Subtree,
|
arg: &tt::TopSubtree,
|
||||||
) -> Result<EditionedFileId, ExpandError> {
|
) -> Result<EditionedFileId, ExpandError> {
|
||||||
let (s, span) = parse_string(arg)?;
|
let (s, span) = parse_string(arg)?;
|
||||||
relative_file(db, arg_id, s.as_str(), false, span)
|
relative_file(db, arg_id, s.as_str(), false, span)
|
||||||
|
|
@ -763,32 +756,35 @@ pub fn include_input_to_file_id(
|
||||||
fn include_bytes_expand(
|
fn include_bytes_expand(
|
||||||
_db: &dyn ExpandDatabase,
|
_db: &dyn ExpandDatabase,
|
||||||
_arg_id: MacroCallId,
|
_arg_id: MacroCallId,
|
||||||
_tt: &tt::Subtree,
|
_tt: &tt::TopSubtree,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
// FIXME: actually read the file here if the user asked for macro expansion
|
// FIXME: actually read the file here if the user asked for macro expansion
|
||||||
let res = tt::Subtree {
|
let res = tt::TopSubtree::invisible_from_leaves(
|
||||||
delimiter: tt::Delimiter::invisible_spanned(span),
|
span,
|
||||||
token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
|
[tt::Leaf::Literal(tt::Literal {
|
||||||
symbol: Symbol::empty(),
|
symbol: Symbol::empty(),
|
||||||
span,
|
span,
|
||||||
kind: tt::LitKind::ByteStrRaw(1),
|
kind: tt::LitKind::ByteStrRaw(1),
|
||||||
suffix: None,
|
suffix: None,
|
||||||
}))]),
|
})],
|
||||||
};
|
);
|
||||||
ExpandResult::ok(res)
|
ExpandResult::ok(res)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn include_str_expand(
|
fn include_str_expand(
|
||||||
db: &dyn ExpandDatabase,
|
db: &dyn ExpandDatabase,
|
||||||
arg_id: MacroCallId,
|
arg_id: MacroCallId,
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
let (path, span) = match parse_string(tt) {
|
let (path, span) = match parse_string(tt) {
|
||||||
Ok(it) => it,
|
Ok(it) => it,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
|
return ExpandResult::new(
|
||||||
|
tt::TopSubtree::empty(DelimSpan { open: span, close: span }),
|
||||||
|
e,
|
||||||
|
)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
@ -817,13 +813,16 @@ fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &Symbol) ->
|
||||||
fn env_expand(
|
fn env_expand(
|
||||||
db: &dyn ExpandDatabase,
|
db: &dyn ExpandDatabase,
|
||||||
arg_id: MacroCallId,
|
arg_id: MacroCallId,
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
let (key, span) = match parse_string(tt) {
|
let (key, span) = match parse_string(tt) {
|
||||||
Ok(it) => it,
|
Ok(it) => it,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
|
return ExpandResult::new(
|
||||||
|
tt::TopSubtree::empty(DelimSpan { open: span, close: span }),
|
||||||
|
e,
|
||||||
|
)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
@ -852,14 +851,14 @@ fn env_expand(
|
||||||
fn option_env_expand(
|
fn option_env_expand(
|
||||||
db: &dyn ExpandDatabase,
|
db: &dyn ExpandDatabase,
|
||||||
arg_id: MacroCallId,
|
arg_id: MacroCallId,
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
call_site: Span,
|
call_site: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
let (key, span) = match parse_string(tt) {
|
let (key, span) = match parse_string(tt) {
|
||||||
Ok(it) => it,
|
Ok(it) => it,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
return ExpandResult::new(
|
return ExpandResult::new(
|
||||||
tt::Subtree::empty(DelimSpan { open: call_site, close: call_site }),
|
tt::TopSubtree::empty(DelimSpan { open: call_site, close: call_site }),
|
||||||
e,
|
e,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
@ -879,11 +878,11 @@ fn option_env_expand(
|
||||||
fn quote_expand(
|
fn quote_expand(
|
||||||
_db: &dyn ExpandDatabase,
|
_db: &dyn ExpandDatabase,
|
||||||
_arg_id: MacroCallId,
|
_arg_id: MacroCallId,
|
||||||
_tt: &tt::Subtree,
|
_tt: &tt::TopSubtree,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> ExpandResult<tt::Subtree> {
|
) -> ExpandResult<tt::TopSubtree> {
|
||||||
ExpandResult::new(
|
ExpandResult::new(
|
||||||
tt::Subtree::empty(tt::DelimSpan { open: span, close: span }),
|
tt::TopSubtree::empty(tt::DelimSpan { open: span, close: span }),
|
||||||
ExpandError::other(span, "quote! is not implemented"),
|
ExpandError::other(span, "quote! is not implemented"),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@@ -6,7 +6,7 @@ use span::Span;
 use syntax::ToSmolStr;
 use tt::IdentIsRaw;
 
-use crate::name::Name;
+use crate::{name::Name, tt::TopSubtreeBuilder};
 
 pub(crate) fn dollar_crate(span: Span) -> tt::Ident<Span> {
     tt::Ident { sym: sym::dollar_crate.clone(), span, is_raw: tt::IdentIsRaw::No }
@ -20,119 +20,93 @@ pub(crate) fn dollar_crate(span: Span) -> tt::Ident<Span> {
|
||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
#[macro_export]
|
#[macro_export]
|
||||||
macro_rules! quote_impl__ {
|
macro_rules! quote_impl__ {
|
||||||
($span:ident) => {
|
($span:ident $builder:ident) => {};
|
||||||
Vec::<$crate::tt::TokenTree>::new()
|
|
||||||
};
|
|
||||||
|
|
||||||
( @SUBTREE($span:ident) $delim:ident $($tt:tt)* ) => {
|
( @SUBTREE($span:ident $builder:ident) $delim:ident $($tt:tt)* ) => {
|
||||||
{
|
{
|
||||||
let children = $crate::builtin::quote::__quote!($span $($tt)*);
|
$builder.open($crate::tt::DelimiterKind::$delim, $span);
|
||||||
$crate::tt::Subtree {
|
$crate::builtin::quote::__quote!($span $builder $($tt)*);
|
||||||
delimiter: $crate::tt::Delimiter {
|
$builder.close($span);
|
||||||
kind: $crate::tt::DelimiterKind::$delim,
|
|
||||||
open: $span,
|
|
||||||
close: $span,
|
|
||||||
},
|
|
||||||
token_trees: $crate::builtin::quote::IntoTt::to_tokens(children).into_boxed_slice(),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
( @PUNCT($span:ident) $first:literal ) => {
|
( @PUNCT($span:ident $builder:ident) $first:literal ) => {
|
||||||
{
|
$builder.push(
|
||||||
vec![
|
|
||||||
$crate::tt::Leaf::Punct($crate::tt::Punct {
|
$crate::tt::Leaf::Punct($crate::tt::Punct {
|
||||||
char: $first,
|
char: $first,
|
||||||
spacing: $crate::tt::Spacing::Alone,
|
spacing: $crate::tt::Spacing::Alone,
|
||||||
span: $span,
|
span: $span,
|
||||||
}).into()
|
})
|
||||||
]
|
);
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
( @PUNCT($span:ident) $first:literal, $sec:literal ) => {
|
( @PUNCT($span:ident $builder:ident) $first:literal, $sec:literal ) => {
|
||||||
{
|
$builder.extend([
|
||||||
vec![
|
|
||||||
$crate::tt::Leaf::Punct($crate::tt::Punct {
|
$crate::tt::Leaf::Punct($crate::tt::Punct {
|
||||||
char: $first,
|
char: $first,
|
||||||
spacing: $crate::tt::Spacing::Joint,
|
spacing: $crate::tt::Spacing::Joint,
|
||||||
span: $span,
|
span: $span,
|
||||||
}).into(),
|
}),
|
||||||
$crate::tt::Leaf::Punct($crate::tt::Punct {
|
$crate::tt::Leaf::Punct($crate::tt::Punct {
|
||||||
char: $sec,
|
char: $sec,
|
||||||
spacing: $crate::tt::Spacing::Alone,
|
spacing: $crate::tt::Spacing::Alone,
|
||||||
span: $span,
|
span: $span,
|
||||||
}).into()
|
})
|
||||||
]
|
]);
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// hash variable
|
// hash variable
|
||||||
($span:ident # $first:ident $($tail:tt)* ) => {
|
($span:ident $builder:ident # $first:ident $($tail:tt)* ) => {
|
||||||
{
|
$crate::builtin::quote::ToTokenTree::to_tokens($first, $span, $builder);
|
||||||
let token = $crate::builtin::quote::ToTokenTree::to_token($first, $span);
|
$crate::builtin::quote::__quote!($span $builder $($tail)*);
|
||||||
let mut tokens = vec![token.into()];
|
|
||||||
let mut tail_tokens = $crate::builtin::quote::IntoTt::to_tokens($crate::builtin::quote::__quote!($span $($tail)*));
|
|
||||||
tokens.append(&mut tail_tokens);
|
|
||||||
tokens
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
($span:ident ## $first:ident $($tail:tt)* ) => {
|
($span:ident $builder:ident ## $first:ident $($tail:tt)* ) => {{
|
||||||
{
|
::std::iter::IntoIterator::into_iter($first).for_each(|it| $crate::builtin::quote::ToTokenTree::to_tokens(it, $span, $builder));
|
||||||
let mut tokens = $first.into_iter().map(|it| $crate::builtin::quote::ToTokenTree::to_token(it, $span)).collect::<Vec<crate::tt::TokenTree>>();
|
$crate::builtin::quote::__quote!($span $builder $($tail)*);
|
||||||
let mut tail_tokens = $crate::builtin::quote::IntoTt::to_tokens($crate::builtin::quote::__quote!($span $($tail)*));
|
}};
|
||||||
tokens.append(&mut tail_tokens);
|
|
||||||
tokens
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Brace
|
// Brace
|
||||||
($span:ident { $($tt:tt)* } ) => { $crate::builtin::quote::__quote!(@SUBTREE($span) Brace $($tt)*) };
|
($span:ident $builder:ident { $($tt:tt)* } ) => { $crate::builtin::quote::__quote!(@SUBTREE($span $builder) Brace $($tt)*) };
|
||||||
// Bracket
|
// Bracket
|
||||||
($span:ident [ $($tt:tt)* ] ) => { $crate::builtin::quote::__quote!(@SUBTREE($span) Bracket $($tt)*) };
|
($span:ident $builder:ident [ $($tt:tt)* ] ) => { $crate::builtin::quote::__quote!(@SUBTREE($span $builder) Bracket $($tt)*) };
|
||||||
// Parenthesis
|
// Parenthesis
|
||||||
($span:ident ( $($tt:tt)* ) ) => { $crate::builtin::quote::__quote!(@SUBTREE($span) Parenthesis $($tt)*) };
|
($span:ident $builder:ident ( $($tt:tt)* ) ) => { $crate::builtin::quote::__quote!(@SUBTREE($span $builder) Parenthesis $($tt)*) };
|
||||||
|
|
||||||
// Literal
|
// Literal
|
||||||
($span:ident $tt:literal ) => { vec![$crate::builtin::quote::ToTokenTree::to_token($tt, $span).into()] };
|
($span:ident $builder:ident $tt:literal ) => { $crate::builtin::quote::ToTokenTree::to_tokens($tt, $span, $builder) };
|
||||||
// Ident
|
// Ident
|
||||||
($span:ident $tt:ident ) => {
|
($span:ident $builder:ident $tt:ident ) => {
|
||||||
vec![ {
|
$builder.push(
|
||||||
$crate::tt::Leaf::Ident($crate::tt::Ident {
|
$crate::tt::Leaf::Ident($crate::tt::Ident {
|
||||||
sym: intern::Symbol::intern(stringify!($tt)),
|
sym: intern::Symbol::intern(stringify!($tt)),
|
||||||
span: $span,
|
span: $span,
|
||||||
is_raw: tt::IdentIsRaw::No,
|
is_raw: tt::IdentIsRaw::No,
|
||||||
}).into()
|
})
|
||||||
}]
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
// Puncts
|
// Puncts
|
||||||
// FIXME: Not all puncts are handled
|
// FIXME: Not all puncts are handled
|
||||||
($span:ident -> ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '-', '>')};
|
($span:ident $builder:ident -> ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) '-', '>')};
|
||||||
($span:ident => ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '=', '>')};
|
($span:ident $builder:ident => ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) '=', '>')};
|
||||||
($span:ident & ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '&')};
|
($span:ident $builder:ident & ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) '&')};
|
||||||
($span:ident , ) => {$crate::builtin::quote::__quote!(@PUNCT($span) ',')};
|
($span:ident $builder:ident , ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) ',')};
|
||||||
($span:ident : ) => {$crate::builtin::quote::__quote!(@PUNCT($span) ':')};
|
($span:ident $builder:ident : ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) ':')};
|
||||||
($span:ident ; ) => {$crate::builtin::quote::__quote!(@PUNCT($span) ';')};
|
($span:ident $builder:ident ; ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) ';')};
|
||||||
($span:ident :: ) => {$crate::builtin::quote::__quote!(@PUNCT($span) ':', ':')};
|
($span:ident $builder:ident :: ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) ':', ':')};
|
||||||
($span:ident . ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '.')};
|
($span:ident $builder:ident . ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) '.')};
|
||||||
($span:ident < ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '<')};
|
($span:ident $builder:ident < ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) '<')};
|
||||||
($span:ident > ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '>')};
|
($span:ident $builder:ident > ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) '>')};
|
||||||
($span:ident ! ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '!')};
|
($span:ident $builder:ident ! ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) '!')};
|
||||||
($span:ident # ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '#')};
|
($span:ident $builder:ident # ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) '#')};
|
||||||
($span:ident $ ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '$')};
|
($span:ident $builder:ident $ ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) '$')};
|
||||||
($span:ident * ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '*')};
|
($span:ident $builder:ident * ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) '*')};
|
||||||
|
|
||||||
($span:ident $first:tt $($tail:tt)+ ) => {
|
($span:ident $builder:ident $first:tt $($tail:tt)+ ) => {{
|
||||||
{
|
$crate::builtin::quote::__quote!($span $builder $first);
|
||||||
let mut tokens = $crate::builtin::quote::IntoTt::to_tokens($crate::builtin::quote::__quote!($span $first ));
|
$crate::builtin::quote::__quote!($span $builder $($tail)*);
|
||||||
let mut tail_tokens = $crate::builtin::quote::IntoTt::to_tokens($crate::builtin::quote::__quote!($span $($tail)*));
|
}};
|
||||||
|
|
||||||
tokens.append(&mut tail_tokens);
|
|
||||||
tokens
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
pub use quote_impl__ as __quote;
|
pub use quote_impl__ as __quote;
|
||||||
|
|
||||||
|
|
@@ -141,52 +115,68 @@ pub use quote_impl__ as __quote;
 #[macro_export]
 macro_rules! quote {
     ($span:ident=> $($tt:tt)* ) => {
-        $crate::builtin::quote::IntoTt::to_subtree($crate::builtin::quote::__quote!($span $($tt)*), $span)
+        {
+            let mut builder = $crate::tt::TopSubtreeBuilder::new($crate::tt::Delimiter {
+                kind: $crate::tt::DelimiterKind::Invisible,
+                open: $span,
+                close: $span,
+            });
+            #[allow(unused)]
+            let builder_ref = &mut builder;
+            $crate::builtin::quote::__quote!($span builder_ref $($tt)*);
+            builder.build_skip_top_subtree()
+        }
     }
 }
 pub(super) use quote;
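With this change `quote!` no longer builds intermediate `Vec<TokenTree>`s and splices them together; every rule pushes straight into a `TopSubtreeBuilder`, which keeps one flat buffer and back-patches each subtree's length when it is closed. The sketch below is a self-contained model of that builder with stand-in types and a simplified `build` (not the real `TopSubtreeBuilder` API).

#[derive(Debug)]
enum Token {
    Leaf(String),
    Subtree { delim: char, len: usize },
}

struct Builder {
    tokens: Vec<Token>,
    open_stack: Vec<usize>, // indices of subtree headers awaiting `close`
}

impl Builder {
    fn new() -> Self {
        Builder { tokens: Vec::new(), open_stack: Vec::new() }
    }
    fn push(&mut self, text: &str) {
        self.tokens.push(Token::Leaf(text.to_string()));
    }
    fn open(&mut self, delim: char) {
        self.open_stack.push(self.tokens.len());
        self.tokens.push(Token::Subtree { delim, len: 0 });
    }
    fn close(&mut self) {
        let header = self.open_stack.pop().expect("unbalanced close");
        let body_len = self.tokens.len() - header - 1;
        if let Token::Subtree { len, .. } = &mut self.tokens[header] {
            *len = body_len; // back-patch: the header now knows how many entries it owns
        }
    }
    fn build(self) -> Vec<Token> {
        assert!(self.open_stack.is_empty(), "unclosed subtree");
        self.tokens
    }
}

fn main() {
    // Roughly what `quote! {span => foo ( bar ) }` would drive the builder to do.
    let mut b = Builder::new();
    b.push("foo");
    b.open('(');
    b.push("bar");
    b.close();
    for t in b.build() {
        println!("{t:?}");
    }
}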
||||||
|
|
||||||
pub trait IntoTt {
|
|
||||||
fn to_subtree(self, span: Span) -> crate::tt::Subtree;
|
|
||||||
fn to_tokens(self) -> Vec<crate::tt::TokenTree>;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl IntoTt for Vec<crate::tt::TokenTree> {
|
|
||||||
fn to_subtree(self, span: Span) -> crate::tt::Subtree {
|
|
||||||
crate::tt::Subtree {
|
|
||||||
delimiter: crate::tt::Delimiter::invisible_spanned(span),
|
|
||||||
token_trees: self.into_boxed_slice(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn to_tokens(self) -> Vec<crate::tt::TokenTree> {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl IntoTt for crate::tt::Subtree {
|
|
||||||
fn to_subtree(self, _: Span) -> crate::tt::Subtree {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
fn to_tokens(self) -> Vec<crate::tt::TokenTree> {
|
|
||||||
vec![crate::tt::TokenTree::Subtree(self)]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait ToTokenTree {
|
pub trait ToTokenTree {
|
||||||
fn to_token(self, span: Span) -> crate::tt::TokenTree;
|
fn to_tokens(self, span: Span, builder: &mut TopSubtreeBuilder);
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ToTokenTree for crate::tt::TokenTree {
|
/// Wraps `TokenTreesView` with a delimiter (a subtree, but without allocating).
|
||||||
fn to_token(self, _: Span) -> crate::tt::TokenTree {
|
pub struct WithDelimiter<'a> {
|
||||||
self
|
pub delimiter: crate::tt::Delimiter,
|
||||||
|
pub token_trees: crate::tt::TokenTreesView<'a>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ToTokenTree for WithDelimiter<'_> {
|
||||||
|
fn to_tokens(self, span: Span, builder: &mut TopSubtreeBuilder) {
|
||||||
|
builder.open(self.delimiter.kind, self.delimiter.open);
|
||||||
|
self.token_trees.to_tokens(span, builder);
|
||||||
|
builder.close(self.delimiter.close);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ToTokenTree for crate::tt::Subtree {
|
impl ToTokenTree for crate::tt::TokenTreesView<'_> {
|
||||||
fn to_token(self, _: Span) -> crate::tt::TokenTree {
|
fn to_tokens(self, _: Span, builder: &mut TopSubtreeBuilder) {
|
||||||
self.into()
|
builder.extend_with_tt(self);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ToTokenTree for crate::tt::SubtreeView<'_> {
|
||||||
|
fn to_tokens(self, _: Span, builder: &mut TopSubtreeBuilder) {
|
||||||
|
builder.extend_with_tt(self.as_token_trees());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ToTokenTree for crate::tt::TopSubtree {
|
||||||
|
fn to_tokens(self, _: Span, builder: &mut TopSubtreeBuilder) {
|
||||||
|
builder.extend_tt_dangerous(self.0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ToTokenTree for crate::tt::TtElement<'_> {
|
||||||
|
fn to_tokens(self, _: Span, builder: &mut TopSubtreeBuilder) {
|
||||||
|
match self {
|
||||||
|
crate::tt::TtElement::Leaf(leaf) => builder.push(leaf.clone()),
|
||||||
|
crate::tt::TtElement::Subtree(subtree, subtree_iter) => {
|
||||||
|
builder.extend_tt_dangerous(
|
||||||
|
std::iter::once(crate::tt::TokenTree::Subtree(subtree.clone()))
|
||||||
|
.chain(subtree_iter.remaining().flat_tokens().iter().cloned()),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -194,18 +184,17 @@ macro_rules! impl_to_to_tokentrees {
|
||||||
($($span:ident: $ty:ty => $this:ident $im:block;)*) => {
|
($($span:ident: $ty:ty => $this:ident $im:block;)*) => {
|
||||||
$(
|
$(
|
||||||
impl ToTokenTree for $ty {
|
impl ToTokenTree for $ty {
|
||||||
fn to_token($this, $span: Span) -> crate::tt::TokenTree {
|
fn to_tokens($this, $span: Span, builder: &mut TopSubtreeBuilder) {
|
||||||
let leaf: crate::tt::Leaf = $im.into();
|
let leaf: crate::tt::Leaf = $im.into();
|
||||||
leaf.into()
|
builder.push(leaf);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
)*
|
)*
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: ToTokenTree + Clone> ToTokenTree for &T {
|
impl<T: ToTokenTree + Clone> ToTokenTree for &T {
|
||||||
fn to_token(self, span: Span) -> crate::tt::TokenTree {
|
fn to_tokens(self, span: Span, builder: &mut TopSubtreeBuilder) {
|
||||||
self.clone().to_token(span)
|
self.clone().to_tokens(span, builder);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -316,18 +305,15 @@ mod tests {
|
||||||
// }
|
// }
|
||||||
let struct_name = mk_ident("Foo");
|
let struct_name = mk_ident("Foo");
|
||||||
let fields = [mk_ident("name"), mk_ident("id")];
|
let fields = [mk_ident("name"), mk_ident("id")];
|
||||||
let fields = fields
|
let fields = fields.iter().map(|it| quote!(DUMMY =>#it: self.#it.clone(), ));
|
||||||
.iter()
|
|
||||||
.flat_map(|it| quote!(DUMMY =>#it: self.#it.clone(), ).token_trees.into_vec());
|
|
||||||
|
|
||||||
let list = crate::tt::Subtree {
|
let mut builder = tt::TopSubtreeBuilder::new(crate::tt::Delimiter {
|
||||||
delimiter: crate::tt::Delimiter {
|
|
||||||
kind: crate::tt::DelimiterKind::Brace,
|
kind: crate::tt::DelimiterKind::Brace,
|
||||||
open: DUMMY,
|
open: DUMMY,
|
||||||
close: DUMMY,
|
close: DUMMY,
|
||||||
},
|
});
|
||||||
token_trees: fields.collect(),
|
fields.for_each(|field| builder.extend_with_tt(field.view().as_token_trees()));
|
||||||
};
|
let list = builder.build();
|
||||||
|
|
||||||
let quoted = quote! {DUMMY =>
|
let quoted = quote! {DUMMY =>
|
||||||
impl Clone for #struct_name {
|
impl Clone for #struct_name {
|
||||||
|
|
|
||||||
|
|
@ -28,7 +28,7 @@ use crate::{
|
||||||
MacroDefId, MacroDefKind, MacroFileId,
|
MacroDefId, MacroDefKind, MacroFileId,
|
||||||
};
|
};
|
||||||
/// This is just to ensure the types of smart_macro_arg and macro_arg are the same
|
/// This is just to ensure the types of smart_macro_arg and macro_arg are the same
|
||||||
type MacroArgResult = (Arc<tt::Subtree>, SyntaxFixupUndoInfo, Span);
|
type MacroArgResult = (Arc<tt::TopSubtree>, SyntaxFixupUndoInfo, Span);
|
||||||
/// Total limit on the number of tokens produced by any macro invocation.
|
/// Total limit on the number of tokens produced by any macro invocation.
|
||||||
///
|
///
|
||||||
/// If an invocation produces more tokens than this limit, it will not be stored in the database and
|
/// If an invocation produces more tokens than this limit, it will not be stored in the database and
|
||||||
|
|
@ -123,7 +123,7 @@ pub trait ExpandDatabase: SourceDatabase {
|
||||||
/// proc macros, since they are not deterministic in general, and
|
/// proc macros, since they are not deterministic in general, and
|
||||||
/// non-determinism breaks salsa in a very, very, very bad way.
|
/// non-determinism breaks salsa in a very, very, very bad way.
|
||||||
/// @edwin0cheng heroically debugged this once! See #4315 for details
|
/// @edwin0cheng heroically debugged this once! See #4315 for details
|
||||||
fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
|
fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<Arc<tt::TopSubtree>>;
|
||||||
/// Retrieves the span to be used for a proc-macro expansions spans.
|
/// Retrieves the span to be used for a proc-macro expansions spans.
|
||||||
/// This is a firewall query as it requires parsing the file, which we don't want proc-macros to
|
/// This is a firewall query as it requires parsing the file, which we don't want proc-macros to
|
||||||
/// directly depend on as that would cause to frequent invalidations, mainly because of the
|
/// directly depend on as that would cause to frequent invalidations, mainly because of the
|
||||||
|
|
@ -251,7 +251,7 @@ pub fn expand_speculative(
|
||||||
span,
|
span,
|
||||||
DocCommentDesugarMode::ProcMacro,
|
DocCommentDesugarMode::ProcMacro,
|
||||||
);
|
);
|
||||||
tree.delimiter = tt::Delimiter::invisible_spanned(span);
|
*tree.top_subtree_delimiter_mut() = tt::Delimiter::invisible_spanned(span);
|
||||||
|
|
||||||
Some(tree)
|
Some(tree)
|
||||||
}
|
}
|
||||||
|
|
@ -266,7 +266,7 @@ pub fn expand_speculative(
|
||||||
let mut speculative_expansion = match loc.def.kind {
|
let mut speculative_expansion = match loc.def.kind {
|
||||||
MacroDefKind::ProcMacro(ast, expander, _) => {
|
MacroDefKind::ProcMacro(ast, expander, _) => {
|
||||||
let span = db.proc_macro_span(ast);
|
let span = db.proc_macro_span(ast);
|
||||||
tt.delimiter = tt::Delimiter::invisible_spanned(span);
|
*tt.top_subtree_delimiter_mut() = tt::Delimiter::invisible_spanned(span);
|
||||||
expander.expand(
|
expander.expand(
|
||||||
db,
|
db,
|
||||||
loc.def.krate,
|
loc.def.krate,
|
||||||
|
|
@ -429,10 +429,10 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
|
||||||
|
|
||||||
let dummy_tt = |kind| {
|
let dummy_tt = |kind| {
|
||||||
(
|
(
|
||||||
Arc::new(tt::Subtree {
|
Arc::new(tt::TopSubtree::from_token_trees(
|
||||||
delimiter: tt::Delimiter { open: span, close: span, kind },
|
tt::Delimiter { open: span, close: span, kind },
|
||||||
token_trees: Box::default(),
|
tt::TokenTreesView::new(&[]),
|
||||||
}),
|
)),
|
||||||
SyntaxFixupUndoInfo::default(),
|
SyntaxFixupUndoInfo::default(),
|
||||||
span,
|
span,
|
||||||
)
|
)
|
||||||
|
|
@ -479,7 +479,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
|
||||||
);
|
);
|
||||||
if loc.def.is_proc_macro() {
|
if loc.def.is_proc_macro() {
|
||||||
// proc macros expect their inputs without parentheses, MBEs expect it with them included
|
// proc macros expect their inputs without parentheses, MBEs expect it with them included
|
||||||
tt.delimiter.kind = tt::DelimiterKind::Invisible;
|
tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Invisible;
|
||||||
}
|
}
|
||||||
return (Arc::new(tt), SyntaxFixupUndoInfo::NONE, span);
|
return (Arc::new(tt), SyntaxFixupUndoInfo::NONE, span);
|
||||||
}
|
}
|
||||||
|
|
@ -537,7 +537,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
|
||||||
|
|
||||||
if loc.def.is_proc_macro() {
|
if loc.def.is_proc_macro() {
|
||||||
// proc macros expect their inputs without parentheses, MBEs expect it with them included
|
// proc macros expect their inputs without parentheses, MBEs expect it with them included
|
||||||
tt.delimiter.kind = tt::DelimiterKind::Invisible;
|
tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Invisible;
|
||||||
}
|
}
|
||||||
|
|
||||||
(Arc::new(tt), undo_info, span)
|
(Arc::new(tt), undo_info, span)
|
||||||
|
|
@ -592,7 +592,7 @@ fn macro_expand(
|
||||||
db: &dyn ExpandDatabase,
|
db: &dyn ExpandDatabase,
|
||||||
macro_call_id: MacroCallId,
|
macro_call_id: MacroCallId,
|
||||||
loc: MacroCallLoc,
|
loc: MacroCallLoc,
|
||||||
) -> ExpandResult<(CowArc<tt::Subtree>, MatchedArmIndex)> {
|
) -> ExpandResult<(CowArc<tt::TopSubtree>, MatchedArmIndex)> {
|
||||||
let _p = tracing::info_span!("macro_expand").entered();
|
let _p = tracing::info_span!("macro_expand").entered();
|
||||||
|
|
||||||
let (ExpandResult { value: (tt, matched_arm), err }, span) = match loc.def.kind {
|
let (ExpandResult { value: (tt, matched_arm), err }, span) = match loc.def.kind {
|
||||||
|
|
@@ -655,12 +655,7 @@ fn macro_expand(
     // Set a hard limit for the expanded tt
     if let Err(value) = check_tt_count(&tt) {
         return value
-            .map(|()| {
-                CowArc::Owned(tt::Subtree {
-                    delimiter: tt::Delimiter::invisible_spanned(span),
-                    token_trees: Box::new([]),
-                })
-            })
+            .map(|()| CowArc::Owned(tt::TopSubtree::empty(tt::DelimSpan::from_single(span))))
             .zip_val(matched_arm);
     }
 }
@ -679,7 +674,10 @@ fn proc_macro_span(db: &dyn ExpandDatabase, ast: AstId<ast::Fn>) -> Span {
|
||||||
span_map.span_for_range(range)
|
span_map.span_for_range(range)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
|
fn expand_proc_macro(
|
||||||
|
db: &dyn ExpandDatabase,
|
||||||
|
id: MacroCallId,
|
||||||
|
) -> ExpandResult<Arc<tt::TopSubtree>> {
|
||||||
let loc = db.lookup_intern_macro_call(id);
|
let loc = db.lookup_intern_macro_call(id);
|
||||||
let (macro_arg, undo_info, span) = db.macro_arg_considering_derives(id, &loc.kind);
|
let (macro_arg, undo_info, span) = db.macro_arg_considering_derives(id, &loc.kind);
|
||||||
|
|
||||||
|
|
@ -709,12 +707,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
|
||||||
|
|
||||||
// Set a hard limit for the expanded tt
|
// Set a hard limit for the expanded tt
|
||||||
if let Err(value) = check_tt_count(&tt) {
|
if let Err(value) = check_tt_count(&tt) {
|
||||||
return value.map(|()| {
|
return value.map(|()| Arc::new(tt::TopSubtree::empty(tt::DelimSpan::from_single(span))));
|
||||||
Arc::new(tt::Subtree {
|
|
||||||
delimiter: tt::Delimiter::invisible_spanned(span),
|
|
||||||
token_trees: Box::new([]),
|
|
||||||
})
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fixup::reverse_fixups(&mut tt, &undo_info);
|
fixup::reverse_fixups(&mut tt, &undo_info);
|
||||||
|
|
@ -723,7 +716,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
|
||||||
}
|
}
|
||||||
|
|
||||||
fn token_tree_to_syntax_node(
|
fn token_tree_to_syntax_node(
|
||||||
tt: &tt::Subtree,
|
tt: &tt::TopSubtree,
|
||||||
expand_to: ExpandTo,
|
expand_to: ExpandTo,
|
||||||
edition: parser::Edition,
|
edition: parser::Edition,
|
||||||
) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
|
) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
|
||||||
|
|
@ -737,7 +730,8 @@ fn token_tree_to_syntax_node(
|
||||||
syntax_bridge::token_tree_to_syntax_node(tt, entry_point, edition)
|
syntax_bridge::token_tree_to_syntax_node(tt, entry_point, edition)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {
|
fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
|
||||||
|
let tt = tt.top_subtree();
|
||||||
let count = tt.count();
|
let count = tt.count();
|
||||||
if TOKEN_LIMIT.check(count).is_err() {
|
if TOKEN_LIMIT.check(count).is_err() {
|
||||||
Err(ExpandResult {
|
Err(ExpandResult {
|
||||||
|
|
|
||||||
|
|
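The two hunks above replace the hand-rolled empty `tt::Subtree` with `tt::TopSubtree::empty(...)` when `check_tt_count` reports that the expansion exceeds the token limit. Below is a minimal sketch of that guard pattern on its own, using illustrative stand-in types (`Token`, `TOKEN_LIMIT`) rather than the real rust-analyzer items:

```rust
// Minimal sketch of a "hard limit" guard for an expansion.
// `Token` and `TOKEN_LIMIT` are stand-ins, not the real rust-analyzer API.

#[derive(Debug, Clone, PartialEq)]
enum Token {
    Ident(String),
    Punct(char),
}

const TOKEN_LIMIT: usize = 4; // deliberately tiny for the demo

/// Returns Err(()) when the expansion is too large; the caller then
/// substitutes an empty stream, mirroring `check_tt_count` + `TopSubtree::empty`.
fn check_token_count(tokens: &[Token]) -> Result<(), ()> {
    if tokens.len() > TOKEN_LIMIT {
        Err(())
    } else {
        Ok(())
    }
}

fn expand(tokens: Vec<Token>) -> Vec<Token> {
    if check_token_count(&tokens).is_err() {
        // Hard cap hit: return an empty stream instead of the oversized one.
        return Vec::new();
    }
    tokens
}

fn main() {
    let small = vec![Token::Ident("a".into()), Token::Punct('+')];
    let big = vec![Token::Punct('!'); 10];
    assert_eq!(expand(small.clone()), small);
    assert!(expand(big).is_empty());
    println!("limit guard works");
}
```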
@@ -26,14 +26,14 @@ impl DeclarativeMacroExpander {
 pub fn expand(
 &self,
 db: &dyn ExpandDatabase,
-tt: tt::Subtree,
+tt: tt::TopSubtree,
 call_id: MacroCallId,
 span: Span,
-) -> ExpandResult<(tt::Subtree, Option<u32>)> {
+) -> ExpandResult<(tt::TopSubtree, Option<u32>)> {
 let loc = db.lookup_intern_macro_call(call_id);
 match self.mac.err() {
 Some(_) => ExpandResult::new(
-(tt::Subtree::empty(tt::DelimSpan { open: span, close: span }), None),
+(tt::TopSubtree::empty(tt::DelimSpan { open: span, close: span }), None),
 ExpandError::new(span, ExpandErrorKind::MacroDefinition),
 ),
 None => self

@@ -50,13 +50,13 @@ impl DeclarativeMacroExpander {

 pub fn expand_unhygienic(
 &self,
-tt: tt::Subtree,
+tt: tt::TopSubtree,
 call_site: Span,
 def_site_edition: Edition,
-) -> ExpandResult<tt::Subtree> {
+) -> ExpandResult<tt::TopSubtree> {
 match self.mac.err() {
 Some(_) => ExpandResult::new(
-tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
+tt::TopSubtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
 ExpandError::new(call_site, ExpandErrorKind::MacroDefinition),
 ),
 None => self

@@ -78,7 +78,7 @@ impl DeclarativeMacroExpander {
 let transparency = |node| {
 // ... would be nice to have the item tree here
 let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
-match &*attrs
+match attrs
 .iter()
 .find(|it| {
 it.path

@@ -87,7 +87,8 @@ impl DeclarativeMacroExpander {
 .unwrap_or(false)
 })?
 .token_tree_value()?
-.token_trees
+.token_trees()
+.flat_tokens()
 {
 [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &i.sym {
 s if *s == sym::transparent => Some(Transparency::Transparent),

@@ -89,7 +89,7 @@ pub fn expand_eager_macro_input(
 DocCommentDesugarMode::Mbe,
 );

-subtree.delimiter.kind = crate::tt::DelimiterKind::Invisible;
+subtree.top_subtree_delimiter_mut().kind = crate::tt::DelimiterKind::Invisible;

 let loc = MacroCallLoc {
 def,

@@ -3,7 +3,6 @@

 use intern::sym;
 use rustc_hash::{FxHashMap, FxHashSet};
-use smallvec::SmallVec;
 use span::{
 ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, FIXUP_ERASED_FILE_AST_ID_MARKER,
 ROOT_ERASED_FILE_AST_ID,

@@ -19,7 +18,7 @@ use tt::Spacing;

 use crate::{
 span_map::SpanMapRef,
-tt::{Ident, Leaf, Punct, Subtree},
+tt::{self, Ident, Leaf, Punct, TopSubtree},
 };

 /// The result of calculating fixes for a syntax node -- a bunch of changes

@@ -36,7 +35,7 @@ pub(crate) struct SyntaxFixups {
 #[derive(Clone, Debug, Default, PartialEq, Eq)]
 pub struct SyntaxFixupUndoInfo {
 // FIXME: ThinArc<[Subtree]>
-original: Option<Arc<Box<[Subtree]>>>,
+original: Option<Arc<Box<[TopSubtree]>>>,
 }

 impl SyntaxFixupUndoInfo {
@@ -369,68 +368,118 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
 has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
 }

-pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
+pub(crate) fn reverse_fixups(tt: &mut TopSubtree, undo_info: &SyntaxFixupUndoInfo) {
 let Some(undo_info) = undo_info.original.as_deref() else { return };
 let undo_info = &**undo_info;
+let delimiter = tt.top_subtree_delimiter_mut();
 #[allow(deprecated)]
 if never!(
-tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
-|| tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
+delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
+|| delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
 ) {
 let span = |file_id| Span {
 range: TextRange::empty(TextSize::new(0)),
 anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
 ctx: SyntaxContextId::ROOT,
 };
-tt.delimiter.open = span(tt.delimiter.open.anchor.file_id);
-tt.delimiter.close = span(tt.delimiter.close.anchor.file_id);
+delimiter.open = span(delimiter.open.anchor.file_id);
+delimiter.close = span(delimiter.close.anchor.file_id);
 }
 reverse_fixups_(tt, undo_info);
 }

-fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
-let tts = std::mem::take(&mut tt.token_trees).into_vec();
-tt.token_trees = tts
-.into_iter()
-// delete all fake nodes
-.filter(|tt| match tt {
+enum TransformTtAction<'a> {
+Keep,
+ReplaceWith(tt::TokenTreesView<'a>),
+}
+
+impl TransformTtAction<'_> {
+fn remove() -> Self {
+Self::ReplaceWith(tt::TokenTreesView::new(&[]))
+}
+}
+
+fn transform_tt<'a, 'b>(
+tt: &'a mut Vec<tt::TokenTree>,
+mut callback: impl FnMut(&mut tt::TokenTree) -> TransformTtAction<'b>,
+) {
+let mut subtrees_stack = Vec::new();
+let mut i = 0;
+while i < tt.len() {
+while let Some(&subtree_idx) = subtrees_stack.last() {
+let tt::TokenTree::Subtree(subtree) = &tt[subtree_idx] else {
+unreachable!("non-subtree on subtrees stack");
+};
+if subtree_idx + 1 + subtree.usize_len() == i {
+subtrees_stack.pop();
+} else {
+break;
+}
+}
+
+let action = callback(&mut tt[i]);
+match action {
+TransformTtAction::Keep => {}
+TransformTtAction::ReplaceWith(replacement) => {
+let old_len = 1 + match &tt[i] {
+tt::TokenTree::Leaf(_) => 0,
+tt::TokenTree::Subtree(subtree) => subtree.usize_len(),
+};
+let len_diff = replacement.len() as i64 - old_len as i64;
+tt.splice(i..i + old_len, replacement.flat_tokens().iter().cloned());
+i = i.checked_add_signed(len_diff as isize).unwrap();
+
+for &subtree_idx in &subtrees_stack {
+let tt::TokenTree::Subtree(subtree) = &mut tt[subtree_idx] else {
+unreachable!("non-subtree on subtrees stack");
+};
+subtree.len = (subtree.len as i64 + len_diff).try_into().unwrap();
+}
+}
+}
+
+// `tt[i]` might have been removed.
+if let Some(tt::TokenTree::Subtree(_)) = tt.get(i) {
+subtrees_stack.push(i);
+}
+
+i += 1;
+}
+}
+
+fn reverse_fixups_(tt: &mut TopSubtree, undo_info: &[TopSubtree]) {
+let mut tts = std::mem::take(&mut tt.0).into_vec();
+transform_tt(&mut tts, |tt| match tt {
 tt::TokenTree::Leaf(leaf) => {
 let span = leaf.span();
 let is_real_leaf = span.anchor.ast_id != FIXUP_DUMMY_AST_ID;
 let is_replaced_node = span.range.end() == FIXUP_DUMMY_RANGE_END;
-is_real_leaf || is_replaced_node
+if !is_real_leaf && !is_replaced_node {
+return TransformTtAction::remove();
 }
-tt::TokenTree::Subtree(_) => true,
-})
-.flat_map(|tt| match tt {
-tt::TokenTree::Subtree(mut tt) => {
+if !is_real_leaf {
+// we have a fake node here, we need to replace it again with the original
+let original = &undo_info[u32::from(leaf.span().range.start()) as usize];
+TransformTtAction::ReplaceWith(original.view().strip_invisible())
+} else {
+// just a normal leaf
+TransformTtAction::Keep
+}
+}
+tt::TokenTree::Subtree(tt) => {
 if tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
 || tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
 {
 // Even though fixup never creates subtrees with fixup spans, the old proc-macro server
 // might copy them if the proc-macro asks for it, so we need to filter those out
 // here as well.
-return SmallVec::new_const();
+return TransformTtAction::remove();
 }
-reverse_fixups_(&mut tt, undo_info);
-SmallVec::from_const([tt.into()])
+TransformTtAction::Keep
 }
-tt::TokenTree::Leaf(leaf) => {
-if leaf.span().anchor.ast_id == FIXUP_DUMMY_AST_ID {
-// we have a fake node here, we need to replace it again with the original
-let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
-if original.delimiter.kind == tt::DelimiterKind::Invisible {
-SmallVec::from(original.token_trees.into_vec())
-} else {
-SmallVec::from_const([original.into()])
-}
-} else {
-// just a normal leaf
-SmallVec::from_const([leaf.into()])
-}
-}
-})
-.collect();
+});
+tt.0 = tts.into_boxed_slice();
 }

 #[cfg(test)]
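The new `transform_tt` above edits the flat token buffer in place: when the callback replaces or removes an element, the lengths recorded in every enclosing subtree header have to be patched. The following is a minimal sketch of that idea under simplified assumptions (a stand-in `Flat` token type, a single "remove this leaf" transformation), not the real `tt` crate API:

```rust
// Sketch of flat-buffer editing with ancestor length fix-up, using stand-in types.
// A subtree is stored as a header followed by `len` flattened tokens.

#[derive(Debug, Clone, Copy, PartialEq)]
enum Flat {
    Leaf(char),
    Subtree { len: usize }, // number of flattened tokens that follow the header
}

/// Remove every leaf equal to `target`, shrinking the `len` of enclosing subtrees.
fn remove_leaf(tokens: &mut Vec<Flat>, target: char) {
    let mut open: Vec<usize> = Vec::new(); // indices of subtree headers we are inside
    let mut i = 0;
    while i < tokens.len() {
        // Pop subtrees whose flattened range ended before `i`.
        while let Some(&hdr) = open.last() {
            let Flat::Subtree { len } = tokens[hdr] else { unreachable!() };
            if hdr + 1 + len == i {
                open.pop();
            } else {
                break;
            }
        }
        match tokens[i] {
            Flat::Leaf(c) if c == target => {
                tokens.remove(i);
                // Every ancestor subtree just lost one flattened token.
                for &hdr in &open {
                    if let Flat::Subtree { len } = &mut tokens[hdr] {
                        *len -= 1;
                    }
                }
                // do not advance `i`; the next token shifted into this slot
            }
            Flat::Subtree { .. } => {
                open.push(i);
                i += 1;
            }
            Flat::Leaf(_) => i += 1,
        }
    }
}

fn main() {
    // `( a x b )` encoded flat: a header with len 3, then the leaves.
    let mut tts = vec![
        Flat::Subtree { len: 3 },
        Flat::Leaf('a'),
        Flat::Leaf('x'),
        Flat::Leaf('b'),
    ];
    remove_leaf(&mut tts, 'x');
    assert_eq!(tts, vec![Flat::Subtree { len: 2 }, Flat::Leaf('a'), Flat::Leaf('b')]);
    println!("{tts:?}");
}
```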
@@ -458,16 +507,18 @@ mod tests {
 }
 }

-fn check_subtree_eq(a: &tt::Subtree, b: &tt::Subtree) -> bool {
-a.delimiter.kind == b.delimiter.kind
-&& a.token_trees.len() == b.token_trees.len()
-&& a.token_trees.iter().zip(b.token_trees.iter()).all(|(a, b)| check_tt_eq(a, b))
+fn check_subtree_eq(a: &tt::TopSubtree, b: &tt::TopSubtree) -> bool {
+let a = a.view().as_token_trees().flat_tokens();
+let b = b.view().as_token_trees().flat_tokens();
+a.len() == b.len() && std::iter::zip(a, b).all(|(a, b)| check_tt_eq(a, b))
 }

 fn check_tt_eq(a: &tt::TokenTree, b: &tt::TokenTree) -> bool {
 match (a, b) {
 (tt::TokenTree::Leaf(a), tt::TokenTree::Leaf(b)) => check_leaf_eq(a, b),
-(tt::TokenTree::Subtree(a), tt::TokenTree::Subtree(b)) => check_subtree_eq(a, b),
+(tt::TokenTree::Subtree(a), tt::TokenTree::Subtree(b)) => {
+a.delimiter.kind == b.delimiter.kind
+}
 _ => false,
 }
 }

@@ -70,12 +70,17 @@ pub mod tt {
 pub type Delimiter = ::tt::Delimiter<Span>;
 pub type DelimSpan = ::tt::DelimSpan<Span>;
 pub type Subtree = ::tt::Subtree<Span>;
-pub type SubtreeBuilder = ::tt::SubtreeBuilder<Span>;
 pub type Leaf = ::tt::Leaf<Span>;
 pub type Literal = ::tt::Literal<Span>;
 pub type Punct = ::tt::Punct<Span>;
 pub type Ident = ::tt::Ident<Span>;
 pub type TokenTree = ::tt::TokenTree<Span>;
+pub type TopSubtree = ::tt::TopSubtree<Span>;
+pub type TopSubtreeBuilder = ::tt::TopSubtreeBuilder<Span>;
+pub type TokenTreesView<'a> = ::tt::TokenTreesView<'a, Span>;
+pub type SubtreeView<'a> = ::tt::SubtreeView<'a, Span>;
+pub type TtElement<'a> = ::tt::iter::TtElement<'a, Span>;
+pub type TtIter<'a> = ::tt::iter::TtIter<'a, Span>;
 }

 #[macro_export]
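The new `TopSubtree`/`TokenTreesView` aliases above are the public face of the PR's core change: a token stream is no longer a recursive tree of boxed slices but one contiguous vector in which a subtree header records how many flattened tokens belong to it. A sketch contrasting the two layouts, assuming simplified stand-in types (the real `tt` types also carry spans and delimiter kinds, omitted here):

```rust
// Stand-in types contrasting the old nested layout with the new flat layout.

// Old shape: a recursive tree, one allocation per subtree.
#[derive(Debug)]
enum Nested {
    Leaf(char),
    Subtree(Vec<Nested>),
}

// New shape: one contiguous buffer; a subtree header stores the number of
// flattened tokens (all descendants) that follow it.
#[derive(Debug, PartialEq)]
enum Flat {
    Leaf(char),
    Subtree { len: usize },
}

fn flatten(node: &Nested, out: &mut Vec<Flat>) {
    match node {
        Nested::Leaf(c) => out.push(Flat::Leaf(*c)),
        Nested::Subtree(children) => {
            let header = out.len();
            out.push(Flat::Subtree { len: 0 }); // patched once children are in
            for child in children {
                flatten(child, out);
            }
            let len = out.len() - header - 1;
            out[header] = Flat::Subtree { len };
        }
    }
}

fn main() {
    // ( a ( b c ) d )
    let tree = Nested::Subtree(vec![
        Nested::Leaf('a'),
        Nested::Subtree(vec![Nested::Leaf('b'), Nested::Leaf('c')]),
        Nested::Leaf('d'),
    ]);
    let mut flat = Vec::new();
    flatten(&tree, &mut flat);
    assert_eq!(
        flat,
        vec![
            Flat::Subtree { len: 5 },
            Flat::Leaf('a'),
            Flat::Subtree { len: 2 },
            Flat::Leaf('b'),
            Flat::Leaf('c'),
            Flat::Leaf('d'),
        ]
    );
    println!("{flat:?}");
}
```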
@@ -284,7 +289,7 @@ impl MacroDefKind {
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct EagerCallInfo {
 /// The expanded argument of the eager macro.
-arg: Arc<tt::Subtree>,
+arg: Arc<tt::TopSubtree>,
 /// Call id of the eager macro's input file (this is the macro file for its fully expanded input).
 arg_id: MacroCallId,
 error: Option<ExpandError>,

@@ -320,7 +325,7 @@ pub enum MacroCallKind {
 ast_id: AstId<ast::Item>,
 // FIXME: This shouldn't be here, we can derive this from `invoc_attr_index`
 // but we need to fix the `cfg_attr` handling first.
-attr_args: Option<Arc<tt::Subtree>>,
+attr_args: Option<Arc<tt::TopSubtree>>,
 /// Syntactical index of the invoking `#[attribute]`.
 ///
 /// Outer attributes are counted first, then inner attributes. This does not support

@@ -58,7 +58,7 @@ impl ModPath {
 convert_path(db, path, span_for_range)
 }

-pub fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModPath> {
+pub fn from_tt(db: &dyn ExpandDatabase, tt: tt::TokenTreesView<'_>) -> Option<ModPath> {
 convert_path_tt(db, tt)
 }

@@ -315,10 +315,10 @@ fn convert_path(
 Some(mod_path)
 }

-fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModPath> {
+fn convert_path_tt(db: &dyn ExpandDatabase, tt: tt::TokenTreesView<'_>) -> Option<ModPath> {
 let mut leaves = tt.iter().filter_map(|tt| match tt {
-tt::TokenTree::Leaf(leaf) => Some(leaf),
-tt::TokenTree::Subtree(_) => None,
+tt::TtElement::Leaf(leaf) => Some(leaf),
+tt::TtElement::Subtree(..) => None,
 });
 let mut segments = smallvec::smallvec![];
 let kind = match leaves.next()? {

@@ -23,14 +23,14 @@ pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
 /// [`ProcMacroKind::Attr`]), environment variables, and span information.
 fn expand(
 &self,
-subtree: &tt::Subtree,
-attrs: Option<&tt::Subtree>,
+subtree: &tt::TopSubtree,
+attrs: Option<&tt::TopSubtree>,
 env: &Env,
 def_site: Span,
 call_site: Span,
 mixed_site: Span,
 current_dir: Option<String>,
-) -> Result<tt::Subtree, ProcMacroExpansionError>;
+) -> Result<tt::TopSubtree, ProcMacroExpansionError>;
 }

 #[derive(Debug)]

@@ -201,23 +201,23 @@ impl CustomProcMacroExpander {
 db: &dyn ExpandDatabase,
 def_crate: CrateId,
 calling_crate: CrateId,
-tt: &tt::Subtree,
-attr_arg: Option<&tt::Subtree>,
+tt: &tt::TopSubtree,
+attr_arg: Option<&tt::TopSubtree>,
 def_site: Span,
 call_site: Span,
 mixed_site: Span,
-) -> ExpandResult<tt::Subtree> {
+) -> ExpandResult<tt::TopSubtree> {
 match self.proc_macro_id {
 Self::PROC_MACRO_ATTR_DISABLED => ExpandResult::new(
-tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
+tt::TopSubtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
 ExpandError::new(call_site, ExpandErrorKind::ProcMacroAttrExpansionDisabled),
 ),
 Self::MISSING_EXPANDER => ExpandResult::new(
-tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
+tt::TopSubtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
 ExpandError::new(call_site, ExpandErrorKind::MissingProcMacroExpander(def_crate)),
 ),
 Self::DISABLED_ID => ExpandResult::new(
-tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
+tt::TopSubtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
 ExpandError::new(call_site, ExpandErrorKind::MacroDisabled),
 ),
 id => {

@@ -226,7 +226,10 @@ impl CustomProcMacroExpander {
 Ok(proc_macro) => proc_macro,
 Err(e) => {
 return ExpandResult::new(
-tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
+tt::TopSubtree::empty(tt::DelimSpan {
+open: call_site,
+close: call_site,
+}),
 e,
 )
 }

@@ -260,7 +263,10 @@ impl CustomProcMacroExpander {
 }
 ProcMacroExpansionError::System(text)
 | ProcMacroExpansionError::Panic(text) => ExpandResult::new(
-tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
+tt::TopSubtree::empty(tt::DelimSpan {
+open: call_site,
+close: call_site,
+}),
 ExpandError::new(
 call_site,
 ExpandErrorKind::ProcMacroPanic(text.into_boxed_str()),

@@ -113,7 +113,7 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
 let get = |name| {
 let attr = attrs.by_key(name).tt_values();
 for tree in attr {
-if let Some(it) = tree.token_trees.first() {
+if let Some(it) = tree.iter().next_as_view() {
 let text = it.to_string().replace('_', "");
 let (text, base) = match text.as_bytes() {
 [b'0', b'x', ..] => (&text[2..], 16),
@@ -476,17 +476,17 @@ struct Expander(proc_macro_api::ProcMacro);
 impl ProcMacroExpander for Expander {
 fn expand(
 &self,
-subtree: &tt::Subtree<Span>,
-attrs: Option<&tt::Subtree<Span>>,
+subtree: &tt::TopSubtree<Span>,
+attrs: Option<&tt::TopSubtree<Span>>,
 env: &Env,
 def_site: Span,
 call_site: Span,
 mixed_site: Span,
 current_dir: Option<String>,
-) -> Result<tt::Subtree<Span>, ProcMacroExpansionError> {
+) -> Result<tt::TopSubtree<Span>, ProcMacroExpansionError> {
 match self.0.expand(
-subtree,
-attrs,
+subtree.view(),
+attrs.map(|attrs| attrs.view()),
 env.clone().into(),
 def_site,
 call_site,

@@ -53,11 +53,11 @@ fn benchmark_expand_macro_rules() {
 .map(|(id, tt)| {
 let res = rules[&id].expand(&tt, |_| (), DUMMY, Edition::CURRENT);
 assert!(res.err.is_none());
-res.value.0.token_trees.len()
+res.value.0 .0.len()
 })
 .sum()
 };
-assert_eq!(hash, 65720);
+assert_eq!(hash, 450144);
 }

 fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {

@@ -68,7 +68,7 @@ fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
 .collect()
 }

-fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<Span>> {
+fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::TopSubtree<Span>> {
 let fixture = bench_fixture::numerous_macro_rules();
 let source_file = ast::SourceFile::parse(&fixture, span::Edition::CURRENT).ok().unwrap();

@@ -92,7 +92,7 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<Span>> {
 /// Generate random invocation fixtures from rules
 fn invocation_fixtures(
 rules: &FxHashMap<String, DeclarativeMacro>,
-) -> Vec<(String, tt::Subtree<Span>)> {
+) -> Vec<(String, tt::TopSubtree<Span>)> {
 let mut seed = 123456789;
 let mut res = Vec::new();

@@ -112,19 +112,16 @@ fn invocation_fixtures(
 // So we just skip any error cases and try again
 let mut try_cnt = 0;
 loop {
-let mut token_trees = Vec::new();
-for op in rule.lhs.iter() {
-collect_from_op(op, &mut token_trees, &mut seed);
-}
-
-let subtree = tt::Subtree {
-delimiter: tt::Delimiter {
+let mut builder = tt::TopSubtreeBuilder::new(tt::Delimiter {
 open: DUMMY,
 close: DUMMY,
 kind: tt::DelimiterKind::Invisible,
-},
-token_trees: token_trees.into_boxed_slice(),
-};
+});
+for op in rule.lhs.iter() {
+collect_from_op(op, &mut builder, &mut seed);
+}
+let subtree = builder.build();

 if it.expand(&subtree, |_| (), DUMMY, Edition::CURRENT).err.is_none() {
 res.push((name.clone(), subtree));
 break;
@@ -139,43 +136,41 @@ fn invocation_fixtures(
 }
 return res;

-fn collect_from_op(op: &Op, token_trees: &mut Vec<tt::TokenTree<Span>>, seed: &mut usize) {
+fn collect_from_op(op: &Op, builder: &mut tt::TopSubtreeBuilder<Span>, seed: &mut usize) {
 return match op {
 Op::Var { kind, .. } => match kind.as_ref() {
-Some(MetaVarKind::Ident) => token_trees.push(make_ident("foo")),
-Some(MetaVarKind::Ty) => token_trees.push(make_ident("Foo")),
-Some(MetaVarKind::Tt) => token_trees.push(make_ident("foo")),
-Some(MetaVarKind::Vis) => token_trees.push(make_ident("pub")),
-Some(MetaVarKind::Pat) => token_trees.push(make_ident("foo")),
-Some(MetaVarKind::Path) => token_trees.push(make_ident("foo")),
-Some(MetaVarKind::Literal) => token_trees.push(make_literal("1")),
-Some(MetaVarKind::Expr(_)) => token_trees.push(make_ident("foo")),
+Some(MetaVarKind::Ident) => builder.push(make_ident("foo")),
+Some(MetaVarKind::Ty) => builder.push(make_ident("Foo")),
+Some(MetaVarKind::Tt) => builder.push(make_ident("foo")),
+Some(MetaVarKind::Vis) => builder.push(make_ident("pub")),
+Some(MetaVarKind::Pat) => builder.push(make_ident("foo")),
+Some(MetaVarKind::Path) => builder.push(make_ident("foo")),
+Some(MetaVarKind::Literal) => builder.push(make_literal("1")),
+Some(MetaVarKind::Expr(_)) => builder.push(make_ident("foo")),
 Some(MetaVarKind::Lifetime) => {
-token_trees.push(make_punct('\''));
-token_trees.push(make_ident("a"));
-}
-Some(MetaVarKind::Block) => {
-token_trees.push(make_subtree(tt::DelimiterKind::Brace, None))
+builder.push(make_punct('\''));
+builder.push(make_ident("a"));
 }
+Some(MetaVarKind::Block) => make_subtree(tt::DelimiterKind::Brace, builder),
 Some(MetaVarKind::Item) => {
-token_trees.push(make_ident("fn"));
-token_trees.push(make_ident("foo"));
-token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None));
-token_trees.push(make_subtree(tt::DelimiterKind::Brace, None));
+builder.push(make_ident("fn"));
+builder.push(make_ident("foo"));
+make_subtree(tt::DelimiterKind::Parenthesis, builder);
+make_subtree(tt::DelimiterKind::Brace, builder);
 }
 Some(MetaVarKind::Meta) => {
-token_trees.push(make_ident("foo"));
-token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None));
+builder.push(make_ident("foo"));
+make_subtree(tt::DelimiterKind::Parenthesis, builder);
 }

 None => (),
 Some(kind) => panic!("Unhandled kind {kind:?}"),
 },
-Op::Literal(it) => token_trees.push(tt::Leaf::from(it.clone()).into()),
-Op::Ident(it) => token_trees.push(tt::Leaf::from(it.clone()).into()),
+Op::Literal(it) => builder.push(tt::Leaf::from(it.clone())),
+Op::Ident(it) => builder.push(tt::Leaf::from(it.clone())),
 Op::Punct(puncts) => {
 for punct in puncts.as_slice() {
-token_trees.push(tt::Leaf::from(*punct).into());
+builder.push(tt::Leaf::from(*punct));
 }
 }
 Op::Repeat { tokens, kind, separator } => {

@@ -187,20 +182,18 @@ fn invocation_fixtures(
 };
 for i in 0..cnt {
 for it in tokens.iter() {
-collect_from_op(it, token_trees, seed);
+collect_from_op(it, builder, seed);
 }
 if i + 1 != cnt {
 if let Some(sep) = separator {
 match &**sep {
 Separator::Literal(it) => {
-token_trees.push(tt::Leaf::Literal(it.clone()).into())
-}
-Separator::Ident(it) => {
-token_trees.push(tt::Leaf::Ident(it.clone()).into())
+builder.push(tt::Leaf::Literal(it.clone()))
 }
+Separator::Ident(it) => builder.push(tt::Leaf::Ident(it.clone())),
 Separator::Puncts(puncts) => {
 for it in puncts {
-token_trees.push(tt::Leaf::Punct(*it).into())
+builder.push(tt::Leaf::Punct(*it))
 }
 }
 };

@@ -209,15 +202,9 @@ fn invocation_fixtures(
 }
 }
 Op::Subtree { tokens, delimiter } => {
-let mut subtree = Vec::new();
-tokens.iter().for_each(|it| {
-collect_from_op(it, &mut subtree, seed);
-});
-
-let subtree =
-tt::Subtree { delimiter: *delimiter, token_trees: subtree.into_boxed_slice() };
-
-token_trees.push(subtree.into());
+builder.open(delimiter.kind, delimiter.open);
+tokens.iter().for_each(|it| collect_from_op(it, builder, seed));
+builder.close(delimiter.close);
 }
 Op::Ignore { .. }
 | Op::Index { .. }

@@ -233,35 +220,27 @@ fn invocation_fixtures(
 *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
 *seed
 }
-fn make_ident(ident: &str) -> tt::TokenTree<Span> {
+fn make_ident(ident: &str) -> tt::Leaf<Span> {
 tt::Leaf::Ident(tt::Ident {
 span: DUMMY,
 sym: Symbol::intern(ident),
 is_raw: tt::IdentIsRaw::No,
 })
-.into()
 }
-fn make_punct(char: char) -> tt::TokenTree<Span> {
-tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone }).into()
+fn make_punct(char: char) -> tt::Leaf<Span> {
+tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone })
 }
-fn make_literal(lit: &str) -> tt::TokenTree<Span> {
+fn make_literal(lit: &str) -> tt::Leaf<Span> {
 tt::Leaf::Literal(tt::Literal {
 span: DUMMY,
 symbol: Symbol::intern(lit),
 kind: tt::LitKind::Str,
 suffix: None,
 })
-.into()
 }
-fn make_subtree(
-kind: tt::DelimiterKind,
-token_trees: Option<Vec<tt::TokenTree<Span>>>,
-) -> tt::TokenTree<Span> {
-tt::Subtree {
-delimiter: tt::Delimiter { open: DUMMY, close: DUMMY, kind },
-token_trees: token_trees.map(Vec::into_boxed_slice).unwrap_or_default(),
-}
-.into()
+fn make_subtree(kind: tt::DelimiterKind, builder: &mut tt::TopSubtreeBuilder<Span>) {
+builder.open(kind, DUMMY);
+builder.close(DUMMY);
 }
 }
 }
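The benchmark above now produces its random fixtures through `TopSubtreeBuilder` (`open`/`push`/`close`/`build`) instead of assembling nested `tt::Subtree` values. A minimal stand-in builder over the same flat encoding could look like the sketch below; the method names mirror the diff, but the types are simplified assumptions, not the real crate:

```rust
// Stand-in builder for the flat token encoding: `open`/`push`/`close`/`build`.

#[derive(Debug, PartialEq)]
enum Flat {
    Leaf(char),
    Subtree { len: usize },
}

#[derive(Default)]
struct Builder {
    tokens: Vec<Flat>,
    open_headers: Vec<usize>, // indices of subtree headers not yet closed
}

impl Builder {
    fn open(&mut self) {
        self.open_headers.push(self.tokens.len());
        self.tokens.push(Flat::Subtree { len: 0 });
    }

    fn push(&mut self, leaf: char) {
        self.tokens.push(Flat::Leaf(leaf));
    }

    fn close(&mut self) {
        let header = self.open_headers.pop().expect("unbalanced close");
        // Everything appended since `open` belongs to this subtree.
        let len = self.tokens.len() - header - 1;
        self.tokens[header] = Flat::Subtree { len };
    }

    fn build(mut self) -> Vec<Flat> {
        // Close anything left open so the buffer is always well formed.
        while !self.open_headers.is_empty() {
            self.close();
        }
        self.tokens
    }
}

fn main() {
    // Builds `f ( x )`
    let mut b = Builder::default();
    b.push('f');
    b.open();
    b.push('x');
    b.close();
    let tokens = b.build();
    assert_eq!(
        tokens,
        vec![Flat::Leaf('f'), Flat::Subtree { len: 1 }, Flat::Leaf('x')]
    );
    println!("{tokens:?}");
}
```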
@@ -13,12 +13,12 @@ use crate::{parser::MetaVarKind, ExpandError, ExpandErrorKind, ExpandResult, Mat

 pub(crate) fn expand_rules(
 rules: &[crate::Rule],
-input: &tt::Subtree<Span>,
+input: &tt::TopSubtree<Span>,
 marker: impl Fn(&mut Span) + Copy,
 call_site: Span,
 def_site_edition: Edition,
-) -> ExpandResult<(tt::Subtree<Span>, MatchedArmIndex)> {
-let mut match_: Option<(matcher::Match, &crate::Rule, usize)> = None;
+) -> ExpandResult<(tt::TopSubtree<Span>, MatchedArmIndex)> {
+let mut match_: Option<(matcher::Match<'_>, &crate::Rule, usize)> = None;
 for (idx, rule) in rules.iter().enumerate() {
 let new_match = matcher::match_(&rule.lhs, input, def_site_edition);

@@ -50,13 +50,7 @@ pub(crate) fn expand_rules(
 ExpandResult { value: (value, idx.try_into().ok()), err: match_.err.or(transcribe_err) }
 } else {
 ExpandResult::new(
-(
-tt::Subtree {
-delimiter: tt::Delimiter::invisible_spanned(call_site),
-token_trees: Box::default(),
-},
-None,
-),
+(tt::TopSubtree::empty(tt::DelimSpan::from_single(call_site)), None),
 ExpandError::new(call_site, ExpandErrorKind::NoMatchingRule),
 )
 }

@@ -107,32 +101,35 @@ pub(crate) fn expand_rules(
 /// In other words, `Bindings` is a *multi* mapping from `Symbol` to
 /// `tt::TokenTree`, where the index to select a particular `TokenTree` among
 /// many is not a plain `usize`, but a `&[usize]`.
-#[derive(Debug, Default, Clone, PartialEq, Eq)]
-struct Bindings {
-inner: FxHashMap<Symbol, Binding>,
+#[derive(Debug, Default, Clone)]
+struct Bindings<'a> {
+inner: FxHashMap<Symbol, Binding<'a>>,
 }

-#[derive(Debug, Clone, PartialEq, Eq)]
-enum Binding {
-Fragment(Fragment),
-Nested(Vec<Binding>),
+#[derive(Debug, Clone)]
+enum Binding<'a> {
+Fragment(Fragment<'a>),
+Nested(Vec<Binding<'a>>),
 Empty,
 Missing(MetaVarKind),
 }

-#[derive(Debug, Clone, PartialEq, Eq)]
-enum Fragment {
+#[derive(Debug, Default, Clone)]
+enum Fragment<'a> {
+#[default]
 Empty,
 /// token fragments are just copy-pasted into the output
-Tokens(tt::TokenTree<Span>),
-/// Expr ast fragments are surrounded with `()` on insertion to preserve
-/// precedence. Note that this impl is different from the one currently in
-/// `rustc` -- `rustc` doesn't translate fragments into token trees at all.
+Tokens(tt::TokenTreesView<'a, Span>),
+/// Expr ast fragments are surrounded with `()` on transcription to preserve precedence.
+/// Note that this impl is different from the one currently in `rustc` --
+/// `rustc` doesn't translate fragments into token trees at all.
 ///
 /// At one point in time, we tried to use "fake" delimiters here à la
 /// proc-macro delimiter=none. As we later discovered, "none" delimiters are
 /// tricky to handle in the parser, and rustc doesn't handle those either.
-Expr(tt::Subtree<Span>),
+///
+/// The span of the outer delimiters is marked on transcription.
+Expr(tt::TokenTreesView<'a, Span>),
 /// There are roughly two types of paths: paths in expression context, where a
 /// separator `::` between an identifier and its following generic argument list
 /// is mandatory, and paths in type context, where `::` can be omitted.

@@ -142,5 +139,18 @@ enum Fragment {
 /// and is trasncribed as an expression-context path, verbatim transcription
 /// would cause a syntax error. We need to fix it up just before transcribing;
 /// see `transcriber::fix_up_and_push_path_tt()`.
-Path(tt::Subtree<Span>),
+Path(tt::TokenTreesView<'a, Span>),
+TokensOwned(tt::TopSubtree<Span>),
+}
+
+impl Fragment<'_> {
+fn is_empty(&self) -> bool {
+match self {
+Fragment::Empty => true,
+Fragment::Tokens(it) => it.len() == 0,
+Fragment::Expr(it) => it.len() == 0,
+Fragment::Path(it) => it.len() == 0,
+Fragment::TokensOwned(it) => it.0.is_empty(),
+}
+}
 }
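With the flat storage, a matched fragment no longer needs to own a `tt::Subtree`: `Fragment<'a>` now mostly borrows a `TokenTreesView<'a>` (a slice of the caller's buffer), with `TokensOwned` as the escape hatch for synthesized tokens. A simplified sketch of that borrowed-slice idea, using stand-in types rather than the real `tt` API:

```rust
// Stand-in for the borrowed-fragment idea: a matched fragment is usually just
// a slice of the caller's flat token buffer, and only sometimes owned.

#[derive(Debug, Clone, PartialEq)]
enum Flat {
    Leaf(char),
    Subtree { len: usize },
}

#[derive(Debug)]
enum Fragment<'a> {
    Empty,
    /// Borrowed view into the input buffer -- no copy is made on match.
    Tokens(&'a [Flat]),
    /// Owned tokens, for the rare case where the matcher has to synthesize them.
    TokensOwned(Vec<Flat>),
}

impl Fragment<'_> {
    fn is_empty(&self) -> bool {
        match self {
            Fragment::Empty => true,
            Fragment::Tokens(view) => view.is_empty(),
            Fragment::TokensOwned(tokens) => tokens.is_empty(),
        }
    }
}

fn main() {
    let input = vec![Flat::Leaf('a'), Flat::Subtree { len: 1 }, Flat::Leaf('b')];
    // A binding for `$x:tt` could borrow the subtree header plus its one token.
    let binding = Fragment::Tokens(&input[1..3]);
    assert!(!binding.is_empty());
    assert!(Fragment::Empty.is_empty());
    assert!(Fragment::TokensOwned(Vec::new()).is_empty());
    println!("{binding:?}");
}
```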
@@ -64,7 +64,10 @@ use std::{rc::Rc, sync::Arc};
 use intern::{sym, Symbol};
 use smallvec::{smallvec, SmallVec};
 use span::{Edition, Span};
-use tt::{iter::TtIter, DelimSpan};
+use tt::{
+iter::{TtElement, TtIter},
+DelimSpan,
+};

 use crate::{
 expander::{Binding, Bindings, ExpandResult, Fragment},

@@ -73,7 +76,7 @@ use crate::{
 ExpandError, ExpandErrorKind, MetaTemplate, ValueResult,
 };

-impl Bindings {
+impl<'a> Bindings<'a> {
 fn push_optional(&mut self, name: Symbol) {
 self.inner.insert(name, Binding::Fragment(Fragment::Empty));
 }

@@ -82,14 +85,14 @@ impl Bindings {
 self.inner.insert(name, Binding::Empty);
 }

-fn bindings(&self) -> impl Iterator<Item = &Binding> {
+fn bindings(&self) -> impl Iterator<Item = &Binding<'a>> {
 self.inner.values()
 }
 }

-#[derive(Clone, Default, Debug, PartialEq, Eq)]
-pub(super) struct Match {
-pub(super) bindings: Bindings,
+#[derive(Clone, Default, Debug)]
+pub(super) struct Match<'a> {
+pub(super) bindings: Bindings<'a>,
 /// We currently just keep the first error and count the rest to compare matches.
 pub(super) err: Option<ExpandError>,
 pub(super) err_count: usize,

@@ -99,7 +102,7 @@ pub(super) struct Match {
 pub(super) bound_count: usize,
 }

-impl Match {
+impl Match<'_> {
 fn add_err(&mut self, err: ExpandError) {
 let prev_err = self.err.take();
 self.err = prev_err.or(Some(err));

@@ -108,12 +111,16 @@ impl Match {
 }

 /// Matching errors are added to the `Match`.
-pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree<Span>, edition: Edition) -> Match {
+pub(super) fn match_<'t>(
+pattern: &'t MetaTemplate,
+input: &'t tt::TopSubtree<Span>,
+edition: Edition,
+) -> Match<'t> {
 let mut res = match_loop(pattern, input, edition);
 res.bound_count = count(res.bindings.bindings());
 return res;

-fn count<'a>(bindings: impl Iterator<Item = &'a Binding>) -> usize {
+fn count<'a>(bindings: impl Iterator<Item = &'a Binding<'a>>) -> usize {
 bindings
 .map(|it| match it {
 Binding::Fragment(_) => 1,

@@ -126,10 +133,10 @@ pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree<Span>, edition:
 }

 #[derive(Debug, Clone)]
-enum BindingKind {
+enum BindingKind<'a> {
 Empty(Symbol),
 Optional(Symbol),
-Fragment(Symbol, Fragment),
+Fragment(Symbol, Fragment<'a>),
 Missing(Symbol, MetaVarKind),
 Nested(usize, usize),
 }

@@ -144,12 +151,12 @@ enum LinkNode<T> {
 }

 #[derive(Default)]
-struct BindingsBuilder {
-nodes: Vec<Vec<LinkNode<Rc<BindingKind>>>>,
+struct BindingsBuilder<'a> {
+nodes: Vec<Vec<LinkNode<Rc<BindingKind<'a>>>>>,
 nested: Vec<Vec<LinkNode<usize>>>,
 }

-impl BindingsBuilder {
+impl<'a> BindingsBuilder<'a> {
 fn alloc(&mut self) -> BindingsIdx {
 let idx = self.nodes.len();
 self.nodes.push(Vec::new());

@@ -186,7 +193,7 @@ impl BindingsBuilder {
 self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Optional(var.clone()))));
 }

-fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &Symbol, fragment: Fragment) {
+fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &Symbol, fragment: Fragment<'a>) {
 self.nodes[idx.0]
 .push(LinkNode::Node(Rc::new(BindingKind::Fragment(var.clone(), fragment))));
 }

@@ -207,11 +214,11 @@ impl BindingsBuilder {
 idx.0 = new_idx;
 }

-fn build(self, idx: &BindingsIdx) -> Bindings {
+fn build(self, idx: &BindingsIdx) -> Bindings<'a> {
 self.build_inner(&self.nodes[idx.0])
 }

-fn build_inner(&self, link_nodes: &[LinkNode<Rc<BindingKind>>]) -> Bindings {
+fn build_inner(&self, link_nodes: &[LinkNode<Rc<BindingKind<'a>>>]) -> Bindings<'a> {
 let mut bindings = Bindings::default();
 let mut nodes = Vec::new();
 self.collect_nodes(link_nodes, &mut nodes);

@@ -257,11 +264,11 @@ impl BindingsBuilder {
 bindings
 }

-fn collect_nested_ref<'a>(
-&'a self,
+fn collect_nested_ref<'b>(
+&'b self,
 id: usize,
 len: usize,
-nested_refs: &mut Vec<&'a [LinkNode<Rc<BindingKind>>]>,
+nested_refs: &mut Vec<&'b [LinkNode<Rc<BindingKind<'a>>>]>,
 ) {
 self.nested[id].iter().take(len).for_each(|it| match it {
 LinkNode::Node(id) => nested_refs.push(&self.nodes[*id]),

@@ -269,7 +276,7 @@ impl BindingsBuilder {
 });
 }

-fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec<Bindings>) {
+fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec<Bindings<'a>>) {
 let last = &self.nodes[idx];
 let mut nested_refs: Vec<&[_]> = Vec::new();
 self.nested[nested_idx].iter().for_each(|it| match *it {

@@ -280,17 +287,22 @@ impl BindingsBuilder {
 nested.extend(nested_refs.into_iter().map(|iter| self.build_inner(iter)));
 }

-fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind>) {
+fn collect_nodes_ref<'b>(
+&'b self,
+id: usize,
+len: usize,
+nodes: &mut Vec<&'b BindingKind<'a>>,
+) {
 self.nodes[id].iter().take(len).for_each(|it| match it {
 LinkNode::Node(it) => nodes.push(it),
 LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes),
 });
 }

-fn collect_nodes<'a>(
-&'a self,
-link_nodes: &'a [LinkNode<Rc<BindingKind>>],
-nodes: &mut Vec<&'a BindingKind>,
+fn collect_nodes<'b>(
+&'b self,
+link_nodes: &'b [LinkNode<Rc<BindingKind<'a>>>],
+nodes: &mut Vec<&'b BindingKind<'a>>,
 ) {
 link_nodes.iter().for_each(|it| match it {
 LinkNode::Node(it) => nodes.push(it),

@@ -327,7 +339,7 @@ struct MatchState<'t> {
 bindings: BindingsIdx,

 /// Cached result of meta variable parsing
-meta_result: Option<(TtIter<'t, Span>, ExpandResult<Option<Fragment>>)>,
+meta_result: Option<(TtIter<'t, Span>, ExpandResult<Option<Fragment<'t>>>)>,

 /// Is error occurred in this state, will `poised` to "parent"
 is_error: bool,
@@ -355,8 +367,8 @@ struct MatchState<'t> {
 fn match_loop_inner<'t>(
 src: TtIter<'t, Span>,
 stack: &[TtIter<'t, Span>],
-res: &mut Match,
-bindings_builder: &mut BindingsBuilder,
+res: &mut Match<'t>,
+bindings_builder: &mut BindingsBuilder<'t>,
 cur_items: &mut SmallVec<[MatchState<'t>; 1]>,
 bb_items: &mut SmallVec<[MatchState<'t>; 1]>,
 next_items: &mut Vec<MatchState<'t>>,

@@ -463,7 +475,7 @@ fn match_loop_inner<'t>(
 })
 }
 OpDelimited::Op(Op::Subtree { tokens, delimiter }) => {
-if let Ok(subtree) = src.clone().expect_subtree() {
+if let Ok((subtree, _)) = src.clone().expect_subtree() {
 if subtree.delimiter.kind == delimiter.kind {
 item.stack.push(item.dot);
 item.dot = tokens.iter_delimited_with(*delimiter);

@@ -478,8 +490,8 @@ fn match_loop_inner<'t>(
 match match_res.err {
 None => {
 // Some meta variables are optional (e.g. vis)
-if match_res.value.is_some() {
-item.meta_result = Some((fork, match_res));
+if !match_res.value.is_empty() {
+item.meta_result = Some((fork, match_res.map(Some)));
 try_push!(bb_items, item);
 } else {
 bindings_builder.push_optional(&mut item.bindings, name);

@@ -489,16 +501,15 @@ fn match_loop_inner<'t>(
 }
 }
 Some(err) => {
 res.add_err(err);
-match match_res.value {
-Some(fragment) => bindings_builder.push_fragment(
+if !match_res.value.is_empty() {
+bindings_builder.push_fragment(
 &mut item.bindings,
 name,
-fragment,
-),
-None => {
+match_res.value,
+)
+} else {
 bindings_builder.push_missing(&mut item.bindings, name, kind)
 }
-}
 item.is_error = true;
 error_items.push(item);
 }

@@ -593,13 +604,13 @@ fn match_loop_inner<'t>(
 stdx::never!("metavariable expression in lhs found");
 }
 OpDelimited::Open => {
-if matches!(src.peek_n(0), Some(tt::TokenTree::Subtree(..))) {
+if matches!(src.peek(), Some(TtElement::Subtree(..))) {
 item.dot.next();
 try_push!(next_items, item);
 }
 }
 OpDelimited::Close => {
-let is_delim_closed = src.peek_n(0).is_none() && !stack.is_empty();
+let is_delim_closed = src.is_empty() && !stack.is_empty();
 if is_delim_closed {
 item.dot.next();
 try_push!(next_items, item);

@@ -609,9 +620,13 @@ fn match_loop_inner<'t>(
 }
 }

-fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>, edition: Edition) -> Match {
-let span = src.delimiter.delim_span();
-let mut src = TtIter::new(src);
+fn match_loop<'t>(
+pattern: &'t MetaTemplate,
+src: &'t tt::TopSubtree<Span>,
+edition: Edition,
+) -> Match<'t> {
+let span = src.top_subtree().delimiter.delim_span();
+let mut src = src.iter();
 let mut stack: SmallVec<[TtIter<'_, Span>; 1]> = SmallVec::new();
 let mut res = Match::default();
 let mut error_recover_item = None;

@@ -663,7 +678,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>, edition: Edition)
 // We need to do some post processing after the `match_loop_inner`.
 // If we reached the EOF, check that there is EXACTLY ONE possible matcher. Otherwise,
 // either the parse is ambiguous (which should never happen) or there is a syntax error.
-if src.peek_n(0).is_none() && stack.is_empty() {
+if src.is_empty() && stack.is_empty() {
 if let [state] = &*eof_items {
 // remove all errors, because it is the correct answer !
 res = Match::default();

@@ -687,11 +702,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>, edition: Edition)
 || !(bb_items.is_empty() || next_items.is_empty())
 || bb_items.len() > 1;
 if has_leftover_tokens {
-res.unmatched_tts += src.len();
-while let Some(it) = stack.pop() {
-src = it;
-res.unmatched_tts += src.len();
-}
+res.unmatched_tts += src.remaining().flat_tokens().len();
 res.add_err(ExpandError::new(span.open, ExpandErrorKind::LeftoverTokens));

 if let Some(error_recover_item) = error_recover_item {

@@ -714,9 +725,9 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>, edition: Edition)
 }
 } else {
 match src.next() {
-Some(tt::TokenTree::Subtree(subtree)) => {
+Some(TtElement::Subtree(_, subtree_iter)) => {
 stack.push(src.clone());
-src = TtIter::new(subtree);
+src = subtree_iter;
 }
 None => {
 if let Some(iter) = stack.pop() {
@ -760,18 +771,16 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree<Span>, edition: Edition)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn match_meta_var(
|
fn match_meta_var<'t>(
|
||||||
kind: MetaVarKind,
|
kind: MetaVarKind,
|
||||||
input: &mut TtIter<'_, Span>,
|
input: &mut TtIter<'t, Span>,
|
||||||
delim_span: DelimSpan<Span>,
|
delim_span: DelimSpan<Span>,
|
||||||
edition: Edition,
|
edition: Edition,
|
||||||
) -> ExpandResult<Option<Fragment>> {
|
) -> ExpandResult<Fragment<'t>> {
|
||||||
let fragment = match kind {
|
let fragment = match kind {
|
||||||
MetaVarKind::Path => {
|
MetaVarKind::Path => {
|
||||||
return expect_fragment(input, parser::PrefixEntryPoint::Path, edition, delim_span)
|
return expect_fragment(input, parser::PrefixEntryPoint::Path, edition, delim_span)
|
||||||
.map(|it| {
|
.map(Fragment::Path);
|
||||||
it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
MetaVarKind::Expr(expr) => {
|
MetaVarKind::Expr(expr) => {
|
||||||
// `expr_2021` should not match underscores, let expressions, or inline const.
|
// `expr_2021` should not match underscores, let expressions, or inline const.
|
||||||
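In the hunk above, a matched `$path` is no longer re-wrapped into a fresh subtree: `expect_fragment(..).map(Fragment::Path)` keeps the matched tokens as they sit in the input, which is why `match_meta_var`, `Match`, and `Bindings` all grow a `'t` lifetime in this commit. A minimal sketch of that shape, using invented stand-in types rather than the crate's own:

    // Bindings that borrow a view of the macro input instead of owning
    // re-boxed subtrees; the lifetime plays the role of `'t` in the diff.
    #[derive(Debug)]
    enum Fragment<'t> {
        Path(&'t [&'t str]),
        Tokens(&'t [&'t str]),
    }

    fn match_path<'t>(input: &'t [&'t str]) -> Fragment<'t> {
        // Pretend the whole remaining input is the matched path.
        Fragment::Path(input)
    }

    fn main() {
        let input = ["std", "::", "mem", "::", "swap"];
        let path = match_path(&input);
        let tail = Fragment::Tokens(&input[3..]);
        // Both bindings are views; nothing was cloned or re-allocated.
        println!("{path:?} / {tail:?}");
    }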
|
|
@ -782,8 +791,8 @@ fn match_meta_var(
|
||||||
// rustc [explicitly checks the next token][1].
|
// rustc [explicitly checks the next token][1].
|
||||||
// [0]: https://github.com/rust-lang/rust/issues/86730
|
// [0]: https://github.com/rust-lang/rust/issues/86730
|
||||||
// [1]: https://github.com/rust-lang/rust/blob/f0c4da499/compiler/rustc_expand/src/mbe/macro_parser.rs#L576
|
// [1]: https://github.com/rust-lang/rust/blob/f0c4da499/compiler/rustc_expand/src/mbe/macro_parser.rs#L576
|
||||||
match input.peek_n(0) {
|
match input.peek() {
|
||||||
Some(tt::TokenTree::Leaf(tt::Leaf::Ident(it))) => {
|
Some(TtElement::Leaf(tt::Leaf::Ident(it))) => {
|
||||||
let is_err = if it.is_raw.no() && matches!(expr, ExprKind::Expr2021) {
|
let is_err = if it.is_raw.no() && matches!(expr, ExprKind::Expr2021) {
|
||||||
it.sym == sym::underscore || it.sym == sym::let_ || it.sym == sym::const_
|
it.sym == sym::underscore || it.sym == sym::let_ || it.sym == sym::const_
|
||||||
} else {
|
} else {
|
||||||
|
|
@ -799,33 +808,14 @@ fn match_meta_var(
|
||||||
_ => {}
|
_ => {}
|
||||||
};
|
};
|
||||||
return expect_fragment(input, parser::PrefixEntryPoint::Expr, edition, delim_span)
|
return expect_fragment(input, parser::PrefixEntryPoint::Expr, edition, delim_span)
|
||||||
.map(|tt| {
|
.map(Fragment::Expr);
|
||||||
tt.map(|tt| match tt {
|
|
||||||
tt::TokenTree::Leaf(leaf) => tt::Subtree {
|
|
||||||
delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
|
|
||||||
token_trees: Box::new([leaf.into()]),
|
|
||||||
},
|
|
||||||
tt::TokenTree::Subtree(mut s) => {
|
|
||||||
if s.delimiter.kind == tt::DelimiterKind::Invisible {
|
|
||||||
s.delimiter.kind = tt::DelimiterKind::Parenthesis;
|
|
||||||
}
|
|
||||||
s
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.map(Fragment::Expr)
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
|
MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
|
||||||
let span = input.next_span();
|
let span = input.next_span();
|
||||||
let tt_result = match kind {
|
let savepoint = input.savepoint();
|
||||||
MetaVarKind::Ident => input
|
let err = match kind {
|
||||||
.expect_ident()
|
MetaVarKind::Ident => input.expect_ident().map(drop).map_err(|()| {
|
||||||
.map(|ident| tt::Leaf::from(ident.clone()).into())
|
ExpandError::binding_error(span.unwrap_or(delim_span.close), "expected ident")
|
||||||
.map_err(|()| {
|
|
||||||
ExpandError::binding_error(
|
|
||||||
span.unwrap_or(delim_span.close),
|
|
||||||
"expected ident",
|
|
||||||
)
|
|
||||||
}),
|
}),
|
||||||
MetaVarKind::Tt => expect_tt(input).map_err(|()| {
|
MetaVarKind::Tt => expect_tt(input).map_err(|()| {
|
||||||
ExpandError::binding_error(
|
ExpandError::binding_error(
|
||||||
|
|
@ -840,20 +830,8 @@ fn match_meta_var(
|
||||||
)
|
)
|
||||||
}),
|
}),
|
||||||
MetaVarKind::Literal => {
|
MetaVarKind::Literal => {
|
||||||
let neg = eat_char(input, '-');
|
eat_char(input, '-');
|
||||||
input
|
input.expect_literal().map(drop).map_err(|()| {
|
||||||
.expect_literal()
|
|
||||||
.map(|literal| {
|
|
||||||
let lit = literal.clone();
|
|
||||||
match neg {
|
|
||||||
None => lit.into(),
|
|
||||||
Some(neg) => tt::TokenTree::Subtree(tt::Subtree {
|
|
||||||
delimiter: tt::Delimiter::invisible_spanned(*literal.span()),
|
|
||||||
token_trees: Box::new([neg, lit.into()]),
|
|
||||||
}),
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.map_err(|()| {
|
|
||||||
ExpandError::binding_error(
|
ExpandError::binding_error(
|
||||||
span.unwrap_or(delim_span.close),
|
span.unwrap_or(delim_span.close),
|
||||||
"expected literal",
|
"expected literal",
|
||||||
|
|
@ -861,8 +839,10 @@ fn match_meta_var(
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
_ => unreachable!(),
|
_ => unreachable!(),
|
||||||
};
|
}
|
||||||
return tt_result.map(|it| Some(Fragment::Tokens(it))).into();
|
.err();
|
||||||
|
let tt_result = input.from_savepoint(savepoint);
|
||||||
|
return ValueResult { value: Fragment::Tokens(tt_result), err };
|
||||||
}
|
}
|
||||||
MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
|
MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
|
||||||
MetaVarKind::Pat => parser::PrefixEntryPoint::PatTop,
|
MetaVarKind::Pat => parser::PrefixEntryPoint::PatTop,
|
||||||
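The `ident`/`tt`/`lifetime`/`literal` arms above now validate the input and then recover exactly what was consumed, via `input.savepoint()` before the check and `input.from_savepoint(savepoint)` after it, instead of cloning leaves into freshly built token trees. A sketch of the pattern, assuming purely for illustration that a savepoint is an index into a flat buffer:

    // Illustrative cursor over a flat buffer; the real `TtIter` savepoint is
    // similar in spirit but works over `tt` types.
    struct Cursor<'a, T> {
        buf: &'a [T],
        pos: usize,
    }

    struct Savepoint(usize);

    impl<'a, T> Cursor<'a, T> {
        fn savepoint(&self) -> Savepoint {
            Savepoint(self.pos)
        }

        fn next(&mut self) -> Option<&'a T> {
            let item = self.buf.get(self.pos)?;
            self.pos += 1;
            Some(item)
        }

        // Everything consumed since the savepoint, as a borrowed view.
        fn from_savepoint(&self, sp: Savepoint) -> &'a [T] {
            &self.buf[sp.0..self.pos]
        }
    }

    fn main() {
        let tokens = ["-", "1", ";"];
        let mut cursor = Cursor { buf: &tokens, pos: 0 };
        let sp = cursor.savepoint();
        cursor.next().unwrap(); // eat '-'
        cursor.next().unwrap(); // eat '1'
        assert_eq!(cursor.from_savepoint(sp), &["-", "1"][..]);
    }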
|
|
@ -873,7 +853,7 @@ fn match_meta_var(
|
||||||
MetaVarKind::Item => parser::PrefixEntryPoint::Item,
|
MetaVarKind::Item => parser::PrefixEntryPoint::Item,
|
||||||
MetaVarKind::Vis => parser::PrefixEntryPoint::Vis,
|
MetaVarKind::Vis => parser::PrefixEntryPoint::Vis,
|
||||||
};
|
};
|
||||||
expect_fragment(input, fragment, edition, delim_span).map(|it| it.map(Fragment::Tokens))
|
expect_fragment(input, fragment, edition, delim_span).map(Fragment::Tokens)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn collect_vars(collector_fun: &mut impl FnMut(Symbol), pattern: &MetaTemplate) {
|
fn collect_vars(collector_fun: &mut impl FnMut(Symbol), pattern: &MetaTemplate) {
|
||||||
|
|
@ -990,54 +970,31 @@ fn expect_separator<S: Copy>(iter: &mut TtIter<'_, S>, separator: &Separator) ->
|
||||||
ok
|
ok
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expect_tt<S: Copy>(iter: &mut TtIter<'_, S>) -> Result<tt::TokenTree<S>, ()> {
|
fn expect_tt<S: Copy>(iter: &mut TtIter<'_, S>) -> Result<(), ()> {
|
||||||
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = iter.peek_n(0) {
|
if let Some(TtElement::Leaf(tt::Leaf::Punct(punct))) = iter.peek() {
|
||||||
if punct.char == '\'' {
|
if punct.char == '\'' {
|
||||||
expect_lifetime(iter)
|
expect_lifetime(iter)?;
|
||||||
} else {
|
} else {
|
||||||
let puncts = iter.expect_glued_punct()?;
|
iter.expect_glued_punct()?;
|
||||||
let delimiter = tt::Delimiter {
|
|
||||||
open: puncts.first().unwrap().span,
|
|
||||||
close: puncts.last().unwrap().span,
|
|
||||||
kind: tt::DelimiterKind::Invisible,
|
|
||||||
};
|
|
||||||
let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect();
|
|
||||||
Ok(tt::TokenTree::Subtree(tt::Subtree { delimiter, token_trees }))
|
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
iter.next().ok_or(()).cloned()
|
iter.next().ok_or(())?;
|
||||||
}
|
}
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expect_lifetime<S: Copy>(iter: &mut TtIter<'_, S>) -> Result<tt::TokenTree<S>, ()> {
|
fn expect_lifetime<S: Copy>(iter: &mut TtIter<'_, S>) -> Result<(), ()> {
|
||||||
let punct = iter.expect_single_punct()?;
|
let punct = iter.expect_single_punct()?;
|
||||||
if punct.char != '\'' {
|
if punct.char != '\'' {
|
||||||
return Err(());
|
return Err(());
|
||||||
}
|
}
|
||||||
let ident = iter.expect_ident_or_underscore()?;
|
iter.expect_ident_or_underscore()?;
|
||||||
|
Ok(())
|
||||||
Ok(tt::Subtree {
|
|
||||||
delimiter: tt::Delimiter {
|
|
||||||
open: punct.span,
|
|
||||||
close: ident.span,
|
|
||||||
kind: tt::DelimiterKind::Invisible,
|
|
||||||
},
|
|
||||||
token_trees: Box::new([
|
|
||||||
tt::Leaf::Punct(*punct).into(),
|
|
||||||
tt::Leaf::Ident(ident.clone()).into(),
|
|
||||||
]),
|
|
||||||
}
|
|
||||||
.into())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn eat_char<S: Copy>(iter: &mut TtIter<'_, S>, c: char) -> Option<tt::TokenTree<S>> {
|
fn eat_char<S: Copy>(iter: &mut TtIter<'_, S>, c: char) {
|
||||||
let mut fork = iter.clone();
|
if matches!(iter.peek(), Some(TtElement::Leaf(tt::Leaf::Punct(tt::Punct { char, .. }))) if *char == c)
|
||||||
match fork.expect_char(c) {
|
{
|
||||||
Ok(_) => {
|
iter.next().expect("already peeked");
|
||||||
let tt = iter.next().cloned();
|
|
||||||
*iter = fork;
|
|
||||||
tt
|
|
||||||
}
|
|
||||||
Err(_) => None,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
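`expect_tt`, `expect_lifetime`, and `eat_char` above now only check and advance; they no longer build or clone token trees to return, because the caller recovers the matched tokens from the savepoint view instead. The new `eat_char` is the usual peek-then-consume shape, shown here on `std::iter::Peekable` as a stand-in for `TtIter`:

    use std::iter::Peekable;

    // Consume the next item only if it equals `c`; otherwise leave the
    // iterator untouched. Mirrors the shape of the new `eat_char`.
    fn eat_char<I: Iterator<Item = char>>(iter: &mut Peekable<I>, c: char) {
        if iter.peek() == Some(&c) {
            iter.next().expect("already peeked");
        }
    }

    fn main() {
        let mut it = "-12".chars().peekable();
        eat_char(&mut it, '-');
        assert_eq!(it.next(), Some('1'));
        eat_char(&mut it, '-'); // no '-' here, nothing is consumed
        assert_eq!(it.next(), Some('2'));
    }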
|
|
|
||||||
|
|
@ -3,7 +3,7 @@
|
||||||
|
|
||||||
use intern::{sym, Symbol};
|
use intern::{sym, Symbol};
|
||||||
use span::{Edition, Span};
|
use span::{Edition, Span};
|
||||||
use tt::Delimiter;
|
use tt::{iter::TtElement, Delimiter, TopSubtreeBuilder};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
expander::{Binding, Bindings, Fragment},
|
expander::{Binding, Bindings, Fragment},
|
||||||
|
|
@ -11,8 +11,8 @@ use crate::{
|
||||||
ExpandError, ExpandErrorKind, ExpandResult, MetaTemplate,
|
ExpandError, ExpandErrorKind, ExpandResult, MetaTemplate,
|
||||||
};
|
};
|
||||||
|
|
||||||
impl Bindings {
|
impl<'t> Bindings<'t> {
|
||||||
fn get(&self, name: &Symbol, span: Span) -> Result<&Binding, ExpandError> {
|
fn get(&self, name: &Symbol, span: Span) -> Result<&Binding<'t>, ExpandError> {
|
||||||
match self.inner.get(name) {
|
match self.inner.get(name) {
|
||||||
Some(binding) => Ok(binding),
|
Some(binding) => Ok(binding),
|
||||||
None => Err(ExpandError::new(
|
None => Err(ExpandError::new(
|
||||||
|
|
@ -28,7 +28,7 @@ impl Bindings {
|
||||||
mut span: Span,
|
mut span: Span,
|
||||||
nesting: &mut [NestingState],
|
nesting: &mut [NestingState],
|
||||||
marker: impl Fn(&mut Span),
|
marker: impl Fn(&mut Span),
|
||||||
) -> Result<Fragment, ExpandError> {
|
) -> Result<Fragment<'t>, ExpandError> {
|
||||||
macro_rules! binding_err {
|
macro_rules! binding_err {
|
||||||
($($arg:tt)*) => { ExpandError::binding_error(span, format!($($arg)*)) };
|
($($arg:tt)*) => { ExpandError::binding_error(span, format!($($arg)*)) };
|
||||||
}
|
}
|
||||||
|
|
@ -50,86 +50,61 @@ impl Bindings {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
match b {
|
match b {
|
||||||
Binding::Fragment(f @ (Fragment::Path(sub) | Fragment::Expr(sub))) => {
|
Binding::Fragment(f) => Ok(f.clone()),
|
||||||
let tt::Subtree { delimiter, token_trees } = sub;
|
|
||||||
marker(&mut span);
|
|
||||||
let subtree = tt::Subtree {
|
|
||||||
delimiter: tt::Delimiter {
|
|
||||||
// FIXME split span
|
|
||||||
open: span,
|
|
||||||
close: span,
|
|
||||||
kind: delimiter.kind,
|
|
||||||
},
|
|
||||||
token_trees: token_trees.clone(),
|
|
||||||
};
|
|
||||||
Ok(match f {
|
|
||||||
Fragment::Tokens(_) | Fragment::Empty => unreachable!(),
|
|
||||||
Fragment::Expr(_) => Fragment::Expr,
|
|
||||||
Fragment::Path(_) => Fragment::Path,
|
|
||||||
}(subtree))
|
|
||||||
}
|
|
||||||
Binding::Fragment(it @ (Fragment::Tokens(_) | Fragment::Empty)) => Ok(it.clone()),
|
|
||||||
// emit some reasonable default expansion for missing bindings,
|
// emit some reasonable default expansion for missing bindings,
|
||||||
// this gives better recovery than emitting the `$fragment-name` verbatim
|
// this gives better recovery than emitting the `$fragment-name` verbatim
|
||||||
Binding::Missing(it) => Ok({
|
Binding::Missing(it) => Ok({
|
||||||
marker(&mut span);
|
marker(&mut span);
|
||||||
|
let mut builder = TopSubtreeBuilder::new(tt::Delimiter::invisible_spanned(span));
|
||||||
match it {
|
match it {
|
||||||
MetaVarKind::Stmt => {
|
MetaVarKind::Stmt => {
|
||||||
Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
|
builder.push(tt::Leaf::Punct(tt::Punct {
|
||||||
span,
|
span,
|
||||||
char: ';',
|
char: ';',
|
||||||
spacing: tt::Spacing::Alone,
|
spacing: tt::Spacing::Alone,
|
||||||
})))
|
}));
|
||||||
|
}
|
||||||
|
MetaVarKind::Block => {
|
||||||
|
builder.open(tt::DelimiterKind::Brace, span);
|
||||||
|
builder.close(span);
|
||||||
}
|
}
|
||||||
MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
|
|
||||||
delimiter: tt::Delimiter {
|
|
||||||
open: span,
|
|
||||||
close: span,
|
|
||||||
kind: tt::DelimiterKind::Brace,
|
|
||||||
},
|
|
||||||
token_trees: Box::new([]),
|
|
||||||
})),
|
|
||||||
// FIXME: Meta and Item should get proper defaults
|
// FIXME: Meta and Item should get proper defaults
|
||||||
MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => {
|
MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => {}
|
||||||
Fragment::Empty
|
|
||||||
}
|
|
||||||
MetaVarKind::Path
|
MetaVarKind::Path
|
||||||
| MetaVarKind::Ty
|
| MetaVarKind::Ty
|
||||||
| MetaVarKind::Pat
|
| MetaVarKind::Pat
|
||||||
| MetaVarKind::PatParam
|
| MetaVarKind::PatParam
|
||||||
| MetaVarKind::Expr(_)
|
| MetaVarKind::Expr(_)
|
||||||
| MetaVarKind::Ident => {
|
| MetaVarKind::Ident => {
|
||||||
Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
|
builder.push(tt::Leaf::Ident(tt::Ident {
|
||||||
sym: sym::missing.clone(),
|
sym: sym::missing.clone(),
|
||||||
span,
|
span,
|
||||||
is_raw: tt::IdentIsRaw::No,
|
is_raw: tt::IdentIsRaw::No,
|
||||||
})))
|
}));
|
||||||
}
|
}
|
||||||
MetaVarKind::Lifetime => {
|
MetaVarKind::Lifetime => {
|
||||||
Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
|
builder.extend([
|
||||||
delimiter: tt::Delimiter::invisible_spanned(span),
|
tt::Leaf::Punct(tt::Punct {
|
||||||
token_trees: Box::new([
|
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
|
|
||||||
char: '\'',
|
char: '\'',
|
||||||
span,
|
span,
|
||||||
spacing: tt::Spacing::Joint,
|
spacing: tt::Spacing::Joint,
|
||||||
})),
|
}),
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
|
tt::Leaf::Ident(tt::Ident {
|
||||||
sym: sym::missing.clone(),
|
sym: sym::missing.clone(),
|
||||||
span,
|
span,
|
||||||
is_raw: tt::IdentIsRaw::No,
|
is_raw: tt::IdentIsRaw::No,
|
||||||
})),
|
}),
|
||||||
]),
|
]);
|
||||||
}))
|
|
||||||
}
|
}
|
||||||
MetaVarKind::Literal => {
|
MetaVarKind::Literal => {
|
||||||
Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
|
builder.push(tt::Leaf::Ident(tt::Ident {
|
||||||
sym: sym::missing.clone(),
|
sym: sym::missing.clone(),
|
||||||
span,
|
span,
|
||||||
is_raw: tt::IdentIsRaw::No,
|
is_raw: tt::IdentIsRaw::No,
|
||||||
})))
|
}));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Fragment::TokensOwned(builder.build())
|
||||||
}),
|
}),
|
||||||
Binding::Nested(_) => {
|
Binding::Nested(_) => {
|
||||||
Err(binding_err!("expected simple binding, found nested binding `{name}`"))
|
Err(binding_err!("expected simple binding, found nested binding `{name}`"))
|
||||||
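The missing-binding defaults above are now assembled with the commit's `TopSubtreeBuilder` (`push`, `open`, `close`, `extend`, then `build()` into a `Fragment::TokensOwned`) instead of hand-built `tt::Subtree` values. A standalone sketch of such a builder over a flat layout, with invented names and no delimiter or span tracking: `open` reserves the subtree's header entry and `close` patches in how many entries it ended up covering.

    #[derive(Debug)]
    enum Entry {
        Leaf(char),
        // Covers itself plus its contents, as in the flat layout.
        Open { len: usize },
    }

    struct Builder {
        entries: Vec<Entry>,
        open_stack: Vec<usize>,
    }

    impl Builder {
        fn new() -> Self {
            Builder { entries: Vec::new(), open_stack: Vec::new() }
        }

        fn push(&mut self, c: char) {
            self.entries.push(Entry::Leaf(c));
        }

        fn open(&mut self) {
            self.open_stack.push(self.entries.len());
            self.entries.push(Entry::Open { len: 0 });
        }

        fn close(&mut self) {
            let start = self.open_stack.pop().expect("unbalanced close");
            let len = self.entries.len() - start;
            if let Entry::Open { len: slot } = &mut self.entries[start] {
                *slot = len;
            }
        }

        fn build(self) -> Vec<Entry> {
            assert!(self.open_stack.is_empty(), "unclosed subtree");
            self.entries
        }
    }

    fn main() {
        // One delimited subtree holding `;`, followed by a plain leaf.
        let mut builder = Builder::new();
        builder.open();
        builder.push(';');
        builder.close();
        builder.push('m');
        println!("{:?}", builder.build());
    }

Because everything lands in one buffer, the owned tokens built for a missing binding can later be replayed by the same iteration machinery as borrowed fragments.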
|
|
@ -143,13 +118,13 @@ impl Bindings {
|
||||||
|
|
||||||
pub(super) fn transcribe(
|
pub(super) fn transcribe(
|
||||||
template: &MetaTemplate,
|
template: &MetaTemplate,
|
||||||
bindings: &Bindings,
|
bindings: &Bindings<'_>,
|
||||||
marker: impl Fn(&mut Span) + Copy,
|
marker: impl Fn(&mut Span) + Copy,
|
||||||
call_site: Span,
|
call_site: Span,
|
||||||
) -> ExpandResult<tt::Subtree<Span>> {
|
) -> ExpandResult<tt::TopSubtree<Span>> {
|
||||||
let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), call_site };
|
let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), call_site };
|
||||||
let mut arena: Vec<tt::TokenTree<Span>> = Vec::new();
|
let mut builder = tt::TopSubtreeBuilder::new(tt::Delimiter::invisible_spanned(ctx.call_site));
|
||||||
expand_subtree(&mut ctx, template, None, &mut arena, marker)
|
expand_subtree(&mut ctx, template, &mut builder, marker).map(|()| builder.build())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
|
|
@ -165,103 +140,97 @@ struct NestingState {
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
struct ExpandCtx<'a> {
|
struct ExpandCtx<'a> {
|
||||||
bindings: &'a Bindings,
|
bindings: &'a Bindings<'a>,
|
||||||
nesting: Vec<NestingState>,
|
nesting: Vec<NestingState>,
|
||||||
call_site: Span,
|
call_site: Span,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn expand_subtree_with_delimiter(
|
||||||
|
ctx: &mut ExpandCtx<'_>,
|
||||||
|
template: &MetaTemplate,
|
||||||
|
builder: &mut tt::TopSubtreeBuilder<Span>,
|
||||||
|
delimiter: Option<Delimiter<Span>>,
|
||||||
|
marker: impl Fn(&mut Span) + Copy,
|
||||||
|
) -> ExpandResult<()> {
|
||||||
|
let delimiter = delimiter.unwrap_or_else(|| tt::Delimiter::invisible_spanned(ctx.call_site));
|
||||||
|
builder.open(delimiter.kind, delimiter.open);
|
||||||
|
let result = expand_subtree(ctx, template, builder, marker);
|
||||||
|
builder.close(delimiter.close);
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
fn expand_subtree(
|
fn expand_subtree(
|
||||||
ctx: &mut ExpandCtx<'_>,
|
ctx: &mut ExpandCtx<'_>,
|
||||||
template: &MetaTemplate,
|
template: &MetaTemplate,
|
||||||
delimiter: Option<Delimiter<Span>>,
|
builder: &mut tt::TopSubtreeBuilder<Span>,
|
||||||
arena: &mut Vec<tt::TokenTree<Span>>,
|
|
||||||
marker: impl Fn(&mut Span) + Copy,
|
marker: impl Fn(&mut Span) + Copy,
|
||||||
) -> ExpandResult<tt::Subtree<Span>> {
|
) -> ExpandResult<()> {
|
||||||
// remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation
|
|
||||||
let start_elements = arena.len();
|
|
||||||
let mut err = None;
|
let mut err = None;
|
||||||
'ops: for op in template.iter() {
|
'ops: for op in template.iter() {
|
||||||
match op {
|
match op {
|
||||||
Op::Literal(it) => arena.push(
|
Op::Literal(it) => builder.push(tt::Leaf::from({
|
||||||
tt::Leaf::from({
|
|
||||||
let mut it = it.clone();
|
let mut it = it.clone();
|
||||||
marker(&mut it.span);
|
marker(&mut it.span);
|
||||||
it
|
it
|
||||||
})
|
})),
|
||||||
.into(),
|
Op::Ident(it) => builder.push(tt::Leaf::from({
|
||||||
),
|
|
||||||
Op::Ident(it) => arena.push(
|
|
||||||
tt::Leaf::from({
|
|
||||||
let mut it = it.clone();
|
let mut it = it.clone();
|
||||||
marker(&mut it.span);
|
marker(&mut it.span);
|
||||||
it
|
it
|
||||||
})
|
})),
|
||||||
.into(),
|
|
||||||
),
|
|
||||||
Op::Punct(puncts) => {
|
Op::Punct(puncts) => {
|
||||||
for punct in puncts.as_slice() {
|
builder.extend(puncts.iter().map(|punct| {
|
||||||
arena.push(
|
|
||||||
tt::Leaf::from({
|
tt::Leaf::from({
|
||||||
let mut it = *punct;
|
let mut it = *punct;
|
||||||
marker(&mut it.span);
|
marker(&mut it.span);
|
||||||
it
|
it
|
||||||
})
|
})
|
||||||
.into(),
|
}));
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
Op::Subtree { tokens, delimiter } => {
|
Op::Subtree { tokens, delimiter } => {
|
||||||
let mut delimiter = *delimiter;
|
let mut delimiter = *delimiter;
|
||||||
marker(&mut delimiter.open);
|
marker(&mut delimiter.open);
|
||||||
marker(&mut delimiter.close);
|
marker(&mut delimiter.close);
|
||||||
let ExpandResult { value: tt, err: e } =
|
let ExpandResult { value: (), err: e } =
|
||||||
expand_subtree(ctx, tokens, Some(delimiter), arena, marker);
|
expand_subtree_with_delimiter(ctx, tokens, builder, Some(delimiter), marker);
|
||||||
err = err.or(e);
|
err = err.or(e);
|
||||||
arena.push(tt.into());
|
|
||||||
}
|
}
|
||||||
Op::Var { name, id, .. } => {
|
Op::Var { name, id, .. } => {
|
||||||
let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id, marker);
|
let ExpandResult { value: (), err: e } =
|
||||||
|
expand_var(ctx, name, *id, builder, marker);
|
||||||
err = err.or(e);
|
err = err.or(e);
|
||||||
push_fragment(ctx, arena, fragment);
|
|
||||||
}
|
}
|
||||||
Op::Repeat { tokens: subtree, kind, separator } => {
|
Op::Repeat { tokens: subtree, kind, separator } => {
|
||||||
let ExpandResult { value: fragment, err: e } =
|
let ExpandResult { value: (), err: e } =
|
||||||
expand_repeat(ctx, subtree, *kind, separator.as_deref(), arena, marker);
|
expand_repeat(ctx, subtree, *kind, separator.as_deref(), builder, marker);
|
||||||
err = err.or(e);
|
err = err.or(e);
|
||||||
push_fragment(ctx, arena, fragment)
|
|
||||||
}
|
}
|
||||||
Op::Ignore { name, id } => {
|
Op::Ignore { name, id } => {
|
||||||
// Expand the variable, but ignore the result. This registers the repetition count.
|
// Expand the variable, but ignore the result. This registers the repetition count.
|
||||||
// FIXME: Any emitted errors are dropped.
|
// FIXME: Any emitted errors are dropped.
|
||||||
expand_var(ctx, name, *id, marker);
|
let _ = ctx.bindings.get_fragment(name, *id, &mut ctx.nesting, marker);
|
||||||
}
|
}
|
||||||
Op::Index { depth } => {
|
Op::Index { depth } => {
|
||||||
let index =
|
let index =
|
||||||
ctx.nesting.get(ctx.nesting.len() - 1 - depth).map_or(0, |nest| nest.idx);
|
ctx.nesting.get(ctx.nesting.len() - 1 - depth).map_or(0, |nest| nest.idx);
|
||||||
arena.push(
|
builder.push(tt::Leaf::Literal(tt::Literal {
|
||||||
tt::Leaf::Literal(tt::Literal {
|
|
||||||
symbol: Symbol::integer(index),
|
symbol: Symbol::integer(index),
|
||||||
span: ctx.call_site,
|
span: ctx.call_site,
|
||||||
kind: tt::LitKind::Integer,
|
kind: tt::LitKind::Integer,
|
||||||
suffix: None,
|
suffix: None,
|
||||||
})
|
}));
|
||||||
.into(),
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
Op::Len { depth } => {
|
Op::Len { depth } => {
|
||||||
let length = ctx.nesting.get(ctx.nesting.len() - 1 - depth).map_or(0, |_nest| {
|
let length = ctx.nesting.get(ctx.nesting.len() - 1 - depth).map_or(0, |_nest| {
|
||||||
// FIXME: to be implemented
|
// FIXME: to be implemented
|
||||||
0
|
0
|
||||||
});
|
});
|
||||||
arena.push(
|
builder.push(tt::Leaf::Literal(tt::Literal {
|
||||||
tt::Leaf::Literal(tt::Literal {
|
|
||||||
symbol: Symbol::integer(length),
|
symbol: Symbol::integer(length),
|
||||||
span: ctx.call_site,
|
span: ctx.call_site,
|
||||||
kind: tt::LitKind::Integer,
|
kind: tt::LitKind::Integer,
|
||||||
suffix: None,
|
suffix: None,
|
||||||
})
|
}));
|
||||||
.into(),
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
Op::Count { name, depth } => {
|
Op::Count { name, depth } => {
|
||||||
let mut binding = match ctx.bindings.get(name, ctx.call_site) {
|
let mut binding = match ctx.bindings.get(name, ctx.call_site) {
|
||||||
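`expand_subtree` above now writes directly into the shared builder, and the new `expand_subtree_with_delimiter` brackets that call with `builder.open(..)` and `builder.close(..)` so the emitted subtree stays balanced even when the inner expansion reports an error. The same shape as a hedged sketch, with a plain `Vec<String>` standing in for the builder:

    // Wrap a recursive expansion in open/close so the output stays balanced,
    // even if the body reports an error.
    fn with_delimiter<E>(
        out: &mut Vec<String>,
        open: &str,
        close: &str,
        body: impl FnOnce(&mut Vec<String>) -> Result<(), E>,
    ) -> Result<(), E> {
        out.push(open.to_string());
        let result = body(out);
        out.push(close.to_string());
        result
    }

    fn main() {
        let mut out = Vec::new();
        let result = with_delimiter(&mut out, "(", ")", |out| {
            out.push("x".to_string());
            Ok::<(), ()>(())
        });
        assert!(result.is_ok());
        assert_eq!(out, ["(", "x", ")"]);
    }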
|
|
@ -302,15 +271,12 @@ fn expand_subtree(
|
||||||
|
|
||||||
let res = count(binding, 0, depth.unwrap_or(0));
|
let res = count(binding, 0, depth.unwrap_or(0));
|
||||||
|
|
||||||
arena.push(
|
builder.push(tt::Leaf::Literal(tt::Literal {
|
||||||
tt::Leaf::Literal(tt::Literal {
|
|
||||||
symbol: Symbol::integer(res),
|
symbol: Symbol::integer(res),
|
||||||
span: ctx.call_site,
|
span: ctx.call_site,
|
||||||
suffix: None,
|
suffix: None,
|
||||||
kind: tt::LitKind::Integer,
|
kind: tt::LitKind::Integer,
|
||||||
})
|
}));
|
||||||
.into(),
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
Op::Concat { elements, span: concat_span } => {
|
Op::Concat { elements, span: concat_span } => {
|
||||||
let mut concatenated = String::new();
|
let mut concatenated = String::new();
|
||||||
|
|
@ -342,11 +308,22 @@ fn expand_subtree(
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
let value = match &var_value {
|
let values = match &var_value {
|
||||||
Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => {
|
Fragment::Tokens(tokens) => {
|
||||||
|
let mut iter = tokens.iter();
|
||||||
|
(iter.next(), iter.next())
|
||||||
|
}
|
||||||
|
Fragment::TokensOwned(tokens) => {
|
||||||
|
let mut iter = tokens.iter();
|
||||||
|
(iter.next(), iter.next())
|
||||||
|
}
|
||||||
|
_ => (None, None),
|
||||||
|
};
|
||||||
|
let value = match values {
|
||||||
|
(Some(TtElement::Leaf(tt::Leaf::Ident(ident))), None) => {
|
||||||
ident.sym.as_str()
|
ident.sym.as_str()
|
||||||
}
|
}
|
||||||
Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => {
|
(Some(TtElement::Leaf(tt::Leaf::Literal(lit))), None) => {
|
||||||
lit.symbol.as_str()
|
lit.symbol.as_str()
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
|
|
@ -382,36 +359,53 @@ fn expand_subtree(
|
||||||
let needs_raw =
|
let needs_raw =
|
||||||
parser::SyntaxKind::from_keyword(&concatenated, Edition::LATEST).is_some();
|
parser::SyntaxKind::from_keyword(&concatenated, Edition::LATEST).is_some();
|
||||||
let is_raw = if needs_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No };
|
let is_raw = if needs_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No };
|
||||||
arena.push(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
|
builder.push(tt::Leaf::Ident(tt::Ident {
|
||||||
is_raw,
|
is_raw,
|
||||||
span: result_span,
|
span: result_span,
|
||||||
sym: Symbol::intern(&concatenated),
|
sym: Symbol::intern(&concatenated),
|
||||||
})));
|
}));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// drain the elements added in this instance of expand_subtree
|
ExpandResult { value: (), err }
|
||||||
let tts = arena.drain(start_elements..).collect();
|
|
||||||
ExpandResult {
|
|
||||||
value: tt::Subtree {
|
|
||||||
delimiter: delimiter.unwrap_or_else(|| tt::Delimiter::invisible_spanned(ctx.call_site)),
|
|
||||||
token_trees: tts,
|
|
||||||
},
|
|
||||||
err,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_var(
|
fn expand_var(
|
||||||
ctx: &mut ExpandCtx<'_>,
|
ctx: &mut ExpandCtx<'_>,
|
||||||
v: &Symbol,
|
v: &Symbol,
|
||||||
id: Span,
|
id: Span,
|
||||||
marker: impl Fn(&mut Span),
|
builder: &mut tt::TopSubtreeBuilder<Span>,
|
||||||
) -> ExpandResult<Fragment> {
|
marker: impl Fn(&mut Span) + Copy,
|
||||||
|
) -> ExpandResult<()> {
|
||||||
// We already handle $crate case in mbe parser
|
// We already handle $crate case in mbe parser
|
||||||
debug_assert!(*v != sym::crate_);
|
debug_assert!(*v != sym::crate_);
|
||||||
|
|
||||||
match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) {
|
match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) {
|
||||||
Ok(it) => ExpandResult::ok(it),
|
Ok(fragment) => {
|
||||||
|
match fragment {
|
||||||
|
Fragment::Tokens(tt) => builder.extend_with_tt(tt.strip_invisible()),
|
||||||
|
Fragment::TokensOwned(tt) => builder.extend_with_tt(tt.view().strip_invisible()),
|
||||||
|
Fragment::Expr(sub) => {
|
||||||
|
let sub = sub.strip_invisible();
|
||||||
|
let mut span = id;
|
||||||
|
marker(&mut span);
|
||||||
|
let wrap_in_parens = !matches!(sub.flat_tokens(), [tt::TokenTree::Leaf(_)])
|
||||||
|
&& sub.try_into_subtree().map_or(true, |it| {
|
||||||
|
it.top_subtree().delimiter.kind == tt::DelimiterKind::Invisible
|
||||||
|
});
|
||||||
|
if wrap_in_parens {
|
||||||
|
builder.open(tt::DelimiterKind::Parenthesis, span);
|
||||||
|
}
|
||||||
|
builder.extend_with_tt(sub);
|
||||||
|
if wrap_in_parens {
|
||||||
|
builder.close(span);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Fragment::Path(tt) => fix_up_and_push_path_tt(ctx, builder, tt),
|
||||||
|
Fragment::Empty => (),
|
||||||
|
};
|
||||||
|
ExpandResult::ok(())
|
||||||
|
}
|
||||||
Err(e) if matches!(e.inner.1, ExpandErrorKind::UnresolvedBinding(_)) => {
|
Err(e) if matches!(e.inner.1, ExpandErrorKind::UnresolvedBinding(_)) => {
|
||||||
// Note that it is possible to have a `$var` inside a macro which is not bound.
|
// Note that it is possible to have a `$var` inside a macro which is not bound.
|
||||||
// For example:
|
// For example:
|
||||||
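The `Fragment::Expr` arm above re-emits the bound expression from its borrowed tokens, so the transcriber decides on the spot whether to parenthesize: a single leaf, or a fragment that already carries a real delimiter, is pasted as-is, while anything else is wrapped so operator precedence at the use site is preserved. A rough string-level illustration of why the wrapping matters (the real check inspects token-tree structure, not strings):

    fn paste_expr(out: &mut String, expr_tokens: &[&str], already_delimited: bool) {
        // Wrap multi-token, undelimited fragments in parentheses.
        let wrap = expr_tokens.len() > 1 && !already_delimited;
        if wrap {
            out.push('(');
        }
        out.push_str(&expr_tokens.join(" "));
        if wrap {
            out.push(')');
        }
    }

    fn main() {
        // `$e * 2` with `$e` bound to `1 + 1` must expand to `(1 + 1) * 2`.
        let mut out = String::new();
        paste_expr(&mut out, &["1", "+", "1"], false);
        out.push_str(" * 2");
        assert_eq!(out, "(1 + 1) * 2");
    }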
|
|
@ -426,29 +420,13 @@ fn expand_var(
|
||||||
// }
|
// }
|
||||||
// ```
|
// ```
|
||||||
// We just treat it as normal tokens
|
// We just treat it as normal tokens
|
||||||
let tt = tt::Subtree {
|
builder.extend([
|
||||||
delimiter: tt::Delimiter::invisible_spanned(id),
|
tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id }),
|
||||||
token_trees: Box::new([
|
tt::Leaf::from(tt::Ident { sym: v.clone(), span: id, is_raw: tt::IdentIsRaw::No }),
|
||||||
tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id })
|
]);
|
||||||
.into(),
|
ExpandResult::ok(())
|
||||||
tt::Leaf::from(tt::Ident {
|
|
||||||
sym: v.clone(),
|
|
||||||
span: id,
|
|
||||||
is_raw: tt::IdentIsRaw::No,
|
|
||||||
})
|
|
||||||
.into(),
|
|
||||||
]),
|
|
||||||
}
|
}
|
||||||
.into();
|
Err(e) => ExpandResult::only_err(e),
|
||||||
ExpandResult::ok(Fragment::Tokens(tt))
|
|
||||||
}
|
|
||||||
Err(e) => ExpandResult {
|
|
||||||
value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty(tt::DelimSpan {
|
|
||||||
open: ctx.call_site,
|
|
||||||
close: ctx.call_site,
|
|
||||||
}))),
|
|
||||||
err: Some(e),
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -457,21 +435,20 @@ fn expand_repeat(
|
||||||
template: &MetaTemplate,
|
template: &MetaTemplate,
|
||||||
kind: RepeatKind,
|
kind: RepeatKind,
|
||||||
separator: Option<&Separator>,
|
separator: Option<&Separator>,
|
||||||
arena: &mut Vec<tt::TokenTree<Span>>,
|
builder: &mut tt::TopSubtreeBuilder<Span>,
|
||||||
marker: impl Fn(&mut Span) + Copy,
|
marker: impl Fn(&mut Span) + Copy,
|
||||||
) -> ExpandResult<Fragment> {
|
) -> ExpandResult<()> {
|
||||||
let mut buf: Vec<tt::TokenTree<Span>> = Vec::new();
|
|
||||||
ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false });
|
ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false });
|
||||||
// Dirty hack to make macro-expansion terminate.
|
// Dirty hack to make macro-expansion terminate.
|
||||||
// This should be replaced by a proper macro-by-example implementation
|
// This should be replaced by a proper macro-by-example implementation
|
||||||
let limit = 65536;
|
let limit = 65536;
|
||||||
let mut has_seps = 0;
|
|
||||||
let mut counter = 0;
|
let mut counter = 0;
|
||||||
let mut err = None;
|
let mut err = None;
|
||||||
|
|
||||||
|
let mut restore_point = builder.restore_point();
|
||||||
loop {
|
loop {
|
||||||
let ExpandResult { value: mut t, err: e } =
|
let ExpandResult { value: (), err: e } =
|
||||||
expand_subtree(ctx, template, None, arena, marker);
|
expand_subtree_with_delimiter(ctx, template, builder, None, marker);
|
||||||
let nesting_state = ctx.nesting.last_mut().unwrap();
|
let nesting_state = ctx.nesting.last_mut().unwrap();
|
||||||
if nesting_state.at_end || !nesting_state.hit {
|
if nesting_state.at_end || !nesting_state.hit {
|
||||||
break;
|
break;
|
||||||
|
|
@ -479,6 +456,10 @@ fn expand_repeat(
|
||||||
nesting_state.idx += 1;
|
nesting_state.idx += 1;
|
||||||
nesting_state.hit = false;
|
nesting_state.hit = false;
|
||||||
|
|
||||||
|
builder.remove_last_subtree_if_invisible();
|
||||||
|
|
||||||
|
restore_point = builder.restore_point();
|
||||||
|
|
||||||
counter += 1;
|
counter += 1;
|
||||||
if counter == limit {
|
if counter == limit {
|
||||||
tracing::warn!(
|
tracing::warn!(
|
||||||
|
|
@ -486,16 +467,8 @@ fn expand_repeat(
|
||||||
template,
|
template,
|
||||||
ctx
|
ctx
|
||||||
);
|
);
|
||||||
return ExpandResult {
|
err = Some(ExpandError::new(ctx.call_site, ExpandErrorKind::LimitExceeded));
|
||||||
value: Fragment::Tokens(
|
break;
|
||||||
tt::Subtree {
|
|
||||||
delimiter: tt::Delimiter::invisible_spanned(ctx.call_site),
|
|
||||||
token_trees: Box::new([]),
|
|
||||||
}
|
|
||||||
.into(),
|
|
||||||
),
|
|
||||||
err: Some(ExpandError::new(ctx.call_site, ExpandErrorKind::LimitExceeded)),
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if e.is_some() {
|
if e.is_some() {
|
||||||
|
|
@ -503,24 +476,14 @@ fn expand_repeat(
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
t.delimiter.kind = tt::DelimiterKind::Invisible;
|
|
||||||
push_subtree(&mut buf, t);
|
|
||||||
|
|
||||||
if let Some(sep) = separator {
|
if let Some(sep) = separator {
|
||||||
has_seps = match sep {
|
match sep {
|
||||||
Separator::Ident(ident) => {
|
Separator::Ident(ident) => builder.push(tt::Leaf::from(ident.clone())),
|
||||||
buf.push(tt::Leaf::from(ident.clone()).into());
|
Separator::Literal(lit) => builder.push(tt::Leaf::from(lit.clone())),
|
||||||
1
|
|
||||||
}
|
|
||||||
Separator::Literal(lit) => {
|
|
||||||
buf.push(tt::Leaf::from(lit.clone()).into());
|
|
||||||
1
|
|
||||||
}
|
|
||||||
Separator::Puncts(puncts) => {
|
Separator::Puncts(puncts) => {
|
||||||
for &punct in puncts {
|
for &punct in puncts {
|
||||||
buf.push(tt::Leaf::from(punct).into());
|
builder.push(tt::Leaf::from(punct));
|
||||||
}
|
}
|
||||||
puncts.len()
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
@ -529,46 +492,18 @@ fn expand_repeat(
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
// Lose the last separator and last after-the-end round.
|
||||||
|
builder.restore(restore_point);
|
||||||
|
|
||||||
ctx.nesting.pop().unwrap();
|
ctx.nesting.pop().unwrap();
|
||||||
for _ in 0..has_seps {
|
|
||||||
buf.pop();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if it is a single token subtree without any delimiter
|
// Check if it is a single token subtree without any delimiter
|
||||||
// e.g {Delimiter:None> ['>'] /Delimiter:None>}
|
// e.g {Delimiter:None> ['>'] /Delimiter:None>}
|
||||||
let tt = tt::Subtree {
|
|
||||||
delimiter: tt::Delimiter::invisible_spanned(ctx.call_site),
|
|
||||||
token_trees: buf.into_boxed_slice(),
|
|
||||||
};
|
|
||||||
|
|
||||||
if RepeatKind::OneOrMore == kind && counter == 0 {
|
if RepeatKind::OneOrMore == kind && counter == 0 && err.is_none() {
|
||||||
let span = tt.delimiter.open;
|
err = Some(ExpandError::new(ctx.call_site, ExpandErrorKind::UnexpectedToken));
|
||||||
return ExpandResult {
|
|
||||||
value: Fragment::Tokens(tt.into()),
|
|
||||||
err: Some(ExpandError::new(span, ExpandErrorKind::UnexpectedToken)),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
ExpandResult { value: Fragment::Tokens(tt.into()), err }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn push_fragment(ctx: &ExpandCtx<'_>, buf: &mut Vec<tt::TokenTree<Span>>, fragment: Fragment) {
|
|
||||||
match fragment {
|
|
||||||
Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt),
|
|
||||||
Fragment::Expr(sub) => {
|
|
||||||
push_subtree(buf, sub);
|
|
||||||
}
|
|
||||||
Fragment::Path(tt) => fix_up_and_push_path_tt(ctx, buf, tt),
|
|
||||||
Fragment::Tokens(tt) => buf.push(tt),
|
|
||||||
Fragment::Empty => (),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn push_subtree(buf: &mut Vec<tt::TokenTree<Span>>, tt: tt::Subtree<Span>) {
|
|
||||||
match tt.delimiter.kind {
|
|
||||||
tt::DelimiterKind::Invisible => buf.extend(Vec::from(tt.token_trees)),
|
|
||||||
_ => buf.push(tt.into()),
|
|
||||||
}
|
}
|
||||||
|
ExpandResult { value: (), err }
|
||||||
}
|
}
|
||||||
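`expand_repeat` above no longer buffers each round in a temporary `Vec` and pops trailing separator tokens afterwards; it records `builder.restore_point()` after every complete round and calls `builder.restore(..)` when the loop stops, dropping the dangling separator and the aborted after-the-end round in one truncation. A sketch of that rollback, assuming purely for illustration that a restore point is the current output length:

    struct Buffer {
        out: Vec<String>,
    }

    struct RestorePoint(usize);

    impl Buffer {
        fn push(&mut self, s: &str) {
            self.out.push(s.to_string());
        }
        fn restore_point(&self) -> RestorePoint {
            RestorePoint(self.out.len())
        }
        fn restore(&mut self, point: RestorePoint) {
            self.out.truncate(point.0);
        }
    }

    fn main() {
        // Expanding `$(a),*` for two repetitions: each round emits the body
        // and then a separator; rolling back to the point taken after the
        // last complete body drops the trailing `,`.
        let mut buf = Buffer { out: Vec::new() };
        let mut restore_point = buf.restore_point();
        let mut rounds = 0;
        loop {
            if rounds == 2 {
                // Nothing left to match this round: roll back.
                break;
            }
            buf.push("a");
            rounds += 1;
            restore_point = buf.restore_point();
            buf.push(",");
        }
        buf.restore(restore_point);
        assert_eq!(buf.out, ["a", ",", "a"]);
    }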
|
|
||||||
/// Inserts the path separator `::` between an identifier and its following generic
|
/// Inserts the path separator `::` between an identifier and its following generic
|
||||||
|
|
@ -576,47 +511,45 @@ fn push_subtree(buf: &mut Vec<tt::TokenTree<Span>>, tt: tt::Subtree<Span>) {
|
||||||
/// we need this fixup.
|
/// we need this fixup.
|
||||||
fn fix_up_and_push_path_tt(
|
fn fix_up_and_push_path_tt(
|
||||||
ctx: &ExpandCtx<'_>,
|
ctx: &ExpandCtx<'_>,
|
||||||
buf: &mut Vec<tt::TokenTree<Span>>,
|
builder: &mut tt::TopSubtreeBuilder<Span>,
|
||||||
subtree: tt::Subtree<Span>,
|
subtree: tt::TokenTreesView<'_, Span>,
|
||||||
) {
|
) {
|
||||||
stdx::always!(matches!(subtree.delimiter.kind, tt::DelimiterKind::Invisible));
|
|
||||||
let mut prev_was_ident = false;
|
let mut prev_was_ident = false;
|
||||||
// Note that we only need to fix up the top-level `TokenTree`s because the
|
// Note that we only need to fix up the top-level `TokenTree`s because the
|
||||||
// context of the paths in the descendant `Subtree`s won't be changed by the
|
// context of the paths in the descendant `Subtree`s won't be changed by the
|
||||||
// mbe transcription.
|
// mbe transcription.
|
||||||
for tt in Vec::from(subtree.token_trees) {
|
let mut iter = subtree.iter();
|
||||||
|
while let Some(tt) = iter.next_as_view() {
|
||||||
if prev_was_ident {
|
if prev_was_ident {
|
||||||
// Pedantically, `(T) -> U` in `FnOnce(T) -> U` is treated as a generic
|
// Pedantically, `(T) -> U` in `FnOnce(T) -> U` is treated as a generic
|
||||||
// argument list and thus needs `::` between it and `FnOnce`. However in
|
// argument list and thus needs `::` between it and `FnOnce`. However in
|
||||||
// today's Rust this type of path *semantically* cannot appear as a
|
// today's Rust this type of path *semantically* cannot appear as a
|
||||||
// top-level expression-context path, so we can safely ignore it.
|
// top-level expression-context path, so we can safely ignore it.
|
||||||
if let tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '<', .. })) = tt {
|
if let [tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '<', .. }))] =
|
||||||
buf.push(
|
tt.flat_tokens()
|
||||||
|
{
|
||||||
|
builder.extend([
|
||||||
tt::Leaf::Punct(tt::Punct {
|
tt::Leaf::Punct(tt::Punct {
|
||||||
char: ':',
|
char: ':',
|
||||||
spacing: tt::Spacing::Joint,
|
spacing: tt::Spacing::Joint,
|
||||||
span: ctx.call_site,
|
span: ctx.call_site,
|
||||||
})
|
}),
|
||||||
.into(),
|
|
||||||
);
|
|
||||||
buf.push(
|
|
||||||
tt::Leaf::Punct(tt::Punct {
|
tt::Leaf::Punct(tt::Punct {
|
||||||
char: ':',
|
char: ':',
|
||||||
spacing: tt::Spacing::Alone,
|
spacing: tt::Spacing::Alone,
|
||||||
span: ctx.call_site,
|
span: ctx.call_site,
|
||||||
})
|
}),
|
||||||
.into(),
|
]);
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
prev_was_ident = matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Ident(_)));
|
prev_was_ident = matches!(tt.flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(_))]);
|
||||||
buf.push(tt);
|
builder.extend_with_tt(tt);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
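`fix_up_and_push_path_tt` above keeps its old job: when a transcribed `$path` puts an identifier directly before `<`, it splices in `::` so the result still parses as a path with generic arguments in expression position; only the plumbing changed to `TokenTreesView` and `extend_with_tt`. A crude string-token version of the same scan (the identifier test here is deliberately simplistic):

    // Insert `::` between an identifier and a following `<`, so e.g.
    // `Vec<u8>` pasted in expression position becomes `Vec::<u8>`.
    fn fix_up_path(tokens: &[&str]) -> Vec<String> {
        let mut out: Vec<String> = Vec::new();
        let mut prev_was_ident = false;
        for &tok in tokens {
            if prev_was_ident && tok == "<" {
                out.push("::".to_string());
            }
            prev_was_ident = tok.chars().all(|c| c.is_alphanumeric() || c == '_')
                && tok.chars().next().map_or(false, |c| !c.is_ascii_digit());
            out.push(tok.to_string());
        }
        out
    }

    fn main() {
        assert_eq!(fix_up_path(&["Vec", "<", "u8", ">"]), ["Vec", "::", "<", "u8", ">"]);
    }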
|
|
||||||
/// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth
|
/// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth
|
||||||
/// defined by the metavar expression.
|
/// defined by the metavar expression.
|
||||||
fn count(binding: &Binding, depth_curr: usize, depth_max: usize) -> usize {
|
fn count(binding: &Binding<'_>, depth_curr: usize, depth_max: usize) -> usize {
|
||||||
match binding {
|
match binding {
|
||||||
Binding::Nested(bs) => {
|
Binding::Nested(bs) => {
|
||||||
if depth_curr == depth_max {
|
if depth_curr == depth_max {
|
||||||
|
|
|
||||||
|
|
@ -148,17 +148,17 @@ impl DeclarativeMacro {
|
||||||
|
|
||||||
/// The old, `macro_rules! m {}` flavor.
|
/// The old, `macro_rules! m {}` flavor.
|
||||||
pub fn parse_macro_rules(
|
pub fn parse_macro_rules(
|
||||||
tt: &tt::Subtree<Span>,
|
tt: &tt::TopSubtree<Span>,
|
||||||
ctx_edition: impl Copy + Fn(SyntaxContextId) -> Edition,
|
ctx_edition: impl Copy + Fn(SyntaxContextId) -> Edition,
|
||||||
) -> DeclarativeMacro {
|
) -> DeclarativeMacro {
|
||||||
// Note: this parsing can be implemented using mbe machinery itself, by
|
// Note: this parsing can be implemented using mbe machinery itself, by
|
||||||
// matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
|
// matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
|
||||||
// manually seems easier.
|
// manually seems easier.
|
||||||
let mut src = TtIter::new(tt);
|
let mut src = tt.iter();
|
||||||
let mut rules = Vec::new();
|
let mut rules = Vec::new();
|
||||||
let mut err = None;
|
let mut err = None;
|
||||||
|
|
||||||
while src.len() > 0 {
|
while !src.is_empty() {
|
||||||
let rule = match Rule::parse(ctx_edition, &mut src) {
|
let rule = match Rule::parse(ctx_edition, &mut src) {
|
||||||
Ok(it) => it,
|
Ok(it) => it,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
|
|
@ -168,7 +168,7 @@ impl DeclarativeMacro {
|
||||||
};
|
};
|
||||||
rules.push(rule);
|
rules.push(rule);
|
||||||
if let Err(()) = src.expect_char(';') {
|
if let Err(()) = src.expect_char(';') {
|
||||||
if src.len() > 0 {
|
if !src.is_empty() {
|
||||||
err = Some(Box::new(ParseError::expected("expected `;`")));
|
err = Some(Box::new(ParseError::expected("expected `;`")));
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
|
|
@ -187,8 +187,8 @@ impl DeclarativeMacro {
|
||||||
|
|
||||||
/// The new, unstable `macro m {}` flavor.
|
/// The new, unstable `macro m {}` flavor.
|
||||||
pub fn parse_macro2(
|
pub fn parse_macro2(
|
||||||
args: Option<&tt::Subtree<Span>>,
|
args: Option<&tt::TopSubtree<Span>>,
|
||||||
body: &tt::Subtree<Span>,
|
body: &tt::TopSubtree<Span>,
|
||||||
ctx_edition: impl Copy + Fn(SyntaxContextId) -> Edition,
|
ctx_edition: impl Copy + Fn(SyntaxContextId) -> Edition,
|
||||||
) -> DeclarativeMacro {
|
) -> DeclarativeMacro {
|
||||||
let mut rules = Vec::new();
|
let mut rules = Vec::new();
|
||||||
|
|
@ -198,8 +198,8 @@ impl DeclarativeMacro {
|
||||||
cov_mark::hit!(parse_macro_def_simple);
|
cov_mark::hit!(parse_macro_def_simple);
|
||||||
|
|
||||||
let rule = (|| {
|
let rule = (|| {
|
||||||
let lhs = MetaTemplate::parse_pattern(ctx_edition, args)?;
|
let lhs = MetaTemplate::parse_pattern(ctx_edition, args.iter())?;
|
||||||
let rhs = MetaTemplate::parse_template(ctx_edition, body)?;
|
let rhs = MetaTemplate::parse_template(ctx_edition, body.iter())?;
|
||||||
|
|
||||||
Ok(crate::Rule { lhs, rhs })
|
Ok(crate::Rule { lhs, rhs })
|
||||||
})();
|
})();
|
||||||
|
|
@ -210,8 +210,8 @@ impl DeclarativeMacro {
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
cov_mark::hit!(parse_macro_def_rules);
|
cov_mark::hit!(parse_macro_def_rules);
|
||||||
let mut src = TtIter::new(body);
|
let mut src = body.iter();
|
||||||
while src.len() > 0 {
|
while !src.is_empty() {
|
||||||
let rule = match Rule::parse(ctx_edition, &mut src) {
|
let rule = match Rule::parse(ctx_edition, &mut src) {
|
||||||
Ok(it) => it,
|
Ok(it) => it,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
|
|
@ -221,7 +221,7 @@ impl DeclarativeMacro {
|
||||||
};
|
};
|
||||||
rules.push(rule);
|
rules.push(rule);
|
||||||
if let Err(()) = src.expect_any_char(&[';', ',']) {
|
if let Err(()) = src.expect_any_char(&[';', ',']) {
|
||||||
if src.len() > 0 {
|
if !src.is_empty() {
|
||||||
err = Some(Box::new(ParseError::expected(
|
err = Some(Box::new(ParseError::expected(
|
||||||
"expected `;` or `,` to delimit rules",
|
"expected `;` or `,` to delimit rules",
|
||||||
)));
|
)));
|
||||||
|
|
@ -251,11 +251,11 @@ impl DeclarativeMacro {
|
||||||
|
|
||||||
pub fn expand(
|
pub fn expand(
|
||||||
&self,
|
&self,
|
||||||
tt: &tt::Subtree<Span>,
|
tt: &tt::TopSubtree<Span>,
|
||||||
marker: impl Fn(&mut Span) + Copy,
|
marker: impl Fn(&mut Span) + Copy,
|
||||||
call_site: Span,
|
call_site: Span,
|
||||||
def_site_edition: Edition,
|
def_site_edition: Edition,
|
||||||
) -> ExpandResult<(tt::Subtree<Span>, MatchedArmIndex)> {
|
) -> ExpandResult<(tt::TopSubtree<Span>, MatchedArmIndex)> {
|
||||||
expander::expand_rules(&self.rules, tt, marker, call_site, def_site_edition)
|
expander::expand_rules(&self.rules, tt, marker, call_site, def_site_edition)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -265,10 +265,12 @@ impl Rule {
|
||||||
edition: impl Copy + Fn(SyntaxContextId) -> Edition,
|
edition: impl Copy + Fn(SyntaxContextId) -> Edition,
|
||||||
src: &mut TtIter<'_, Span>,
|
src: &mut TtIter<'_, Span>,
|
||||||
) -> Result<Self, ParseError> {
|
) -> Result<Self, ParseError> {
|
||||||
let lhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
|
let (_, lhs) =
|
||||||
|
src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
|
||||||
src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?;
|
src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?;
|
||||||
src.expect_char('>').map_err(|()| ParseError::expected("expected `>`"))?;
|
src.expect_char('>').map_err(|()| ParseError::expected("expected `>`"))?;
|
||||||
let rhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
|
let (_, rhs) =
|
||||||
|
src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
|
||||||
|
|
||||||
let lhs = MetaTemplate::parse_pattern(edition, lhs)?;
|
let lhs = MetaTemplate::parse_pattern(edition, lhs)?;
|
||||||
let rhs = MetaTemplate::parse_template(edition, rhs)?;
|
let rhs = MetaTemplate::parse_template(edition, rhs)?;
|
||||||
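`Rule::parse` above now destructures `expect_subtree()` into a pair: the subtree itself (for its delimiter) and an iterator over its contents, because with the flat storage the contents are a sub-range of the same buffer rather than a separately boxed `tt::Subtree`. A sketch of that pairing with invented types:

    // "Expect a subtree" in a flat layout: return the header entry, a view of
    // the contents, and the tokens after the subtree.
    #[derive(Debug)]
    enum Tok {
        Leaf(char),
        Open { len: usize }, // covers itself plus its contents
    }

    fn expect_subtree(tokens: &[Tok]) -> Result<(&Tok, &[Tok], &[Tok]), ()> {
        match tokens.first() {
            Some(header @ Tok::Open { len }) => {
                Ok((header, &tokens[1..*len], &tokens[*len..]))
            }
            _ => Err(()),
        }
    }

    fn main() {
        // `( a ) b`
        let tokens = [Tok::Open { len: 2 }, Tok::Leaf('a'), Tok::Leaf('b')];
        let (header, contents, rest) = expect_subtree(&tokens).unwrap();
        assert!(matches!(header, Tok::Open { len: 2 }));
        assert!(matches!(contents, [Tok::Leaf('a')]));
        assert!(matches!(rest, [Tok::Leaf('b')]));
    }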
|
|
@ -359,17 +361,17 @@ impl<T: Default, E> From<Result<T, E>> for ValueResult<T, E> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expect_fragment(
|
pub fn expect_fragment<'t>(
|
||||||
tt_iter: &mut TtIter<'_, Span>,
|
tt_iter: &mut TtIter<'t, Span>,
|
||||||
entry_point: ::parser::PrefixEntryPoint,
|
entry_point: ::parser::PrefixEntryPoint,
|
||||||
edition: ::parser::Edition,
|
edition: ::parser::Edition,
|
||||||
delim_span: DelimSpan<Span>,
|
delim_span: DelimSpan<Span>,
|
||||||
) -> ExpandResult<Option<tt::TokenTree<Span>>> {
|
) -> ExpandResult<tt::TokenTreesView<'t, Span>> {
|
||||||
use ::parser;
|
use ::parser;
|
||||||
let buffer = tt::buffer::TokenBuffer::from_tokens(tt_iter.as_slice());
|
let buffer = tt_iter.remaining();
|
||||||
let parser_input = to_parser_input(edition, &buffer);
|
let parser_input = to_parser_input(edition, buffer);
|
||||||
let tree_traversal = entry_point.parse(&parser_input, edition);
|
let tree_traversal = entry_point.parse(&parser_input, edition);
|
||||||
let mut cursor = buffer.begin();
|
let mut cursor = buffer.cursor();
|
||||||
let mut error = false;
|
let mut error = false;
|
||||||
for step in tree_traversal.iter() {
|
for step in tree_traversal.iter() {
|
||||||
match step {
|
match step {
|
||||||
|
|
@ -378,13 +380,13 @@ pub fn expect_fragment(
|
||||||
n_input_tokens = 2;
|
n_input_tokens = 2;
|
||||||
}
|
}
|
||||||
for _ in 0..n_input_tokens {
|
for _ in 0..n_input_tokens {
|
||||||
cursor = cursor.bump_subtree();
|
cursor.bump_or_end();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
parser::Step::FloatSplit { .. } => {
|
parser::Step::FloatSplit { .. } => {
|
||||||
// FIXME: We need to split the tree properly here, but mutating the token trees
|
// FIXME: We need to split the tree properly here, but mutating the token trees
|
||||||
// in the buffer is somewhat tricky to pull off.
|
// in the buffer is somewhat tricky to pull off.
|
||||||
cursor = cursor.bump_subtree();
|
cursor.bump_or_end();
|
||||||
}
|
}
|
||||||
parser::Step::Enter { .. } | parser::Step::Exit => (),
|
parser::Step::Enter { .. } | parser::Step::Exit => (),
|
||||||
parser::Step::Error { .. } => error = true,
|
parser::Step::Error { .. } => error = true,
|
||||||
|
|
@ -393,29 +395,19 @@ pub fn expect_fragment(
|
||||||
|
|
||||||
let err = if error || !cursor.is_root() {
|
let err = if error || !cursor.is_root() {
|
||||||
Some(ExpandError::binding_error(
|
Some(ExpandError::binding_error(
|
||||||
buffer.begin().token_tree().map_or(delim_span.close, |tt| tt.span()),
|
buffer.cursor().token_tree().map_or(delim_span.close, |tt| tt.first_span()),
|
||||||
format!("expected {entry_point:?}"),
|
format!("expected {entry_point:?}"),
|
||||||
))
|
))
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut curr = buffer.begin();
|
while !cursor.is_root() {
|
||||||
let mut res = vec![];
|
cursor.bump_or_end();
|
||||||
|
|
||||||
while curr != cursor {
|
|
||||||
let Some(token) = curr.token_tree() else { break };
|
|
||||||
res.push(token.cloned());
|
|
||||||
curr = curr.bump();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
*tt_iter = TtIter::new_iter(tt_iter.as_slice()[res.len()..].iter());
|
let res = cursor.crossed();
|
||||||
let res = match &*res {
|
tt_iter.flat_advance(res.len());
|
||||||
[] | [_] => res.pop(),
|
|
||||||
[first, ..] => Some(tt::TokenTree::Subtree(tt::Subtree {
|
|
||||||
delimiter: Delimiter::invisible_spanned(first.first_span()),
|
|
||||||
token_trees: res.into_boxed_slice(),
|
|
||||||
})),
|
|
||||||
};
|
|
||||||
ExpandResult { value: res, err }
|
ExpandResult { value: res, err }
|
||||||
}
|
}
|
||||||
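In the rewritten `expect_fragment` above, the prefix parser runs over the borrowed `tt_iter.remaining()` buffer, the consumed tokens come back as the `cursor.crossed()` view, and `tt_iter.flat_advance(res.len())` moves the caller's iterator forward by that many flat tokens; the old code that re-collected the consumed tokens into a fresh invisible subtree is gone. The shape of that handoff, sketched over plain indices:

    // Parse a prefix out of the remaining tokens, keep it as a borrowed view,
    // and advance the caller's position by exactly what was consumed.
    fn parse_integer_prefix(tokens: &[char]) -> usize {
        // How many leading tokens form an integer literal.
        tokens.iter().take_while(|c| c.is_ascii_digit()).count()
    }

    fn main() {
        let tokens: Vec<char> = "42+x".chars().collect();
        let mut pos = 0;

        let remaining = &tokens[pos..];
        let consumed = parse_integer_prefix(remaining);
        let fragment = &remaining[..consumed]; // borrowed, no re-boxing
        pos += consumed; // the caller's cursor advances by the same amount

        assert_eq!(fragment, &['4', '2'][..]);
        assert_eq!(tokens[pos], '+');
    }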
|
|
|
||||||
|
|
@ -6,7 +6,7 @@ use std::sync::Arc;
|
||||||
use arrayvec::ArrayVec;
|
use arrayvec::ArrayVec;
|
||||||
use intern::{sym, Symbol};
|
use intern::{sym, Symbol};
|
||||||
use span::{Edition, Span, SyntaxContextId};
|
use span::{Edition, Span, SyntaxContextId};
|
||||||
use tt::iter::TtIter;
|
use tt::iter::{TtElement, TtIter};
|
||||||
|
|
||||||
use crate::ParseError;
|
use crate::ParseError;
|
||||||
|
|
||||||
|
|
@ -29,14 +29,14 @@ pub(crate) struct MetaTemplate(pub(crate) Box<[Op]>);
|
||||||
impl MetaTemplate {
|
impl MetaTemplate {
|
||||||
pub(crate) fn parse_pattern(
|
pub(crate) fn parse_pattern(
|
||||||
edition: impl Copy + Fn(SyntaxContextId) -> Edition,
|
edition: impl Copy + Fn(SyntaxContextId) -> Edition,
|
||||||
pattern: &tt::Subtree<Span>,
|
pattern: TtIter<'_, Span>,
|
||||||
) -> Result<Self, ParseError> {
|
) -> Result<Self, ParseError> {
|
||||||
MetaTemplate::parse(edition, pattern, Mode::Pattern)
|
MetaTemplate::parse(edition, pattern, Mode::Pattern)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn parse_template(
|
pub(crate) fn parse_template(
|
||||||
edition: impl Copy + Fn(SyntaxContextId) -> Edition,
|
edition: impl Copy + Fn(SyntaxContextId) -> Edition,
|
||||||
template: &tt::Subtree<Span>,
|
template: TtIter<'_, Span>,
|
||||||
) -> Result<Self, ParseError> {
|
) -> Result<Self, ParseError> {
|
||||||
MetaTemplate::parse(edition, template, Mode::Template)
|
MetaTemplate::parse(edition, template, Mode::Template)
|
||||||
}
|
}
|
||||||
|
|
@ -47,13 +47,11 @@ impl MetaTemplate {
|
||||||
|
|
||||||
fn parse(
|
fn parse(
|
||||||
edition: impl Copy + Fn(SyntaxContextId) -> Edition,
|
edition: impl Copy + Fn(SyntaxContextId) -> Edition,
|
||||||
tt: &tt::Subtree<Span>,
|
mut src: TtIter<'_, Span>,
|
||||||
mode: Mode,
|
mode: Mode,
|
||||||
) -> Result<Self, ParseError> {
|
) -> Result<Self, ParseError> {
|
||||||
let mut src = TtIter::new(tt);
|
|
||||||
|
|
||||||
let mut res = Vec::new();
|
let mut res = Vec::new();
|
||||||
while let Some(first) = src.peek_n(0) {
|
while let Some(first) = src.peek() {
|
||||||
let op = next_op(edition, first, &mut src, mode)?;
|
let op = next_op(edition, first, &mut src, mode)?;
|
||||||
res.push(op);
|
res.push(op);
|
||||||
}
|
}
|
||||||
|
|
@ -182,12 +180,12 @@ enum Mode {
|
||||||
|
|
||||||
fn next_op(
|
fn next_op(
|
||||||
edition: impl Copy + Fn(SyntaxContextId) -> Edition,
|
edition: impl Copy + Fn(SyntaxContextId) -> Edition,
|
||||||
first_peeked: &tt::TokenTree<Span>,
|
first_peeked: TtElement<'_, Span>,
|
||||||
src: &mut TtIter<'_, Span>,
|
src: &mut TtIter<'_, Span>,
|
||||||
mode: Mode,
|
mode: Mode,
|
||||||
) -> Result<Op, ParseError> {
|
) -> Result<Op, ParseError> {
|
||||||
let res = match first_peeked {
|
let res = match first_peeked {
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Punct(p @ tt::Punct { char: '$', .. })) => {
|
TtElement::Leaf(tt::Leaf::Punct(p @ tt::Punct { char: '$', .. })) => {
|
||||||
src.next().expect("first token already peeked");
|
src.next().expect("first token already peeked");
|
||||||
// Note that the '$' itself is a valid token inside macro_rules.
|
// Note that the '$' itself is a valid token inside macro_rules.
|
||||||
let second = match src.next() {
|
let second = match src.next() {
|
||||||
|
|
@ -201,18 +199,16 @@ fn next_op(
|
||||||
Some(it) => it,
|
Some(it) => it,
|
||||||
};
|
};
|
||||||
match second {
|
match second {
|
||||||
tt::TokenTree::Subtree(subtree) => match subtree.delimiter.kind {
|
TtElement::Subtree(subtree, mut subtree_iter) => match subtree.delimiter.kind {
|
||||||
tt::DelimiterKind::Parenthesis => {
|
tt::DelimiterKind::Parenthesis => {
|
||||||
let (separator, kind) = parse_repeat(src)?;
|
let (separator, kind) = parse_repeat(src)?;
|
||||||
let tokens = MetaTemplate::parse(edition, subtree, mode)?;
|
let tokens = MetaTemplate::parse(edition, subtree_iter, mode)?;
|
||||||
Op::Repeat { tokens, separator: separator.map(Arc::new), kind }
|
Op::Repeat { tokens, separator: separator.map(Arc::new), kind }
|
||||||
}
|
}
|
||||||
tt::DelimiterKind::Brace => match mode {
|
tt::DelimiterKind::Brace => match mode {
|
||||||
Mode::Template => {
|
Mode::Template => parse_metavar_expr(&mut subtree_iter).map_err(|()| {
|
||||||
parse_metavar_expr(&mut TtIter::new(subtree)).map_err(|()| {
|
|
||||||
ParseError::unexpected("invalid metavariable expression")
|
ParseError::unexpected("invalid metavariable expression")
|
||||||
})?
|
})?,
|
||||||
}
|
|
||||||
Mode::Pattern => {
|
Mode::Pattern => {
|
||||||
return Err(ParseError::unexpected(
|
return Err(ParseError::unexpected(
|
||||||
"`${}` metavariable expressions are not allowed in matchers",
|
"`${}` metavariable expressions are not allowed in matchers",
|
||||||
|
|
@ -225,7 +221,7 @@ fn next_op(
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
tt::TokenTree::Leaf(leaf) => match leaf {
|
TtElement::Leaf(leaf) => match leaf {
|
||||||
tt::Leaf::Ident(ident) if ident.sym == sym::crate_ => {
|
tt::Leaf::Ident(ident) if ident.sym == sym::crate_ => {
|
||||||
// We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path.
|
// We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path.
|
||||||
Op::Ident(tt::Ident {
|
Op::Ident(tt::Ident {
|
||||||
|
|
@ -265,25 +261,25 @@ fn next_op(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => {
|
TtElement::Leaf(tt::Leaf::Literal(it)) => {
|
||||||
src.next().expect("first token already peeked");
|
src.next().expect("first token already peeked");
|
||||||
Op::Literal(it.clone())
|
Op::Literal(it.clone())
|
||||||
}
|
}
|
||||||
|
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Ident(it)) => {
|
TtElement::Leaf(tt::Leaf::Ident(it)) => {
|
||||||
src.next().expect("first token already peeked");
|
src.next().expect("first token already peeked");
|
||||||
Op::Ident(it.clone())
|
Op::Ident(it.clone())
|
||||||
}
|
}
|
||||||
|
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Punct(_)) => {
|
TtElement::Leaf(tt::Leaf::Punct(_)) => {
|
||||||
// There's at least one punct so this shouldn't fail.
|
// There's at least one punct so this shouldn't fail.
|
||||||
let puncts = src.expect_glued_punct().unwrap();
|
let puncts = src.expect_glued_punct().unwrap();
|
||||||
Op::Punct(Box::new(puncts))
|
Op::Punct(Box::new(puncts))
|
||||||
}
|
}
|
||||||
|
|
||||||
tt::TokenTree::Subtree(subtree) => {
|
TtElement::Subtree(subtree, subtree_iter) => {
|
||||||
src.next().expect("first token already peeked");
|
src.next().expect("first token already peeked");
|
||||||
let tokens = MetaTemplate::parse(edition, subtree, mode)?;
|
let tokens = MetaTemplate::parse(edition, subtree_iter, mode)?;
|
||||||
Op::Subtree { tokens, delimiter: subtree.delimiter }
|
Op::Subtree { tokens, delimiter: subtree.delimiter }
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
@ -343,8 +339,8 @@ fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, Repeat
|
||||||
let mut separator = Separator::Puncts(ArrayVec::new());
|
let mut separator = Separator::Puncts(ArrayVec::new());
|
||||||
for tt in src {
|
for tt in src {
|
||||||
let tt = match tt {
|
let tt = match tt {
|
||||||
tt::TokenTree::Leaf(leaf) => leaf,
|
TtElement::Leaf(leaf) => leaf,
|
||||||
tt::TokenTree::Subtree(_) => return Err(ParseError::InvalidRepeat),
|
TtElement::Subtree(..) => return Err(ParseError::InvalidRepeat),
|
||||||
};
|
};
|
||||||
let has_sep = match &separator {
|
let has_sep = match &separator {
|
||||||
Separator::Puncts(puncts) => !puncts.is_empty(),
|
Separator::Puncts(puncts) => !puncts.is_empty(),
|
||||||
|
|
@ -378,37 +374,39 @@ fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, Repeat
|
||||||
|
|
||||||
fn parse_metavar_expr(src: &mut TtIter<'_, Span>) -> Result<Op, ()> {
|
fn parse_metavar_expr(src: &mut TtIter<'_, Span>) -> Result<Op, ()> {
|
||||||
let func = src.expect_ident()?;
|
let func = src.expect_ident()?;
|
||||||
let args = src.expect_subtree()?;
|
let (args, mut args_iter) = src.expect_subtree()?;
|
||||||
|
|
||||||
if args.delimiter.kind != tt::DelimiterKind::Parenthesis {
|
if args.delimiter.kind != tt::DelimiterKind::Parenthesis {
|
||||||
return Err(());
|
return Err(());
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut args = TtIter::new(args);
|
|
||||||
|
|
||||||
let op = match &func.sym {
|
let op = match &func.sym {
|
||||||
s if sym::ignore == *s => {
|
s if sym::ignore == *s => {
|
||||||
args.expect_dollar()?;
|
args_iter.expect_dollar()?;
|
||||||
let ident = args.expect_ident()?;
|
let ident = args_iter.expect_ident()?;
|
||||||
Op::Ignore { name: ident.sym.clone(), id: ident.span }
|
Op::Ignore { name: ident.sym.clone(), id: ident.span }
|
||||||
}
|
}
|
||||||
s if sym::index == *s => Op::Index { depth: parse_depth(&mut args)? },
|
s if sym::index == *s => Op::Index { depth: parse_depth(&mut args_iter)? },
|
||||||
s if sym::len == *s => Op::Len { depth: parse_depth(&mut args)? },
|
s if sym::len == *s => Op::Len { depth: parse_depth(&mut args_iter)? },
|
||||||
s if sym::count == *s => {
|
s if sym::count == *s => {
|
||||||
args.expect_dollar()?;
|
args_iter.expect_dollar()?;
|
||||||
let ident = args.expect_ident()?;
|
let ident = args_iter.expect_ident()?;
|
||||||
let depth = if try_eat_comma(&mut args) { Some(parse_depth(&mut args)?) } else { None };
|
let depth = if try_eat_comma(&mut args_iter) {
|
||||||
|
Some(parse_depth(&mut args_iter)?)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
Op::Count { name: ident.sym.clone(), depth }
|
Op::Count { name: ident.sym.clone(), depth }
|
||||||
}
|
}
|
||||||
s if sym::concat == *s => {
|
s if sym::concat == *s => {
|
||||||
let mut elements = Vec::new();
|
let mut elements = Vec::new();
|
||||||
while let Some(next) = args.peek_n(0) {
|
while let Some(next) = args_iter.peek() {
|
||||||
let element = if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = next {
|
let element = if let TtElement::Leaf(tt::Leaf::Literal(lit)) = next {
|
||||||
args.next().expect("already peeked");
|
args_iter.next().expect("already peeked");
|
||||||
ConcatMetaVarExprElem::Literal(lit.clone())
|
ConcatMetaVarExprElem::Literal(lit.clone())
|
||||||
} else {
|
} else {
|
||||||
let is_var = try_eat_dollar(&mut args);
|
let is_var = try_eat_dollar(&mut args_iter);
|
||||||
let ident = args.expect_ident_or_underscore()?.clone();
|
let ident = args_iter.expect_ident_or_underscore()?.clone();
|
||||||
|
|
||||||
if is_var {
|
if is_var {
|
||||||
ConcatMetaVarExprElem::Var(ident)
|
ConcatMetaVarExprElem::Var(ident)
|
||||||
|
|
@ -417,8 +415,8 @@ fn parse_metavar_expr(src: &mut TtIter<'_, Span>) -> Result<Op, ()> {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
elements.push(element);
|
elements.push(element);
|
||||||
if args.peek_n(0).is_some() {
|
if !args_iter.is_empty() {
|
||||||
args.expect_comma()?;
|
args_iter.expect_comma()?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if elements.len() < 2 {
|
if elements.len() < 2 {
|
||||||
|
|
@ -429,7 +427,7 @@ fn parse_metavar_expr(src: &mut TtIter<'_, Span>) -> Result<Op, ()> {
|
||||||
_ => return Err(()),
|
_ => return Err(()),
|
||||||
};
|
};
|
||||||
|
|
||||||
if args.next().is_some() {
|
if args_iter.next().is_some() {
|
||||||
return Err(());
|
return Err(());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -437,7 +435,7 @@ fn parse_metavar_expr(src: &mut TtIter<'_, Span>) -> Result<Op, ()> {
|
||||||
}
|
}
|
||||||
|
|
||||||
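For orientation, the operators parsed here correspond to macro metavariable expressions. A rough illustration of the surface syntax behind the `index` and `count` arms above; this assumes a nightly toolchain with the unstable `macro_metavar_expr` feature (and `concat` additionally sits behind `macro_metavar_expr_concat`), so the exact syntax may still shift:

// Nightly-only sketch tying the parser arms above to their surface syntax.
#![feature(macro_metavar_expr)]

macro_rules! enumerate {
    ($($name:ident),*) => {
        // ${index()} expands to the current repetition index.
        [$( (${index()}, stringify!($name)) ),*]
    };
}

macro_rules! how_many {
    ($($name:ident),*) => {
        // ${count($name)} expands to the number of repetitions of $name.
        ${count($name)}
    };
}

fn main() {
    assert_eq!(enumerate!(a, b, c), [(0, "a"), (1, "b"), (2, "c")]);
    assert_eq!(how_many!(a, b, c), 3);
}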
fn parse_depth(src: &mut TtIter<'_, Span>) -> Result<usize, ()> {
|
fn parse_depth(src: &mut TtIter<'_, Span>) -> Result<usize, ()> {
|
||||||
if src.len() == 0 {
|
if src.is_empty() {
|
||||||
Ok(0)
|
Ok(0)
|
||||||
} else if let tt::Leaf::Literal(tt::Literal { symbol: text, suffix: None, .. }) =
|
} else if let tt::Leaf::Literal(tt::Literal { symbol: text, suffix: None, .. }) =
|
||||||
src.expect_literal()?
|
src.expect_literal()?
|
||||||
|
|
@ -450,7 +448,7 @@ fn parse_depth(src: &mut TtIter<'_, Span>) -> Result<usize, ()> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn try_eat_comma(src: &mut TtIter<'_, Span>) -> bool {
|
fn try_eat_comma(src: &mut TtIter<'_, Span>) -> bool {
|
||||||
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))) = src.peek_n(0) {
|
if let Some(TtElement::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))) = src.peek() {
|
||||||
let _ = src.next();
|
let _ = src.next();
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
@ -458,7 +456,7 @@ fn try_eat_comma(src: &mut TtIter<'_, Span>) -> bool {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn try_eat_dollar(src: &mut TtIter<'_, Span>) -> bool {
|
fn try_eat_dollar(src: &mut TtIter<'_, Span>) -> bool {
|
||||||
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '$', .. }))) = src.peek_n(0) {
|
if let Some(TtElement::Leaf(tt::Leaf::Punct(tt::Punct { char: '$', .. }))) = src.peek() {
|
||||||
let _ = src.next();
|
let _ = src.next();
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -160,11 +160,14 @@ type ProtocolWrite<W: Write> = for<'o, 'msg> fn(out: &'o mut W, msg: &'msg str)
|
||||||
mod tests {
|
mod tests {
|
||||||
use intern::{sym, Symbol};
|
use intern::{sym, Symbol};
|
||||||
use span::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange, TextSize};
|
use span::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange, TextSize};
|
||||||
use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Subtree, TokenTree};
|
use tt::{
|
||||||
|
Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, TopSubtree,
|
||||||
|
TopSubtreeBuilder,
|
||||||
|
};
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
fn fixture_token_tree() -> Subtree<Span> {
|
fn fixture_token_tree() -> TopSubtree<Span> {
|
||||||
let anchor = SpanAnchor {
|
let anchor = SpanAnchor {
|
||||||
file_id: span::EditionedFileId::new(
|
file_id: span::EditionedFileId::new(
|
||||||
span::FileId::from_raw(0xe4e4e),
|
span::FileId::from_raw(0xe4e4e),
|
||||||
|
|
@ -173,79 +176,7 @@ mod tests {
|
||||||
ast_id: ErasedFileAstId::from_raw(0),
|
ast_id: ErasedFileAstId::from_raw(0),
|
||||||
};
|
};
|
||||||
|
|
||||||
let token_trees = Box::new([
|
let mut builder = TopSubtreeBuilder::new(Delimiter {
|
||||||
TokenTree::Leaf(
|
|
||||||
Ident {
|
|
||||||
sym: Symbol::intern("struct"),
|
|
||||||
span: Span {
|
|
||||||
range: TextRange::at(TextSize::new(0), TextSize::of("struct")),
|
|
||||||
anchor,
|
|
||||||
ctx: SyntaxContextId::ROOT,
|
|
||||||
},
|
|
||||||
is_raw: tt::IdentIsRaw::No,
|
|
||||||
}
|
|
||||||
.into(),
|
|
||||||
),
|
|
||||||
TokenTree::Leaf(
|
|
||||||
Ident {
|
|
||||||
sym: Symbol::intern("Foo"),
|
|
||||||
span: Span {
|
|
||||||
range: TextRange::at(TextSize::new(5), TextSize::of("r#Foo")),
|
|
||||||
anchor,
|
|
||||||
ctx: SyntaxContextId::ROOT,
|
|
||||||
},
|
|
||||||
is_raw: tt::IdentIsRaw::Yes,
|
|
||||||
}
|
|
||||||
.into(),
|
|
||||||
),
|
|
||||||
TokenTree::Leaf(Leaf::Literal(Literal {
|
|
||||||
symbol: Symbol::intern("Foo"),
|
|
||||||
span: Span {
|
|
||||||
range: TextRange::at(TextSize::new(10), TextSize::of("\"Foo\"")),
|
|
||||||
anchor,
|
|
||||||
ctx: SyntaxContextId::ROOT,
|
|
||||||
},
|
|
||||||
kind: tt::LitKind::Str,
|
|
||||||
suffix: None,
|
|
||||||
})),
|
|
||||||
TokenTree::Leaf(Leaf::Punct(Punct {
|
|
||||||
char: '@',
|
|
||||||
span: Span {
|
|
||||||
range: TextRange::at(TextSize::new(13), TextSize::of('@')),
|
|
||||||
anchor,
|
|
||||||
ctx: SyntaxContextId::ROOT,
|
|
||||||
},
|
|
||||||
spacing: Spacing::Joint,
|
|
||||||
})),
|
|
||||||
TokenTree::Subtree(Subtree {
|
|
||||||
delimiter: Delimiter {
|
|
||||||
open: Span {
|
|
||||||
range: TextRange::at(TextSize::new(14), TextSize::of('{')),
|
|
||||||
anchor,
|
|
||||||
ctx: SyntaxContextId::ROOT,
|
|
||||||
},
|
|
||||||
close: Span {
|
|
||||||
range: TextRange::at(TextSize::new(19), TextSize::of('}')),
|
|
||||||
anchor,
|
|
||||||
ctx: SyntaxContextId::ROOT,
|
|
||||||
},
|
|
||||||
kind: DelimiterKind::Brace,
|
|
||||||
},
|
|
||||||
token_trees: Box::new([TokenTree::Leaf(Leaf::Literal(Literal {
|
|
||||||
symbol: sym::INTEGER_0.clone(),
|
|
||||||
span: Span {
|
|
||||||
range: TextRange::at(TextSize::new(15), TextSize::of("0u32")),
|
|
||||||
anchor,
|
|
||||||
ctx: SyntaxContextId::ROOT,
|
|
||||||
},
|
|
||||||
kind: tt::LitKind::Integer,
|
|
||||||
suffix: Some(sym::u32.clone()),
|
|
||||||
}))]),
|
|
||||||
}),
|
|
||||||
]);
|
|
||||||
|
|
||||||
Subtree {
|
|
||||||
delimiter: Delimiter {
|
|
||||||
open: Span {
|
open: Span {
|
||||||
range: TextRange::empty(TextSize::new(0)),
|
range: TextRange::empty(TextSize::new(0)),
|
||||||
anchor,
|
anchor,
|
||||||
|
|
@ -257,9 +188,76 @@ mod tests {
|
||||||
ctx: SyntaxContextId::ROOT,
|
ctx: SyntaxContextId::ROOT,
|
||||||
},
|
},
|
||||||
kind: DelimiterKind::Invisible,
|
kind: DelimiterKind::Invisible,
|
||||||
|
});
|
||||||
|
|
||||||
|
builder.push(
|
||||||
|
Ident {
|
||||||
|
sym: Symbol::intern("struct"),
|
||||||
|
span: Span {
|
||||||
|
range: TextRange::at(TextSize::new(0), TextSize::of("struct")),
|
||||||
|
anchor,
|
||||||
|
ctx: SyntaxContextId::ROOT,
|
||||||
},
|
},
|
||||||
token_trees,
|
is_raw: tt::IdentIsRaw::No,
|
||||||
}
|
}
|
||||||
|
.into(),
|
||||||
|
);
|
||||||
|
builder.push(
|
||||||
|
Ident {
|
||||||
|
sym: Symbol::intern("Foo"),
|
||||||
|
span: Span {
|
||||||
|
range: TextRange::at(TextSize::new(5), TextSize::of("r#Foo")),
|
||||||
|
anchor,
|
||||||
|
ctx: SyntaxContextId::ROOT,
|
||||||
|
},
|
||||||
|
is_raw: tt::IdentIsRaw::Yes,
|
||||||
|
}
|
||||||
|
.into(),
|
||||||
|
);
|
||||||
|
builder.push(Leaf::Literal(Literal {
|
||||||
|
symbol: Symbol::intern("Foo"),
|
||||||
|
span: Span {
|
||||||
|
range: TextRange::at(TextSize::new(10), TextSize::of("\"Foo\"")),
|
||||||
|
anchor,
|
||||||
|
ctx: SyntaxContextId::ROOT,
|
||||||
|
},
|
||||||
|
kind: tt::LitKind::Str,
|
||||||
|
suffix: None,
|
||||||
|
}));
|
||||||
|
builder.push(Leaf::Punct(Punct {
|
||||||
|
char: '@',
|
||||||
|
span: Span {
|
||||||
|
range: TextRange::at(TextSize::new(13), TextSize::of('@')),
|
||||||
|
anchor,
|
||||||
|
ctx: SyntaxContextId::ROOT,
|
||||||
|
},
|
||||||
|
spacing: Spacing::Joint,
|
||||||
|
}));
|
||||||
|
builder.open(
|
||||||
|
DelimiterKind::Brace,
|
||||||
|
Span {
|
||||||
|
range: TextRange::at(TextSize::new(14), TextSize::of('{')),
|
||||||
|
anchor,
|
||||||
|
ctx: SyntaxContextId::ROOT,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
builder.push(Leaf::Literal(Literal {
|
||||||
|
symbol: sym::INTEGER_0.clone(),
|
||||||
|
span: Span {
|
||||||
|
range: TextRange::at(TextSize::new(15), TextSize::of("0u32")),
|
||||||
|
anchor,
|
||||||
|
ctx: SyntaxContextId::ROOT,
|
||||||
|
},
|
||||||
|
kind: tt::LitKind::Integer,
|
||||||
|
suffix: Some(sym::u32.clone()),
|
||||||
|
}));
|
||||||
|
builder.close(Span {
|
||||||
|
range: TextRange::at(TextSize::new(19), TextSize::of('}')),
|
||||||
|
anchor,
|
||||||
|
ctx: SyntaxContextId::ROOT,
|
||||||
|
});
|
||||||
|
|
||||||
|
builder.build()
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|
@ -269,7 +267,7 @@ mod tests {
|
||||||
let mut span_data_table = Default::default();
|
let mut span_data_table = Default::default();
|
||||||
let task = ExpandMacro {
|
let task = ExpandMacro {
|
||||||
data: ExpandMacroData {
|
data: ExpandMacroData {
|
||||||
macro_body: FlatTree::new(&tt, v, &mut span_data_table),
|
macro_body: FlatTree::new(tt.view(), v, &mut span_data_table),
|
||||||
macro_name: Default::default(),
|
macro_name: Default::default(),
|
||||||
attributes: None,
|
attributes: None,
|
||||||
has_global_spans: ExpnGlobals {
|
has_global_spans: ExpnGlobals {
|
||||||
|
|
@ -289,9 +287,8 @@ mod tests {
|
||||||
// println!("{}", json);
|
// println!("{}", json);
|
||||||
let back: ExpandMacro = serde_json::from_str(&json).unwrap();
|
let back: ExpandMacro = serde_json::from_str(&json).unwrap();
|
||||||
|
|
||||||
assert_eq!(
|
assert!(
|
||||||
tt,
|
tt == back.data.macro_body.to_subtree_resolved(v, &span_data_table),
|
||||||
back.data.macro_body.to_subtree_resolved(v, &span_data_table),
|
|
||||||
"version: {v}"
|
"version: {v}"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,13 +1,13 @@
//! Serialization-friendly representation of `tt::Subtree`.
//! Serialization-friendly representation of `tt::TopSubtree`.
//!
//! It is possible to serialize `Subtree` as is, as a tree, but using
//! It is possible to serialize `TopSubtree` recursively, as a tree, but using
//! arbitrary-nested trees in JSON is problematic, as they can cause the JSON
//! parser to overflow the stack.
//!
//! Additionally, such implementation would be pretty verbose, and we do care
//! about performance here a bit.
//!
//! So what this module does is dumping a `tt::Subtree` into a bunch of flat
//! So what this module does is dumping a `tt::TopSubtree` into a bunch of flat
//! array of numbers. See the test in the parent module to get an example
//! output.
//!
|
|
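The module comment above describes the flat encoding only in prose. As a rough, self-contained illustration of the same idea (simplified field names and an invented example, not the actual `FlatTree`/`SubtreeRepr` layout): every subtree stores a half-open range into one flat token array, and each entry there packs a 2-bit kind tag with an index into a per-kind side table, mirroring the `idx_tag & 0b11` / `idx_tag >> 2` decoding in the `Reader` further down.

// Hypothetical, simplified sketch of the tagged-index flat encoding.
struct FlatSketch {
    subtree_ranges: Vec<[u32; 2]>, // [start, end) into `token_tree`
    token_tree: Vec<u32>,          // (index << 2) | tag
    literals: Vec<String>,
    puncts: Vec<char>,
    idents: Vec<String>,
}

const TAG_SUBTREE: u32 = 0b00;
const TAG_LITERAL: u32 = 0b01;
const TAG_PUNCT: u32 = 0b10;
const TAG_IDENT: u32 = 0b11;

impl FlatSketch {
    // Decode one subtree by following its range and the 2-bit tags.
    fn render(&self, subtree: usize, out: &mut String) {
        let [start, end] = self.subtree_ranges[subtree];
        for &idx_tag in &self.token_tree[start as usize..end as usize] {
            let (tag, idx) = (idx_tag & 0b11, (idx_tag >> 2) as usize);
            match tag {
                TAG_SUBTREE => {
                    out.push_str("( ");
                    self.render(idx, out);
                    out.push_str(") ");
                }
                TAG_LITERAL => {
                    out.push_str(&self.literals[idx]);
                    out.push(' ');
                }
                TAG_PUNCT => {
                    out.push(self.puncts[idx]);
                    out.push(' ');
                }
                TAG_IDENT => {
                    out.push_str(&self.idents[idx]);
                    out.push(' ');
                }
                _ => unreachable!(),
            }
        }
    }
}

fn main() {
    // `foo ( 1 )`: subtree 0 is the invisible root, subtree 1 the parentheses.
    let tree = FlatSketch {
        subtree_ranges: vec![[0, 2], [2, 3]],
        token_tree: vec![(0 << 2) | TAG_IDENT, (1 << 2) | TAG_SUBTREE, (0 << 2) | TAG_LITERAL],
        literals: vec!["1".into()],
        puncts: vec![],
        idents: vec!["foo".into()],
    };
    let mut out = String::new();
    tree.render(0, &mut out);
    assert_eq!(out.trim_end(), "foo ( 1 )");
}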
@ -118,7 +118,7 @@ struct IdentRepr {
|
||||||
|
|
||||||
impl FlatTree {
|
impl FlatTree {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
subtree: &tt::Subtree<Span>,
|
subtree: tt::SubtreeView<'_, Span>,
|
||||||
version: u32,
|
version: u32,
|
||||||
span_data_table: &mut SpanDataIndexMap,
|
span_data_table: &mut SpanDataIndexMap,
|
||||||
) -> FlatTree {
|
) -> FlatTree {
|
||||||
|
|
@ -159,7 +159,7 @@ impl FlatTree {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn new_raw(subtree: &tt::Subtree<TokenId>, version: u32) -> FlatTree {
|
pub fn new_raw(subtree: tt::SubtreeView<'_, TokenId>, version: u32) -> FlatTree {
|
||||||
let mut w = Writer {
|
let mut w = Writer {
|
||||||
string_table: FxHashMap::default(),
|
string_table: FxHashMap::default(),
|
||||||
work: VecDeque::new(),
|
work: VecDeque::new(),
|
||||||
|
|
@ -201,7 +201,7 @@ impl FlatTree {
|
||||||
self,
|
self,
|
||||||
version: u32,
|
version: u32,
|
||||||
span_data_table: &SpanDataIndexMap,
|
span_data_table: &SpanDataIndexMap,
|
||||||
) -> tt::Subtree<Span> {
|
) -> tt::TopSubtree<Span> {
|
||||||
Reader {
|
Reader {
|
||||||
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
|
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
|
||||||
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
|
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
|
||||||
|
|
@ -227,7 +227,7 @@ impl FlatTree {
|
||||||
.read()
|
.read()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn to_subtree_unresolved(self, version: u32) -> tt::Subtree<TokenId> {
|
pub fn to_subtree_unresolved(self, version: u32) -> tt::TopSubtree<TokenId> {
|
||||||
Reader {
|
Reader {
|
||||||
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
|
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
|
||||||
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
|
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
|
||||||
|
|
@ -381,7 +381,7 @@ impl InternableSpan for Span {
|
||||||
}
|
}
|
||||||
|
|
||||||
struct Writer<'a, 'span, S: InternableSpan> {
|
struct Writer<'a, 'span, S: InternableSpan> {
|
||||||
work: VecDeque<(usize, &'a tt::Subtree<S>)>,
|
work: VecDeque<(usize, tt::iter::TtIter<'a, S>)>,
|
||||||
string_table: FxHashMap<std::borrow::Cow<'a, str>, u32>,
|
string_table: FxHashMap<std::borrow::Cow<'a, str>, u32>,
|
||||||
span_data_table: &'span mut S::Table,
|
span_data_table: &'span mut S::Table,
|
||||||
version: u32,
|
version: u32,
|
||||||
|
|
@ -395,8 +395,9 @@ struct Writer<'a, 'span, S: InternableSpan> {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, S: InternableSpan> Writer<'a, '_, S> {
|
impl<'a, S: InternableSpan> Writer<'a, '_, S> {
|
||||||
fn write(&mut self, root: &'a tt::Subtree<S>) {
|
fn write(&mut self, root: tt::SubtreeView<'a, S>) {
|
||||||
self.enqueue(root);
|
let subtree = root.top_subtree();
|
||||||
|
self.enqueue(subtree, root.iter());
|
||||||
while let Some((idx, subtree)) = self.work.pop_front() {
|
while let Some((idx, subtree)) = self.work.pop_front() {
|
||||||
self.subtree(idx, subtree);
|
self.subtree(idx, subtree);
|
||||||
}
|
}
|
||||||
|
|
@ -406,20 +407,20 @@ impl<'a, S: InternableSpan> Writer<'a, '_, S> {
|
||||||
S::token_id_of(self.span_data_table, span)
|
S::token_id_of(self.span_data_table, span)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree<S>) {
|
fn subtree(&mut self, idx: usize, subtree: tt::iter::TtIter<'a, S>) {
|
||||||
let mut first_tt = self.token_tree.len();
|
let mut first_tt = self.token_tree.len();
|
||||||
let n_tt = subtree.token_trees.len();
|
let n_tt = subtree.clone().count(); // FIXME: `count()` walks over the entire iterator.
|
||||||
self.token_tree.resize(first_tt + n_tt, !0);
|
self.token_tree.resize(first_tt + n_tt, !0);
|
||||||
|
|
||||||
self.subtree[idx].tt = [first_tt as u32, (first_tt + n_tt) as u32];
|
self.subtree[idx].tt = [first_tt as u32, (first_tt + n_tt) as u32];
|
||||||
|
|
||||||
for child in subtree.token_trees.iter() {
|
for child in subtree {
|
||||||
let idx_tag = match child {
|
let idx_tag = match child {
|
||||||
tt::TokenTree::Subtree(it) => {
|
tt::iter::TtElement::Subtree(subtree, subtree_iter) => {
|
||||||
let idx = self.enqueue(it);
|
let idx = self.enqueue(subtree, subtree_iter);
|
||||||
idx << 2
|
idx << 2
|
||||||
}
|
}
|
||||||
tt::TokenTree::Leaf(leaf) => match leaf {
|
tt::iter::TtElement::Leaf(leaf) => match leaf {
|
||||||
tt::Leaf::Literal(lit) => {
|
tt::Leaf::Literal(lit) => {
|
||||||
let idx = self.literal.len() as u32;
|
let idx = self.literal.len() as u32;
|
||||||
let id = self.token_id_of(lit.span);
|
let id = self.token_id_of(lit.span);
|
||||||
|
|
@ -477,13 +478,13 @@ impl<'a, S: InternableSpan> Writer<'a, '_, S> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn enqueue(&mut self, subtree: &'a tt::Subtree<S>) -> u32 {
|
fn enqueue(&mut self, subtree: &'a tt::Subtree<S>, contents: tt::iter::TtIter<'a, S>) -> u32 {
|
||||||
let idx = self.subtree.len();
|
let idx = self.subtree.len();
|
||||||
let open = self.token_id_of(subtree.delimiter.open);
|
let open = self.token_id_of(subtree.delimiter.open);
|
||||||
let close = self.token_id_of(subtree.delimiter.close);
|
let close = self.token_id_of(subtree.delimiter.close);
|
||||||
let delimiter_kind = subtree.delimiter.kind;
|
let delimiter_kind = subtree.delimiter.kind;
|
||||||
self.subtree.push(SubtreeRepr { open, close, kind: delimiter_kind, tt: [!0, !0] });
|
self.subtree.push(SubtreeRepr { open, close, kind: delimiter_kind, tt: [!0, !0] });
|
||||||
self.work.push_back((idx, subtree));
|
self.work.push_back((idx, contents));
|
||||||
idx as u32
|
idx as u32
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -518,33 +519,39 @@ struct Reader<'span, S: InternableSpan> {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<S: InternableSpan> Reader<'_, S> {
|
impl<S: InternableSpan> Reader<'_, S> {
|
||||||
pub(crate) fn read(self) -> tt::Subtree<S> {
|
pub(crate) fn read(self) -> tt::TopSubtree<S> {
|
||||||
let mut res: Vec<Option<tt::Subtree<S>>> = vec![None; self.subtree.len()];
|
let mut res: Vec<Option<(tt::Delimiter<S>, Vec<tt::TokenTree<S>>)>> =
|
||||||
|
vec![None; self.subtree.len()];
|
||||||
let read_span = |id| S::span_for_token_id(self.span_data_table, id);
|
let read_span = |id| S::span_for_token_id(self.span_data_table, id);
|
||||||
for i in (0..self.subtree.len()).rev() {
|
for i in (0..self.subtree.len()).rev() {
|
||||||
let repr = &self.subtree[i];
|
let repr = &self.subtree[i];
|
||||||
let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize];
|
let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize];
|
||||||
let s = tt::Subtree {
|
let delimiter = tt::Delimiter {
|
||||||
delimiter: tt::Delimiter {
|
|
||||||
open: read_span(repr.open),
|
open: read_span(repr.open),
|
||||||
close: read_span(repr.close),
|
close: read_span(repr.close),
|
||||||
kind: repr.kind,
|
kind: repr.kind,
|
||||||
},
|
};
|
||||||
token_trees: token_trees
|
let mut s = Vec::new();
|
||||||
.iter()
|
for &idx_tag in token_trees {
|
||||||
.copied()
|
|
||||||
.map(|idx_tag| {
|
|
||||||
let tag = idx_tag & 0b11;
|
let tag = idx_tag & 0b11;
|
||||||
let idx = (idx_tag >> 2) as usize;
|
let idx = (idx_tag >> 2) as usize;
|
||||||
match tag {
|
match tag {
|
||||||
// XXX: we iterate subtrees in reverse to guarantee
|
// XXX: we iterate subtrees in reverse to guarantee
|
||||||
// that this unwrap doesn't fire.
|
// that this unwrap doesn't fire.
|
||||||
0b00 => res[idx].take().unwrap().into(),
|
0b00 => {
|
||||||
|
let (delimiter, subtree) = res[idx].take().unwrap();
|
||||||
|
s.push(tt::TokenTree::Subtree(tt::Subtree {
|
||||||
|
delimiter,
|
||||||
|
len: subtree.len() as u32,
|
||||||
|
}));
|
||||||
|
s.extend(subtree)
|
||||||
|
}
|
||||||
0b01 => {
|
0b01 => {
|
||||||
use tt::LitKind::*;
|
use tt::LitKind::*;
|
||||||
let repr = &self.literal[idx];
|
let repr = &self.literal[idx];
|
||||||
let text = self.text[repr.text as usize].as_str();
|
let text = self.text[repr.text as usize].as_str();
|
||||||
let span = read_span(repr.id);
|
let span = read_span(repr.id);
|
||||||
|
s.push(
|
||||||
tt::Leaf::Literal(if self.version >= EXTENDED_LEAF_DATA {
|
tt::Leaf::Literal(if self.version >= EXTENDED_LEAF_DATA {
|
||||||
tt::Literal {
|
tt::Literal {
|
||||||
symbol: Symbol::intern(text),
|
symbol: Symbol::intern(text),
|
||||||
|
|
@ -574,47 +581,48 @@ impl<S: InternableSpan> Reader<'_, S> {
|
||||||
} else {
|
} else {
|
||||||
tt::token_to_literal(text, span)
|
tt::token_to_literal(text, span)
|
||||||
})
|
})
|
||||||
.into()
|
.into(),
|
||||||
|
)
|
||||||
}
|
}
|
||||||
0b10 => {
|
0b10 => {
|
||||||
let repr = &self.punct[idx];
|
let repr = &self.punct[idx];
|
||||||
|
s.push(
|
||||||
tt::Leaf::Punct(tt::Punct {
|
tt::Leaf::Punct(tt::Punct {
|
||||||
char: repr.char,
|
char: repr.char,
|
||||||
spacing: repr.spacing,
|
spacing: repr.spacing,
|
||||||
span: read_span(repr.id),
|
span: read_span(repr.id),
|
||||||
})
|
})
|
||||||
.into()
|
.into(),
|
||||||
|
)
|
||||||
}
|
}
|
||||||
0b11 => {
|
0b11 => {
|
||||||
let repr = &self.ident[idx];
|
let repr = &self.ident[idx];
|
||||||
let text = self.text[repr.text as usize].as_str();
|
let text = self.text[repr.text as usize].as_str();
|
||||||
let (is_raw, text) = if self.version >= EXTENDED_LEAF_DATA {
|
let (is_raw, text) = if self.version >= EXTENDED_LEAF_DATA {
|
||||||
(
|
(
|
||||||
if repr.is_raw {
|
if repr.is_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No },
|
||||||
tt::IdentIsRaw::Yes
|
|
||||||
} else {
|
|
||||||
tt::IdentIsRaw::No
|
|
||||||
},
|
|
||||||
text,
|
text,
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
tt::IdentIsRaw::split_from_symbol(text)
|
tt::IdentIsRaw::split_from_symbol(text)
|
||||||
};
|
};
|
||||||
|
s.push(
|
||||||
tt::Leaf::Ident(tt::Ident {
|
tt::Leaf::Ident(tt::Ident {
|
||||||
sym: Symbol::intern(text),
|
sym: Symbol::intern(text),
|
||||||
span: read_span(repr.id),
|
span: read_span(repr.id),
|
||||||
is_raw,
|
is_raw,
|
||||||
})
|
})
|
||||||
.into()
|
.into(),
|
||||||
|
)
|
||||||
}
|
}
|
||||||
other => panic!("bad tag: {other}"),
|
other => panic!("bad tag: {other}"),
|
||||||
}
|
}
|
||||||
})
|
}
|
||||||
.collect(),
|
res[i] = Some((delimiter, s));
|
||||||
};
|
|
||||||
res[i] = Some(s);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
res[0].take().unwrap()
|
let (delimiter, mut res) = res[0].take().unwrap();
|
||||||
|
res.insert(0, tt::TokenTree::Subtree(tt::Subtree { delimiter, len: res.len() as u32 }));
|
||||||
|
tt::TopSubtree(res.into_boxed_slice())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
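A side note on the `XXX` comment in `read` above: the `unwrap` is safe because the writer numbers subtrees in the order it enqueues them, so any nested subtree gets a strictly larger index than the subtree containing it, and the reverse loop therefore finishes children before their parent is assembled. A trivial standalone check of that ordering property (toy indices, not real writer output):

fn main() {
    // parent_idx[i] = Some(index of the enclosing subtree), None for the root.
    let parent_idx: [Option<usize>; 4] = [None, Some(0), Some(0), Some(2)];
    for (child, parent) in parent_idx.iter().enumerate() {
        if let Some(parent) = parent {
            assert!(*parent < child, "children are enqueued after their parents");
        }
    }
}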
@ -146,14 +146,14 @@ impl ProcMacro {
|
||||||
|
|
||||||
pub fn expand(
|
pub fn expand(
|
||||||
&self,
|
&self,
|
||||||
subtree: &tt::Subtree<Span>,
|
subtree: tt::SubtreeView<'_, Span>,
|
||||||
attr: Option<&tt::Subtree<Span>>,
|
attr: Option<tt::SubtreeView<'_, Span>>,
|
||||||
env: Vec<(String, String)>,
|
env: Vec<(String, String)>,
|
||||||
def_site: Span,
|
def_site: Span,
|
||||||
call_site: Span,
|
call_site: Span,
|
||||||
mixed_site: Span,
|
mixed_site: Span,
|
||||||
current_dir: Option<String>,
|
current_dir: Option<String>,
|
||||||
) -> Result<Result<tt::Subtree<Span>, PanicMessage>, ServerError> {
|
) -> Result<Result<tt::TopSubtree<Span>, PanicMessage>, ServerError> {
|
||||||
let version = self.process.version();
|
let version = self.process.version();
|
||||||
|
|
||||||
let mut span_data_table = SpanDataIndexMap::default();
|
let mut span_data_table = SpanDataIndexMap::default();
|
||||||
|
|
|
||||||
|
|
@ -12,6 +12,7 @@ rust-version.workspace = true
|
||||||
[dependencies]
|
[dependencies]
|
||||||
proc-macro-srv.workspace = true
|
proc-macro-srv.workspace = true
|
||||||
proc-macro-api.workspace = true
|
proc-macro-api.workspace = true
|
||||||
|
tt.workspace = true
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
sysroot-abi = ["proc-macro-srv/sysroot-abi"]
|
sysroot-abi = ["proc-macro-srv/sysroot-abi"]
|
||||||
|
|
|
||||||
|
|
@ -75,7 +75,9 @@ pub(crate) fn run() -> io::Result<()> {
|
||||||
call_site,
|
call_site,
|
||||||
mixed_site,
|
mixed_site,
|
||||||
)
|
)
|
||||||
.map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION))
|
.map(|it| {
|
||||||
|
msg::FlatTree::new_raw(tt::SubtreeView::new(&it), CURRENT_API_VERSION)
|
||||||
|
})
|
||||||
.map_err(msg::PanicMessage)
|
.map_err(msg::PanicMessage)
|
||||||
}),
|
}),
|
||||||
SpanMode::RustAnalyzer => msg::Response::ExpandMacroExtended({
|
SpanMode::RustAnalyzer => msg::Response::ExpandMacroExtended({
|
||||||
|
|
@ -103,7 +105,11 @@ pub(crate) fn run() -> io::Result<()> {
|
||||||
)
|
)
|
||||||
.map(|it| {
|
.map(|it| {
|
||||||
(
|
(
|
||||||
msg::FlatTree::new(&it, CURRENT_API_VERSION, &mut span_data_table),
|
msg::FlatTree::new(
|
||||||
|
tt::SubtreeView::new(&it),
|
||||||
|
CURRENT_API_VERSION,
|
||||||
|
&mut span_data_table,
|
||||||
|
),
|
||||||
serialize_span_data_index_map(&span_data_table),
|
serialize_span_data_index_map(&span_data_table),
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
|
||||||
|
|
@ -9,7 +9,7 @@ use libloading::Library;
|
||||||
use object::Object;
|
use object::Object;
|
||||||
use paths::{Utf8Path, Utf8PathBuf};
|
use paths::{Utf8Path, Utf8PathBuf};
|
||||||
|
|
||||||
use crate::{proc_macros::ProcMacros, ProcMacroKind, ProcMacroSrvSpan};
|
use crate::{proc_macros::ProcMacros, server_impl::TopSubtree, ProcMacroKind, ProcMacroSrvSpan};
|
||||||
|
|
||||||
/// Loads dynamic library in platform dependent manner.
|
/// Loads dynamic library in platform dependent manner.
|
||||||
///
|
///
|
||||||
|
|
@ -125,12 +125,12 @@ impl Expander {
|
||||||
pub(crate) fn expand<S: ProcMacroSrvSpan>(
|
pub(crate) fn expand<S: ProcMacroSrvSpan>(
|
||||||
&self,
|
&self,
|
||||||
macro_name: &str,
|
macro_name: &str,
|
||||||
macro_body: tt::Subtree<S>,
|
macro_body: TopSubtree<S>,
|
||||||
attributes: Option<tt::Subtree<S>>,
|
attributes: Option<TopSubtree<S>>,
|
||||||
def_site: S,
|
def_site: S,
|
||||||
call_site: S,
|
call_site: S,
|
||||||
mixed_site: S,
|
mixed_site: S,
|
||||||
) -> Result<tt::Subtree<S>, String>
|
) -> Result<TopSubtree<S>, String>
|
||||||
where
|
where
|
||||||
<S::Server as bridge::server::Types>::TokenStream: Default,
|
<S::Server as bridge::server::Types>::TokenStream: Default,
|
||||||
{
|
{
|
||||||
|
|
|
||||||
|
|
@ -72,12 +72,12 @@ impl ProcMacroSrv<'_> {
|
||||||
env: Vec<(String, String)>,
|
env: Vec<(String, String)>,
|
||||||
current_dir: Option<impl AsRef<Path>>,
|
current_dir: Option<impl AsRef<Path>>,
|
||||||
macro_name: String,
|
macro_name: String,
|
||||||
macro_body: tt::Subtree<S>,
|
macro_body: tt::TopSubtree<S>,
|
||||||
attribute: Option<tt::Subtree<S>>,
|
attribute: Option<tt::TopSubtree<S>>,
|
||||||
def_site: S,
|
def_site: S,
|
||||||
call_site: S,
|
call_site: S,
|
||||||
mixed_site: S,
|
mixed_site: S,
|
||||||
) -> Result<tt::Subtree<S>, String> {
|
) -> Result<Vec<tt::TokenTree<S>>, String> {
|
||||||
let snapped_env = self.env;
|
let snapped_env = self.env;
|
||||||
let expander =
|
let expander =
|
||||||
self.expander(lib.as_ref()).map_err(|err| format!("failed to load macro: {err}"))?;
|
self.expander(lib.as_ref()).map_err(|err| format!("failed to load macro: {err}"))?;
|
||||||
|
|
@ -91,14 +91,16 @@ impl ProcMacroSrv<'_> {
|
||||||
.stack_size(EXPANDER_STACK_SIZE)
|
.stack_size(EXPANDER_STACK_SIZE)
|
||||||
.name(macro_name.clone())
|
.name(macro_name.clone())
|
||||||
.spawn_scoped(s, move || {
|
.spawn_scoped(s, move || {
|
||||||
expander.expand(
|
expander
|
||||||
|
.expand(
|
||||||
¯o_name,
|
¯o_name,
|
||||||
macro_body,
|
server_impl::TopSubtree(macro_body.0.into_vec()),
|
||||||
attribute,
|
attribute.map(|it| server_impl::TopSubtree(it.0.into_vec())),
|
||||||
def_site,
|
def_site,
|
||||||
call_site,
|
call_site,
|
||||||
mixed_site,
|
mixed_site,
|
||||||
)
|
)
|
||||||
|
.map(|tt| tt.0)
|
||||||
});
|
});
|
||||||
let res = match thread {
|
let res = match thread {
|
||||||
Ok(handle) => handle.join(),
|
Ok(handle) => handle.join(),
|
||||||
|
|
|
||||||
|
|
@ -4,7 +4,9 @@ use proc_macro::bridge;
|
||||||
|
|
||||||
use libloading::Library;
|
use libloading::Library;
|
||||||
|
|
||||||
use crate::{dylib::LoadProcMacroDylibError, ProcMacroKind, ProcMacroSrvSpan};
|
use crate::{
|
||||||
|
dylib::LoadProcMacroDylibError, server_impl::TopSubtree, ProcMacroKind, ProcMacroSrvSpan,
|
||||||
|
};
|
||||||
|
|
||||||
#[repr(transparent)]
|
#[repr(transparent)]
|
||||||
pub(crate) struct ProcMacros([bridge::client::ProcMacro]);
|
pub(crate) struct ProcMacros([bridge::client::ProcMacro]);
|
||||||
|
|
@ -41,12 +43,12 @@ impl ProcMacros {
|
||||||
pub(crate) fn expand<S: ProcMacroSrvSpan>(
|
pub(crate) fn expand<S: ProcMacroSrvSpan>(
|
||||||
&self,
|
&self,
|
||||||
macro_name: &str,
|
macro_name: &str,
|
||||||
macro_body: tt::Subtree<S>,
|
macro_body: TopSubtree<S>,
|
||||||
attributes: Option<tt::Subtree<S>>,
|
attributes: Option<TopSubtree<S>>,
|
||||||
def_site: S,
|
def_site: S,
|
||||||
call_site: S,
|
call_site: S,
|
||||||
mixed_site: S,
|
mixed_site: S,
|
||||||
) -> Result<tt::Subtree<S>, crate::PanicMessage> {
|
) -> Result<TopSubtree<S>, crate::PanicMessage> {
|
||||||
let parsed_body = crate::server_impl::TokenStream::with_subtree(macro_body);
|
let parsed_body = crate::server_impl::TokenStream::with_subtree(macro_body);
|
||||||
|
|
||||||
let parsed_attributes = attributes
|
let parsed_attributes = attributes
|
||||||
|
|
|
||||||
|
|
@ -8,6 +8,8 @@
|
||||||
//!
|
//!
|
||||||
//! FIXME: No span and source file information is implemented yet
|
//! FIXME: No span and source file information is implemented yet
|
||||||
|
|
||||||
|
use std::fmt;
|
||||||
|
|
||||||
use proc_macro::bridge;
|
use proc_macro::bridge;
|
||||||
|
|
||||||
mod token_stream;
|
mod token_stream;
|
||||||
|
|
@ -19,6 +21,32 @@ pub mod token_id;
|
||||||
// pub use symbol::*;
|
// pub use symbol::*;
|
||||||
use tt::Spacing;
|
use tt::Spacing;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub(crate) struct TopSubtree<S>(pub(crate) Vec<tt::TokenTree<S>>);
|
||||||
|
|
||||||
|
impl<S: Copy + fmt::Debug> fmt::Debug for TopSubtree<S> {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
fmt::Debug::fmt(&tt::TokenTreesView::new(&self.0), f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<S: Copy> TopSubtree<S> {
|
||||||
|
pub(crate) fn top_subtree(&self) -> &tt::Subtree<S> {
|
||||||
|
let tt::TokenTree::Subtree(subtree) = &self.0[0] else {
|
||||||
|
unreachable!("the first token tree is always the top subtree");
|
||||||
|
};
|
||||||
|
subtree
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn from_bridge(group: bridge::Group<TokenStream<S>, S>) -> Self {
|
||||||
|
let delimiter = delim_to_internal(group.delimiter, group.span);
|
||||||
|
let mut tts =
|
||||||
|
group.stream.map(|it| it.token_trees).unwrap_or_else(|| Vec::with_capacity(1));
|
||||||
|
tts.insert(0, tt::TokenTree::Subtree(tt::Subtree { delimiter, len: tts.len() as u32 }));
|
||||||
|
TopSubtree(tts)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn delim_to_internal<S>(d: proc_macro::Delimiter, span: bridge::DelimSpan<S>) -> tt::Delimiter<S> {
|
fn delim_to_internal<S>(d: proc_macro::Delimiter, span: bridge::DelimSpan<S>) -> tt::Delimiter<S> {
|
||||||
let kind = match d {
|
let kind = match d {
|
||||||
proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
|
proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
|
||||||
|
|
|
||||||
|
|
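A tiny sketch (invented types, not the crate's real ones) of the invariant `top_subtree` relies on: index 0 of the flat vec is always a header describing everything that follows, which is what `from_bridge` establishes by prepending one in front of the already-flat contents.

#[derive(Debug, PartialEq)]
enum Tt {
    Header { len: usize }, // counts the flat entries that follow
    Leaf(char),
}

fn from_parts(contents: Vec<Tt>) -> Vec<Tt> {
    // Mirror the idea of `from_bridge`: prepend a header covering the stream.
    let mut flat = Vec::with_capacity(contents.len() + 1);
    flat.push(Tt::Header { len: contents.len() });
    flat.extend(contents);
    flat
}

fn main() {
    let flat = from_parts(vec![Tt::Leaf('a'), Tt::Leaf('b')]);
    assert_eq!(flat[0], Tt::Header { len: 2 });
}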
@ -6,7 +6,6 @@
|
||||||
//! change their representation to be compatible with rust-analyzer's.
|
//! change their representation to be compatible with rust-analyzer's.
|
||||||
use std::{
|
use std::{
|
||||||
collections::{HashMap, HashSet},
|
collections::{HashMap, HashSet},
|
||||||
iter,
|
|
||||||
ops::{Bound, Range},
|
ops::{Bound, Range},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
@ -15,14 +14,10 @@ use proc_macro::bridge::{self, server};
|
||||||
use span::{FileId, Span, FIXUP_ERASED_FILE_AST_ID_MARKER};
|
use span::{FileId, Span, FIXUP_ERASED_FILE_AST_ID_MARKER};
|
||||||
use tt::{TextRange, TextSize};
|
use tt::{TextRange, TextSize};
|
||||||
|
|
||||||
use crate::server_impl::{
|
use crate::server_impl::{literal_kind_to_internal, token_stream::TokenStreamBuilder, TopSubtree};
|
||||||
delim_to_external, delim_to_internal, literal_kind_to_external, literal_kind_to_internal,
|
|
||||||
token_stream::TokenStreamBuilder,
|
|
||||||
};
|
|
||||||
mod tt {
|
mod tt {
|
||||||
pub use tt::*;
|
pub use tt::*;
|
||||||
|
|
||||||
pub type Subtree = ::tt::Subtree<super::Span>;
|
|
||||||
pub type TokenTree = ::tt::TokenTree<super::Span>;
|
pub type TokenTree = ::tt::TokenTree<super::Span>;
|
||||||
pub type Leaf = ::tt::Leaf<super::Span>;
|
pub type Leaf = ::tt::Leaf<super::Span>;
|
||||||
pub type Literal = ::tt::Literal<super::Span>;
|
pub type Literal = ::tt::Literal<super::Span>;
|
||||||
|
|
@ -161,15 +156,8 @@ impl server::TokenStream for RaSpanServer {
|
||||||
) -> Self::TokenStream {
|
) -> Self::TokenStream {
|
||||||
match tree {
|
match tree {
|
||||||
bridge::TokenTree::Group(group) => {
|
bridge::TokenTree::Group(group) => {
|
||||||
let group = tt::Subtree {
|
let group = TopSubtree::from_bridge(group);
|
||||||
delimiter: delim_to_internal(group.delimiter, group.span),
|
TokenStream { token_trees: group.0 }
|
||||||
token_trees: match group.stream {
|
|
||||||
Some(stream) => stream.into_iter().collect(),
|
|
||||||
None => Box::new([]),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
let tree = tt::TokenTree::from(group);
|
|
||||||
Self::TokenStream::from_iter(iter::once(tree))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
bridge::TokenTree::Ident(ident) => {
|
bridge::TokenTree::Ident(ident) => {
|
||||||
|
|
@ -181,7 +169,7 @@ impl server::TokenStream for RaSpanServer {
|
||||||
};
|
};
|
||||||
let leaf = tt::Leaf::from(ident);
|
let leaf = tt::Leaf::from(ident);
|
||||||
let tree = tt::TokenTree::from(leaf);
|
let tree = tt::TokenTree::from(leaf);
|
||||||
Self::TokenStream::from_iter(iter::once(tree))
|
TokenStream { token_trees: vec![tree] }
|
||||||
}
|
}
|
||||||
|
|
||||||
bridge::TokenTree::Literal(literal) => {
|
bridge::TokenTree::Literal(literal) => {
|
||||||
|
|
@ -194,7 +182,7 @@ impl server::TokenStream for RaSpanServer {
|
||||||
|
|
||||||
let leaf: tt::Leaf = tt::Leaf::from(literal);
|
let leaf: tt::Leaf = tt::Leaf::from(literal);
|
||||||
let tree = tt::TokenTree::from(leaf);
|
let tree = tt::TokenTree::from(leaf);
|
||||||
Self::TokenStream::from_iter(iter::once(tree))
|
TokenStream { token_trees: vec![tree] }
|
||||||
}
|
}
|
||||||
|
|
||||||
bridge::TokenTree::Punct(p) => {
|
bridge::TokenTree::Punct(p) => {
|
||||||
|
|
@ -205,7 +193,7 @@ impl server::TokenStream for RaSpanServer {
|
||||||
};
|
};
|
||||||
let leaf = tt::Leaf::from(punct);
|
let leaf = tt::Leaf::from(punct);
|
||||||
let tree = tt::TokenTree::from(leaf);
|
let tree = tt::TokenTree::from(leaf);
|
||||||
Self::TokenStream::from_iter(iter::once(tree))
|
TokenStream { token_trees: vec![tree] }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -253,42 +241,7 @@ impl server::TokenStream for RaSpanServer {
|
||||||
&mut self,
|
&mut self,
|
||||||
stream: Self::TokenStream,
|
stream: Self::TokenStream,
|
||||||
) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
|
) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
|
||||||
stream
|
stream.into_bridge()
|
||||||
.into_iter()
|
|
||||||
.map(|tree| match tree {
|
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
|
|
||||||
bridge::TokenTree::Ident(bridge::Ident {
|
|
||||||
sym: ident.sym,
|
|
||||||
is_raw: ident.is_raw.yes(),
|
|
||||||
span: ident.span,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
|
|
||||||
bridge::TokenTree::Literal(bridge::Literal {
|
|
||||||
span: lit.span,
|
|
||||||
kind: literal_kind_to_external(lit.kind),
|
|
||||||
symbol: lit.symbol,
|
|
||||||
suffix: lit.suffix,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
|
|
||||||
bridge::TokenTree::Punct(bridge::Punct {
|
|
||||||
ch: punct.char as u8,
|
|
||||||
joint: punct.spacing == tt::Spacing::Joint,
|
|
||||||
span: punct.span,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
|
|
||||||
delimiter: delim_to_external(subtree.delimiter),
|
|
||||||
stream: if subtree.token_trees.is_empty() {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(subtree.token_trees.into_vec().into_iter().collect())
|
|
||||||
},
|
|
||||||
span: bridge::DelimSpan::from_single(subtree.delimiter.open),
|
|
||||||
}),
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -507,13 +460,14 @@ mod tests {
|
||||||
close: span,
|
close: span,
|
||||||
kind: tt::DelimiterKind::Brace,
|
kind: tt::DelimiterKind::Brace,
|
||||||
},
|
},
|
||||||
token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
|
len: 1,
|
||||||
|
}),
|
||||||
|
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
|
||||||
kind: tt::LitKind::Str,
|
kind: tt::LitKind::Str,
|
||||||
symbol: Symbol::intern("string"),
|
symbol: Symbol::intern("string"),
|
||||||
suffix: None,
|
suffix: None,
|
||||||
span,
|
span,
|
||||||
}))]),
|
})),
|
||||||
}),
|
|
||||||
],
|
],
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
@ -530,31 +484,34 @@ mod tests {
|
||||||
},
|
},
|
||||||
ctx: SyntaxContextId::ROOT,
|
ctx: SyntaxContextId::ROOT,
|
||||||
};
|
};
|
||||||
let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
|
let subtree_paren_a = vec![
|
||||||
|
tt::TokenTree::Subtree(tt::Subtree {
|
||||||
delimiter: tt::Delimiter {
|
delimiter: tt::Delimiter {
|
||||||
open: span,
|
open: span,
|
||||||
close: span,
|
close: span,
|
||||||
kind: tt::DelimiterKind::Parenthesis,
|
kind: tt::DelimiterKind::Parenthesis,
|
||||||
},
|
},
|
||||||
token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
|
len: 1,
|
||||||
|
}),
|
||||||
|
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
|
||||||
is_raw: tt::IdentIsRaw::No,
|
is_raw: tt::IdentIsRaw::No,
|
||||||
sym: Symbol::intern("a"),
|
sym: Symbol::intern("a"),
|
||||||
span,
|
span,
|
||||||
}))]),
|
})),
|
||||||
});
|
];
|
||||||
|
|
||||||
let t1 = TokenStream::from_str("(a)", span).unwrap();
|
let t1 = TokenStream::from_str("(a)", span).unwrap();
|
||||||
assert_eq!(t1.token_trees.len(), 1);
|
assert_eq!(t1.token_trees.len(), 2);
|
||||||
assert_eq!(t1.token_trees[0], subtree_paren_a);
|
assert!(t1.token_trees == subtree_paren_a);
|
||||||
|
|
||||||
let t2 = TokenStream::from_str("(a);", span).unwrap();
|
let t2 = TokenStream::from_str("(a);", span).unwrap();
|
||||||
assert_eq!(t2.token_trees.len(), 2);
|
assert_eq!(t2.token_trees.len(), 3);
|
||||||
assert_eq!(t2.token_trees[0], subtree_paren_a);
|
assert!(t2.token_trees[0..2] == subtree_paren_a);
|
||||||
|
|
||||||
let underscore = TokenStream::from_str("_", span).unwrap();
|
let underscore = TokenStream::from_str("_", span).unwrap();
|
||||||
assert_eq!(
|
assert!(
|
||||||
underscore.token_trees[0],
|
underscore.token_trees[0]
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
|
== tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
|
||||||
sym: Symbol::intern("_"),
|
sym: Symbol::intern("_"),
|
||||||
span,
|
span,
|
||||||
is_raw: tt::IdentIsRaw::No,
|
is_raw: tt::IdentIsRaw::No,
|
||||||
|
|
|
||||||
|
|
@ -1,30 +1,22 @@
|
||||||
//! proc-macro server backend based on [`proc_macro_api::msg::TokenId`] as the backing span.
|
//! proc-macro server backend based on [`proc_macro_api::msg::TokenId`] as the backing span.
|
||||||
//! This backend is rather inflexible, used by RustRover and older rust-analyzer versions.
|
//! This backend is rather inflexible, used by RustRover and older rust-analyzer versions.
|
||||||
use std::{
|
use std::ops::{Bound, Range};
|
||||||
iter,
|
|
||||||
ops::{Bound, Range},
|
|
||||||
};
|
|
||||||
|
|
||||||
use intern::Symbol;
|
use intern::Symbol;
|
||||||
use proc_macro::bridge::{self, server};
|
use proc_macro::bridge::{self, server};
|
||||||
|
|
||||||
use crate::server_impl::{
|
use crate::server_impl::{literal_kind_to_internal, token_stream::TokenStreamBuilder, TopSubtree};
|
||||||
delim_to_external, delim_to_internal, literal_kind_to_external, literal_kind_to_internal,
|
|
||||||
token_stream::TokenStreamBuilder,
|
|
||||||
};
|
|
||||||
mod tt {
|
mod tt {
|
||||||
pub use span::TokenId;
|
pub use span::TokenId;
|
||||||
|
|
||||||
pub use tt::*;
|
pub use tt::*;
|
||||||
|
|
||||||
pub type Subtree = ::tt::Subtree<TokenId>;
|
|
||||||
pub type TokenTree = ::tt::TokenTree<TokenId>;
|
pub type TokenTree = ::tt::TokenTree<TokenId>;
|
||||||
pub type Leaf = ::tt::Leaf<TokenId>;
|
pub type Leaf = ::tt::Leaf<TokenId>;
|
||||||
pub type Literal = ::tt::Literal<TokenId>;
|
pub type Literal = ::tt::Literal<TokenId>;
|
||||||
pub type Punct = ::tt::Punct<TokenId>;
|
pub type Punct = ::tt::Punct<TokenId>;
|
||||||
pub type Ident = ::tt::Ident<TokenId>;
|
pub type Ident = ::tt::Ident<TokenId>;
|
||||||
}
|
}
|
||||||
type Group = tt::Subtree;
|
|
||||||
type TokenTree = tt::TokenTree;
|
type TokenTree = tt::TokenTree;
|
||||||
type Punct = tt::Punct;
|
type Punct = tt::Punct;
|
||||||
type Spacing = tt::Spacing;
|
type Spacing = tt::Spacing;
|
||||||
|
|
@ -148,15 +140,8 @@ impl server::TokenStream for TokenIdServer {
|
||||||
) -> Self::TokenStream {
|
) -> Self::TokenStream {
|
||||||
match tree {
|
match tree {
|
||||||
bridge::TokenTree::Group(group) => {
|
bridge::TokenTree::Group(group) => {
|
||||||
let group = Group {
|
let group = TopSubtree::from_bridge(group);
|
||||||
delimiter: delim_to_internal(group.delimiter, group.span),
|
TokenStream { token_trees: group.0 }
|
||||||
token_trees: match group.stream {
|
|
||||||
Some(stream) => stream.into_iter().collect(),
|
|
||||||
None => Box::new([]),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
let tree = TokenTree::from(group);
|
|
||||||
Self::TokenStream::from_iter(iter::once(tree))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
bridge::TokenTree::Ident(ident) => {
|
bridge::TokenTree::Ident(ident) => {
|
||||||
|
|
@ -167,7 +152,7 @@ impl server::TokenStream for TokenIdServer {
|
||||||
};
|
};
|
||||||
let leaf = tt::Leaf::from(ident);
|
let leaf = tt::Leaf::from(ident);
|
||||||
let tree = TokenTree::from(leaf);
|
let tree = TokenTree::from(leaf);
|
||||||
Self::TokenStream::from_iter(iter::once(tree))
|
TokenStream { token_trees: vec![tree] }
|
||||||
}
|
}
|
||||||
|
|
||||||
bridge::TokenTree::Literal(literal) => {
|
bridge::TokenTree::Literal(literal) => {
|
||||||
|
|
@ -180,7 +165,7 @@ impl server::TokenStream for TokenIdServer {
|
||||||
|
|
||||||
let leaf = tt::Leaf::from(literal);
|
let leaf = tt::Leaf::from(literal);
|
||||||
let tree = TokenTree::from(leaf);
|
let tree = TokenTree::from(leaf);
|
||||||
Self::TokenStream::from_iter(iter::once(tree))
|
TokenStream { token_trees: vec![tree] }
|
||||||
}
|
}
|
||||||
|
|
||||||
bridge::TokenTree::Punct(p) => {
|
bridge::TokenTree::Punct(p) => {
|
||||||
|
|
@ -191,7 +176,7 @@ impl server::TokenStream for TokenIdServer {
|
||||||
};
|
};
|
||||||
let leaf = tt::Leaf::from(punct);
|
let leaf = tt::Leaf::from(punct);
|
||||||
let tree = TokenTree::from(leaf);
|
let tree = TokenTree::from(leaf);
|
||||||
Self::TokenStream::from_iter(iter::once(tree))
|
TokenStream { token_trees: vec![tree] }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -234,42 +219,7 @@ impl server::TokenStream for TokenIdServer {
|
||||||
&mut self,
|
&mut self,
|
||||||
stream: Self::TokenStream,
|
stream: Self::TokenStream,
|
||||||
) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
|
) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
|
||||||
stream
|
stream.into_bridge()
|
||||||
.into_iter()
|
|
||||||
.map(|tree| match tree {
|
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
|
|
||||||
bridge::TokenTree::Ident(bridge::Ident {
|
|
||||||
sym: ident.sym,
|
|
||||||
is_raw: ident.is_raw.yes(),
|
|
||||||
span: ident.span,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
|
|
||||||
bridge::TokenTree::Literal(bridge::Literal {
|
|
||||||
span: lit.span,
|
|
||||||
kind: literal_kind_to_external(lit.kind),
|
|
||||||
symbol: lit.symbol,
|
|
||||||
suffix: lit.suffix,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
|
|
||||||
bridge::TokenTree::Punct(bridge::Punct {
|
|
||||||
ch: punct.char as u8,
|
|
||||||
joint: punct.spacing == Spacing::Joint,
|
|
||||||
span: punct.span,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
|
|
||||||
delimiter: delim_to_external(subtree.delimiter),
|
|
||||||
stream: if subtree.token_trees.is_empty() {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(TokenStream { token_trees: subtree.token_trees.into_vec() })
|
|
||||||
},
|
|
||||||
span: bridge::DelimSpan::from_single(subtree.delimiter.open),
|
|
||||||
}),
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -398,7 +348,7 @@ mod tests {
|
||||||
close: tt::TokenId(0),
|
close: tt::TokenId(0),
|
||||||
kind: tt::DelimiterKind::Brace,
|
kind: tt::DelimiterKind::Brace,
|
||||||
},
|
},
|
||||||
token_trees: Box::new([]),
|
len: 0,
|
||||||
}),
|
}),
|
||||||
],
|
],
|
||||||
};
|
};
|
||||||
|
|
@ -408,31 +358,34 @@ mod tests {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_ra_server_from_str() {
|
fn test_ra_server_from_str() {
|
||||||
let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
|
let subtree_paren_a = vec![
|
||||||
|
tt::TokenTree::Subtree(tt::Subtree {
|
||||||
delimiter: tt::Delimiter {
|
delimiter: tt::Delimiter {
|
||||||
open: tt::TokenId(0),
|
open: tt::TokenId(0),
|
||||||
close: tt::TokenId(0),
|
close: tt::TokenId(0),
|
||||||
kind: tt::DelimiterKind::Parenthesis,
|
kind: tt::DelimiterKind::Parenthesis,
|
||||||
},
|
},
|
||||||
token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
|
len: 1,
|
||||||
|
}),
|
||||||
|
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
|
||||||
is_raw: tt::IdentIsRaw::No,
|
is_raw: tt::IdentIsRaw::No,
|
||||||
sym: Symbol::intern("a"),
|
sym: Symbol::intern("a"),
|
||||||
span: tt::TokenId(0),
|
span: tt::TokenId(0),
|
||||||
}))]),
|
})),
|
||||||
});
|
];
|
||||||
|
|
||||||
let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap();
|
let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap();
|
||||||
assert_eq!(t1.token_trees.len(), 1);
|
assert_eq!(t1.token_trees.len(), 2);
|
||||||
assert_eq!(t1.token_trees[0], subtree_paren_a);
|
assert!(t1.token_trees[0..2] == subtree_paren_a);
|
||||||
|
|
||||||
let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap();
|
let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap();
|
||||||
assert_eq!(t2.token_trees.len(), 2);
|
assert_eq!(t2.token_trees.len(), 3);
|
||||||
assert_eq!(t2.token_trees[0], subtree_paren_a);
|
assert!(t2.token_trees[0..2] == subtree_paren_a);
|
||||||
|
|
||||||
let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap();
|
let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap();
|
||||||
assert_eq!(
|
assert!(
|
||||||
underscore.token_trees[0],
|
underscore.token_trees[0]
|
||||||
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
|
== tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
|
||||||
sym: Symbol::intern("_"),
|
sym: Symbol::intern("_"),
|
||||||
span: tt::TokenId(0),
|
span: tt::TokenId(0),
|
||||||
is_raw: tt::IdentIsRaw::No,
|
is_raw: tt::IdentIsRaw::No,
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,20 @@
|
||||||
//! TokenStream implementation used by sysroot ABI
|
//! TokenStream implementation used by sysroot ABI
|
||||||
|
|
||||||
use tt::TokenTree;
|
use proc_macro::bridge;
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
use crate::server_impl::{delim_to_external, literal_kind_to_external, TopSubtree};
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
pub struct TokenStream<S> {
|
pub struct TokenStream<S> {
|
||||||
pub(super) token_trees: Vec<TokenTree<S>>,
|
pub(super) token_trees: Vec<tt::TokenTree<S>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<S: std::fmt::Debug + Copy> std::fmt::Debug for TokenStream<S> {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
f.debug_struct("TokenStream")
|
||||||
|
.field("token_trees", &tt::TokenTreesView::new(&self.token_trees))
|
||||||
|
.finish()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<S> Default for TokenStream<S> {
|
impl<S> Default for TokenStream<S> {
|
||||||
|
|
@ -13,84 +23,85 @@ impl<S> Default for TokenStream<S> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<S> TokenStream<S> {
|
impl<S: Copy> TokenStream<S> {
|
||||||
pub(crate) fn new() -> Self {
|
pub(crate) fn new() -> Self {
|
||||||
TokenStream::default()
|
TokenStream::default()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn with_subtree(subtree: tt::Subtree<S>) -> Self {
|
pub(crate) fn with_subtree(subtree: TopSubtree<S>) -> Self {
|
||||||
if subtree.delimiter.kind != tt::DelimiterKind::Invisible {
|
let delimiter_kind = subtree.top_subtree().delimiter.kind;
|
||||||
TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
|
let mut token_trees = subtree.0;
|
||||||
} else {
|
if delimiter_kind == tt::DelimiterKind::Invisible {
|
||||||
TokenStream { token_trees: subtree.token_trees.into_vec() }
|
token_trees.remove(0);
|
||||||
}
|
}
|
||||||
|
TokenStream { token_trees }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn into_subtree(self, call_site: S) -> tt::Subtree<S>
|
pub(crate) fn into_subtree(mut self, call_site: S) -> TopSubtree<S>
|
||||||
where
|
where
|
||||||
S: Copy,
|
S: Copy,
|
||||||
{
|
{
|
||||||
tt::Subtree {
|
self.token_trees.insert(
|
||||||
|
0,
|
||||||
|
tt::TokenTree::Subtree(tt::Subtree {
|
||||||
delimiter: tt::Delimiter {
|
delimiter: tt::Delimiter {
|
||||||
open: call_site,
|
open: call_site,
|
||||||
close: call_site,
|
close: call_site,
|
||||||
kind: tt::DelimiterKind::Invisible,
|
kind: tt::DelimiterKind::Invisible,
|
||||||
},
|
},
|
||||||
token_trees: self.token_trees.into_boxed_slice(),
|
len: self.token_trees.len() as u32,
|
||||||
}
|
}),
|
||||||
|
);
|
||||||
|
TopSubtree(self.token_trees)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) fn is_empty(&self) -> bool {
|
pub(super) fn is_empty(&self) -> bool {
|
||||||
self.token_trees.is_empty()
|
self.token_trees.is_empty()
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
/// Creates a token stream containing a single token tree.
|
pub(crate) fn into_bridge(self) -> Vec<bridge::TokenTree<Self, S, intern::Symbol>> {
|
||||||
impl<S> From<TokenTree<S>> for TokenStream<S> {
|
let mut result = Vec::new();
|
||||||
fn from(tree: TokenTree<S>) -> TokenStream<S> {
|
let mut iter = self.token_trees.into_iter();
|
||||||
TokenStream { token_trees: vec![tree] }
|
while let Some(tree) = iter.next() {
|
||||||
|
match tree {
|
||||||
|
tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
|
||||||
|
result.push(bridge::TokenTree::Ident(bridge::Ident {
|
||||||
|
sym: ident.sym,
|
||||||
|
is_raw: ident.is_raw.yes(),
|
||||||
|
span: ident.span,
|
||||||
|
}))
|
||||||
}
|
}
|
||||||
}
|
tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
|
||||||
|
result.push(bridge::TokenTree::Literal(bridge::Literal {
|
||||||
/// Collects a number of token trees into a single stream.
|
span: lit.span,
|
||||||
impl<S> FromIterator<TokenTree<S>> for TokenStream<S> {
|
kind: literal_kind_to_external(lit.kind),
|
||||||
fn from_iter<I: IntoIterator<Item = TokenTree<S>>>(trees: I) -> Self {
|
symbol: lit.symbol,
|
||||||
trees.into_iter().map(TokenStream::from).collect()
|
suffix: lit.suffix,
|
||||||
|
}))
|
||||||
}
|
}
|
||||||
}
|
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
|
||||||
|
result.push(bridge::TokenTree::Punct(bridge::Punct {
|
||||||
/// A "flattening" operation on token streams, collects token trees
|
ch: punct.char as u8,
|
||||||
/// from multiple token streams into a single stream.
|
joint: punct.spacing == tt::Spacing::Joint,
|
||||||
impl<S> FromIterator<TokenStream<S>> for TokenStream<S> {
|
span: punct.span,
|
||||||
fn from_iter<I: IntoIterator<Item = TokenStream<S>>>(streams: I) -> Self {
|
}))
|
||||||
let mut builder = TokenStreamBuilder::new();
|
|
||||||
streams.into_iter().for_each(|stream| builder.push(stream));
|
|
||||||
builder.build()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<S> Extend<TokenTree<S>> for TokenStream<S> {
|
|
||||||
fn extend<I: IntoIterator<Item = TokenTree<S>>>(&mut self, trees: I) {
|
|
||||||
self.extend(trees.into_iter().map(TokenStream::from));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<S> Extend<TokenStream<S>> for TokenStream<S> {
|
|
||||||
fn extend<I: IntoIterator<Item = TokenStream<S>>>(&mut self, streams: I) {
|
|
||||||
for item in streams {
|
|
||||||
for tkn in item {
|
|
||||||
match tkn {
|
|
||||||
tt::TokenTree::Subtree(subtree)
|
|
||||||
if subtree.delimiter.kind == tt::DelimiterKind::Invisible =>
|
|
||||||
{
|
|
||||||
self.token_trees.extend(subtree.token_trees.into_vec().into_iter());
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
self.token_trees.push(tkn);
|
|
||||||
}
|
}
|
||||||
|
tt::TokenTree::Subtree(subtree) => {
|
||||||
|
result.push(bridge::TokenTree::Group(bridge::Group {
|
||||||
|
delimiter: delim_to_external(subtree.delimiter),
|
||||||
|
stream: if subtree.len == 0 {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(TokenStream {
|
||||||
|
token_trees: iter.by_ref().take(subtree.usize_len()).collect(),
|
||||||
|
})
|
||||||
|
},
|
||||||
|
span: bridge::DelimSpan::from_single(subtree.delimiter.open),
|
||||||
|
}))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
result
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -103,19 +114,7 @@ pub(super) mod token_stream_impls {
|
||||||
|
|
||||||
use core::fmt;
|
use core::fmt;
|
||||||
|
|
||||||
use super::{TokenStream, TokenTree};
|
use super::{TokenStream, TopSubtree};
|
||||||
|
|
||||||
/// An iterator over `TokenStream`'s `TokenTree`s.
|
|
||||||
/// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
|
|
||||||
/// and returns whole groups as token trees.
|
|
||||||
impl<S> IntoIterator for TokenStream<S> {
|
|
||||||
type Item = TokenTree<S>;
|
|
||||||
type IntoIter = std::vec::IntoIter<TokenTree<S>>;
|
|
||||||
|
|
||||||
fn into_iter(self) -> Self::IntoIter {
|
|
||||||
self.token_trees.into_iter()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Attempts to break the string into tokens and parse those tokens into a token stream.
|
/// Attempts to break the string into tokens and parse those tokens into a token stream.
|
||||||
/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
|
/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
|
||||||
|
|
@ -133,7 +132,7 @@ pub(super) mod token_stream_impls {
|
||||||
)
|
)
|
||||||
.ok_or_else(|| format!("lexing error: {src}"))?;
|
.ok_or_else(|| format!("lexing error: {src}"))?;
|
||||||
|
|
||||||
Ok(TokenStream::with_subtree(subtree))
|
Ok(TokenStream::with_subtree(TopSubtree(subtree.0.into_vec())))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -145,13 +144,13 @@ pub(super) mod token_stream_impls {
}
}

impl<S> TokenStreamBuilder<S> {
impl<S: Copy> TokenStreamBuilder<S> {
pub(super) fn new() -> TokenStreamBuilder<S> {
TokenStreamBuilder { acc: TokenStream::new() }
}

pub(super) fn push(&mut self, stream: TokenStream<S>) {
self.acc.extend(stream)
self.acc.token_trees.extend(stream.token_trees)
}

pub(super) fn build(self) -> TokenStream<S> {
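The builder change above shows that, once token trees are stored in a contiguous `Vec`, combining streams reduces to appending vectors. A rough, self-contained sketch of that idea with invented `FlatToken`/`Stream`/`StreamBuilder` types (not the crate's actual ones):

// Sketch: with flat storage, pushing a stream into a builder is a plain Vec append.
#[derive(Debug)]
enum FlatToken {
    Ident(String),
    Punct(char),
    // A subtree entry records its delimiter and how many following tokens belong to it.
    Open { delim: char, len: usize },
}

#[derive(Debug, Default)]
struct Stream {
    tokens: Vec<FlatToken>,
}

#[derive(Debug, Default)]
struct StreamBuilder {
    acc: Stream,
}

impl StreamBuilder {
    fn push(&mut self, stream: Stream) {
        // No tree surgery: extending the accumulator is a linear append.
        self.acc.tokens.extend(stream.tokens);
    }
    fn build(self) -> Stream {
        self.acc
    }
}

fn main() {
    let mut b = StreamBuilder::default();
    b.push(Stream { tokens: vec![FlatToken::Ident("foo".into()), FlatToken::Punct('!')] });
    b.push(Stream {
        tokens: vec![FlatToken::Open { delim: '(', len: 1 }, FlatToken::Ident("bar".into())],
    });
    let s = b.build();
    assert_eq!(s.tokens.len(), 4);
    println!("{s:?}");
}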
@ -7,10 +7,12 @@ use tt::TextRange;
use crate::{dylib, proc_macro_test_dylib_path, EnvSnapshot, ProcMacroSrv};

fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream<TokenId> {
crate::server_impl::TokenStream::with_subtree(
crate::server_impl::TokenStream::with_subtree(crate::server_impl::TopSubtree(
syntax_bridge::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src)
.unwrap(),
.unwrap()
)
.0
.into_vec(),
))
}

fn parse_string_spanned(

@ -18,9 +20,12 @@ fn parse_string_spanned(
call_site: SyntaxContextId,
src: &str,
) -> crate::server_impl::TokenStream<Span> {
crate::server_impl::TokenStream::with_subtree(
crate::server_impl::TokenStream::with_subtree(crate::server_impl::TopSubtree(
syntax_bridge::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(),
syntax_bridge::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src)
)
.unwrap()
.0
.into_vec(),
))
}

pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect, expect_s: Expect) {
@ -5,17 +5,14 @@ use std::fmt;
use intern::Symbol;
use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, SpanAnchor, SpanData, SpanMap};
use stdx::{format_to, never, non_empty_vec::NonEmptyVec};
use stdx::{format_to, never};
use syntax::{
ast::{self, make::tokens::doc_comment},
format_smolstr, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement,
SyntaxKind::{self, *},
SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
};
use tt::{
use tt::{buffer::Cursor, token_to_literal};
buffer::{Cursor, TokenBuffer},
token_to_literal,
};

pub mod prettify_macro_expansion;
mod to_parser_input;
@ -99,7 +96,7 @@ pub fn syntax_node_to_token_tree<Ctx, SpanMap>(
|
||||||
map: SpanMap,
|
map: SpanMap,
|
||||||
span: SpanData<Ctx>,
|
span: SpanData<Ctx>,
|
||||||
mode: DocCommentDesugarMode,
|
mode: DocCommentDesugarMode,
|
||||||
) -> tt::Subtree<SpanData<Ctx>>
|
) -> tt::TopSubtree<SpanData<Ctx>>
|
||||||
where
|
where
|
||||||
SpanData<Ctx>: Copy + fmt::Debug,
|
SpanData<Ctx>: Copy + fmt::Debug,
|
||||||
SpanMap: SpanMapper<SpanData<Ctx>>,
|
SpanMap: SpanMapper<SpanData<Ctx>>,
|
||||||
|
|
@ -118,7 +115,7 @@ pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap>(
|
||||||
remove: FxHashSet<SyntaxElement>,
|
remove: FxHashSet<SyntaxElement>,
|
||||||
call_site: SpanData<Ctx>,
|
call_site: SpanData<Ctx>,
|
||||||
mode: DocCommentDesugarMode,
|
mode: DocCommentDesugarMode,
|
||||||
) -> tt::Subtree<SpanData<Ctx>>
|
) -> tt::TopSubtree<SpanData<Ctx>>
|
||||||
where
|
where
|
||||||
SpanMap: SpanMapper<SpanData<Ctx>>,
|
SpanMap: SpanMapper<SpanData<Ctx>>,
|
||||||
SpanData<Ctx>: Copy + fmt::Debug,
|
SpanData<Ctx>: Copy + fmt::Debug,
|
||||||
|
|
@ -142,7 +139,7 @@ where
|
||||||
/// Converts a [`tt::Subtree`] back to a [`SyntaxNode`].
|
/// Converts a [`tt::Subtree`] back to a [`SyntaxNode`].
|
||||||
/// The produced `SpanMap` contains a mapping from the syntax nodes offsets to the subtree's spans.
|
/// The produced `SpanMap` contains a mapping from the syntax nodes offsets to the subtree's spans.
|
||||||
pub fn token_tree_to_syntax_node<Ctx>(
|
pub fn token_tree_to_syntax_node<Ctx>(
|
||||||
tt: &tt::Subtree<SpanData<Ctx>>,
|
tt: &tt::TopSubtree<SpanData<Ctx>>,
|
||||||
entry_point: parser::TopEntryPoint,
|
entry_point: parser::TopEntryPoint,
|
||||||
edition: parser::Edition,
|
edition: parser::Edition,
|
||||||
) -> (Parse<SyntaxNode>, SpanMap<Ctx>)
|
) -> (Parse<SyntaxNode>, SpanMap<Ctx>)
|
||||||
|
|
@ -150,16 +147,10 @@ where
|
||||||
SpanData<Ctx>: Copy + fmt::Debug,
|
SpanData<Ctx>: Copy + fmt::Debug,
|
||||||
Ctx: PartialEq,
|
Ctx: PartialEq,
|
||||||
{
|
{
|
||||||
let buffer = match tt {
|
let buffer = tt.view().strip_invisible();
|
||||||
tt::Subtree {
|
let parser_input = to_parser_input(edition, buffer);
|
||||||
delimiter: tt::Delimiter { kind: tt::DelimiterKind::Invisible, .. },
|
|
||||||
token_trees,
|
|
||||||
} => TokenBuffer::from_tokens(token_trees),
|
|
||||||
_ => TokenBuffer::from_subtree(tt),
|
|
||||||
};
|
|
||||||
let parser_input = to_parser_input(edition, &buffer);
|
|
||||||
let parser_output = entry_point.parse(&parser_input, edition);
|
let parser_output = entry_point.parse(&parser_input, edition);
|
||||||
let mut tree_sink = TtTreeSink::new(buffer.begin());
|
let mut tree_sink = TtTreeSink::new(buffer.cursor());
|
||||||
for event in parser_output.iter() {
|
for event in parser_output.iter() {
|
||||||
match event {
|
match event {
|
||||||
parser::Step::Token { kind, n_input_tokens: n_raw_tokens } => {
|
parser::Step::Token { kind, n_input_tokens: n_raw_tokens } => {
|
||||||
|
|
@ -183,7 +174,7 @@ pub fn parse_to_token_tree<Ctx>(
|
||||||
anchor: SpanAnchor,
|
anchor: SpanAnchor,
|
||||||
ctx: Ctx,
|
ctx: Ctx,
|
||||||
text: &str,
|
text: &str,
|
||||||
) -> Option<tt::Subtree<SpanData<Ctx>>>
|
) -> Option<tt::TopSubtree<SpanData<Ctx>>>
|
||||||
where
|
where
|
||||||
SpanData<Ctx>: Copy + fmt::Debug,
|
SpanData<Ctx>: Copy + fmt::Debug,
|
||||||
Ctx: Copy,
|
Ctx: Copy,
|
||||||
|
|
@ -202,7 +193,7 @@ pub fn parse_to_token_tree_static_span<S>(
|
||||||
edition: Edition,
|
edition: Edition,
|
||||||
span: S,
|
span: S,
|
||||||
text: &str,
|
text: &str,
|
||||||
) -> Option<tt::Subtree<S>>
|
) -> Option<tt::TopSubtree<S>>
|
||||||
where
|
where
|
||||||
S: Copy + fmt::Debug,
|
S: Copy + fmt::Debug,
|
||||||
{
|
{
|
||||||
|
|
@ -215,47 +206,38 @@ where
|
||||||
Some(convert_tokens(&mut conv))
|
Some(convert_tokens(&mut conv))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn convert_tokens<S, C>(conv: &mut C) -> tt::Subtree<S>
|
fn convert_tokens<S, C>(conv: &mut C) -> tt::TopSubtree<S>
|
||||||
where
|
where
|
||||||
C: TokenConverter<S>,
|
C: TokenConverter<S>,
|
||||||
S: Copy + fmt::Debug,
|
S: Copy + fmt::Debug,
|
||||||
C::Token: fmt::Debug,
|
C::Token: fmt::Debug,
|
||||||
{
|
{
|
||||||
let entry = tt::SubtreeBuilder {
|
let mut builder =
|
||||||
delimiter: tt::Delimiter::invisible_spanned(conv.call_site()),
|
tt::TopSubtreeBuilder::new(tt::Delimiter::invisible_spanned(conv.call_site()));
|
||||||
token_trees: vec![],
|
|
||||||
};
|
|
||||||
let mut stack = NonEmptyVec::new(entry);
|
|
||||||
|
|
||||||
while let Some((token, abs_range)) = conv.bump() {
|
while let Some((token, abs_range)) = conv.bump() {
|
||||||
let tt::SubtreeBuilder { delimiter, token_trees } = stack.last_mut();
|
let delimiter = builder.expected_delimiter().map(|it| it.kind);
|
||||||
|
|
||||||
let tt = match token.as_leaf() {
|
let tt = match token.as_leaf() {
|
||||||
Some(leaf) => tt::TokenTree::Leaf(leaf.clone()),
|
Some(leaf) => leaf.clone(),
|
||||||
None => match token.kind(conv) {
|
None => match token.kind(conv) {
|
||||||
// Desugar doc comments into doc attributes
|
// Desugar doc comments into doc attributes
|
||||||
COMMENT => {
|
COMMENT => {
|
||||||
let span = conv.span_for(abs_range);
|
let span = conv.span_for(abs_range);
|
||||||
if let Some(tokens) = conv.convert_doc_comment(&token, span) {
|
conv.convert_doc_comment(&token, span, &mut builder);
|
||||||
token_trees.extend(tokens);
|
|
||||||
}
|
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
kind if kind.is_punct() && kind != UNDERSCORE => {
|
kind if kind.is_punct() && kind != UNDERSCORE => {
|
||||||
let expected = match delimiter.kind {
|
let expected = match delimiter {
|
||||||
tt::DelimiterKind::Parenthesis => Some(T![')']),
|
Some(tt::DelimiterKind::Parenthesis) => Some(T![')']),
|
||||||
tt::DelimiterKind::Brace => Some(T!['}']),
|
Some(tt::DelimiterKind::Brace) => Some(T!['}']),
|
||||||
tt::DelimiterKind::Bracket => Some(T![']']),
|
Some(tt::DelimiterKind::Bracket) => Some(T![']']),
|
||||||
tt::DelimiterKind::Invisible => None,
|
Some(tt::DelimiterKind::Invisible) | None => None,
|
||||||
};
|
};
|
||||||
|
|
||||||
// Current token is a closing delimiter that we expect, fix up the closing span
|
// Current token is a closing delimiter that we expect, fix up the closing span
|
||||||
// and end the subtree here
|
// and end the subtree here
|
||||||
if matches!(expected, Some(expected) if expected == kind) {
|
if matches!(expected, Some(expected) if expected == kind) {
|
||||||
if let Some(mut subtree) = stack.pop() {
|
builder.close(conv.span_for(abs_range));
|
||||||
subtree.delimiter.close = conv.span_for(abs_range);
|
|
||||||
stack.last_mut().token_trees.push(subtree.build().into());
|
|
||||||
}
|
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -268,16 +250,7 @@ where
|
||||||
|
|
||||||
// Start a new subtree
|
// Start a new subtree
|
||||||
if let Some(kind) = delim {
|
if let Some(kind) = delim {
|
||||||
let open = conv.span_for(abs_range);
|
builder.open(kind, conv.span_for(abs_range));
|
||||||
stack.push(tt::SubtreeBuilder {
|
|
||||||
delimiter: tt::Delimiter {
|
|
||||||
open,
|
|
||||||
// will be overwritten on subtree close above
|
|
||||||
close: open,
|
|
||||||
kind,
|
|
||||||
},
|
|
||||||
token_trees: vec![],
|
|
||||||
});
|
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -289,7 +262,6 @@ where
|
||||||
panic!("Token from lexer must be single char: token = {token:#?}")
|
panic!("Token from lexer must be single char: token = {token:#?}")
|
||||||
};
|
};
|
||||||
tt::Leaf::from(tt::Punct { char, spacing, span: conv.span_for(abs_range) })
|
tt::Leaf::from(tt::Punct { char, spacing, span: conv.span_for(abs_range) })
|
||||||
.into()
|
|
||||||
}
|
}
|
||||||
kind => {
|
kind => {
|
||||||
macro_rules! make_ident {
|
macro_rules! make_ident {
|
||||||
|
|
@ -320,7 +292,7 @@ where
|
||||||
span: conv
|
span: conv
|
||||||
.span_for(TextRange::at(abs_range.start(), TextSize::of('\''))),
|
.span_for(TextRange::at(abs_range.start(), TextSize::of('\''))),
|
||||||
});
|
});
|
||||||
token_trees.push(apostrophe.into());
|
builder.push(apostrophe);
|
||||||
|
|
||||||
let ident = tt::Leaf::from(tt::Ident {
|
let ident = tt::Leaf::from(tt::Ident {
|
||||||
sym: Symbol::intern(&token.to_text(conv)[1..]),
|
sym: Symbol::intern(&token.to_text(conv)[1..]),
|
||||||
|
|
@ -330,47 +302,26 @@ where
|
||||||
)),
|
)),
|
||||||
is_raw: tt::IdentIsRaw::No,
|
is_raw: tt::IdentIsRaw::No,
|
||||||
});
|
});
|
||||||
token_trees.push(ident.into());
|
builder.push(ident);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
_ => continue,
|
_ => continue,
|
||||||
};
|
};
|
||||||
|
|
||||||
leaf.into()
|
leaf
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
token_trees.push(tt);
|
builder.push(tt);
|
||||||
}
|
}
|
||||||
|
|
||||||
// If we get here, we've consumed all input tokens.
|
// If we get here, we've consumed all input tokens.
|
||||||
// We might have more than one subtree in the stack, if the delimiters are improperly balanced.
|
// We might have more than one subtree in the stack, if the delimiters are improperly balanced.
|
||||||
// Merge them so we're left with one.
|
// Merge them so we're left with one.
|
||||||
while let Some(entry) = stack.pop() {
|
builder.flatten_unclosed_subtrees();
|
||||||
let parent = stack.last_mut();
|
|
||||||
|
|
||||||
let leaf: tt::Leaf<_> = tt::Punct {
|
builder.build_skip_top_subtree()
|
||||||
span: entry.delimiter.open,
|
|
||||||
char: match entry.delimiter.kind {
|
|
||||||
tt::DelimiterKind::Parenthesis => '(',
|
|
||||||
tt::DelimiterKind::Brace => '{',
|
|
||||||
tt::DelimiterKind::Bracket => '[',
|
|
||||||
tt::DelimiterKind::Invisible => '$',
|
|
||||||
},
|
|
||||||
spacing: tt::Spacing::Alone,
|
|
||||||
}
|
|
||||||
.into();
|
|
||||||
parent.token_trees.push(leaf.into());
|
|
||||||
parent.token_trees.extend(entry.token_trees);
|
|
||||||
}
|
|
||||||
|
|
||||||
let subtree = stack.into_last().build();
|
|
||||||
if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees {
|
|
||||||
first.clone()
|
|
||||||
} else {
|
|
||||||
subtree
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_single_token_op(kind: SyntaxKind) -> bool {
|
fn is_single_token_op(kind: SyntaxKind) -> bool {
|
||||||
|
|
@ -436,25 +387,17 @@ fn convert_doc_comment<S: Copy>(
|
||||||
token: &syntax::SyntaxToken,
|
token: &syntax::SyntaxToken,
|
||||||
span: S,
|
span: S,
|
||||||
mode: DocCommentDesugarMode,
|
mode: DocCommentDesugarMode,
|
||||||
) -> Option<Vec<tt::TokenTree<S>>> {
|
builder: &mut tt::TopSubtreeBuilder<S>,
|
||||||
let comment = ast::Comment::cast(token.clone())?;
|
) {
|
||||||
let doc = comment.kind().doc?;
|
let Some(comment) = ast::Comment::cast(token.clone()) else { return };
|
||||||
|
let Some(doc) = comment.kind().doc else { return };
|
||||||
|
|
||||||
let mk_ident = |s: &str| {
|
let mk_ident = |s: &str| {
|
||||||
tt::TokenTree::from(tt::Leaf::from(tt::Ident {
|
tt::Leaf::from(tt::Ident { sym: Symbol::intern(s), span, is_raw: tt::IdentIsRaw::No })
|
||||||
sym: Symbol::intern(s),
|
|
||||||
span,
|
|
||||||
is_raw: tt::IdentIsRaw::No,
|
|
||||||
}))
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let mk_punct = |c: char| {
|
let mk_punct =
|
||||||
tt::TokenTree::from(tt::Leaf::from(tt::Punct {
|
|c: char| tt::Leaf::from(tt::Punct { char: c, spacing: tt::Spacing::Alone, span });
|
||||||
char: c,
|
|
||||||
spacing: tt::Spacing::Alone,
|
|
||||||
span,
|
|
||||||
}))
|
|
||||||
};
|
|
||||||
|
|
||||||
let mk_doc_literal = |comment: &ast::Comment| {
|
let mk_doc_literal = |comment: &ast::Comment| {
|
||||||
let prefix_len = comment.prefix().len();
|
let prefix_len = comment.prefix().len();
|
||||||
|
|
@ -467,24 +410,20 @@ fn convert_doc_comment<S: Copy>(
|
||||||
let (text, kind) = desugar_doc_comment_text(text, mode);
|
let (text, kind) = desugar_doc_comment_text(text, mode);
|
||||||
let lit = tt::Literal { symbol: text, span, kind, suffix: None };
|
let lit = tt::Literal { symbol: text, span, kind, suffix: None };
|
||||||
|
|
||||||
tt::TokenTree::from(tt::Leaf::from(lit))
|
tt::Leaf::from(lit)
|
||||||
};
|
};
|
||||||
|
|
||||||
// Make `doc="\" Comments\""
|
// Make `doc="\" Comments\""
|
||||||
let meta_tkns = Box::new([mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)]);
|
let meta_tkns = [mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];
|
||||||
|
|
||||||
// Make `#![]`
|
// Make `#![]`
|
||||||
let mut token_trees = Vec::with_capacity(3);
|
builder.push(mk_punct('#'));
|
||||||
token_trees.push(mk_punct('#'));
|
|
||||||
if let ast::CommentPlacement::Inner = doc {
|
if let ast::CommentPlacement::Inner = doc {
|
||||||
token_trees.push(mk_punct('!'));
|
builder.push(mk_punct('!'));
|
||||||
}
|
}
|
||||||
token_trees.push(tt::TokenTree::from(tt::Subtree {
|
builder.open(tt::DelimiterKind::Bracket, span);
|
||||||
delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
|
builder.extend(meta_tkns);
|
||||||
token_trees: meta_tkns,
|
builder.close(span);
|
||||||
}));
|
|
||||||
|
|
||||||
Some(token_trees)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A raw token (straight from lexer) converter
|
/// A raw token (straight from lexer) converter
|
||||||
|
|
@ -518,7 +457,12 @@ trait SrcToken<Ctx, S> {
trait TokenConverter<S>: Sized {
type Token: SrcToken<Self, S>;

fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>>;
fn convert_doc_comment(
&self,
token: &Self::Token,
span: S,
builder: &mut tt::TopSubtreeBuilder<S>,
);

fn bump(&mut self) -> Option<(Self::Token, TextRange)>;
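The trait change above replaces a `-> Option<Vec<tt::TokenTree<S>>>` return value with an out-parameter builder, so desugared doc comments are written straight into the token stream under construction. A small illustrative sketch of that refactor pattern, with hypothetical `Token`/`Builder` types standing in for the real ones:

// Sketch: return-a-Vec vs write-into-the-caller's-builder.
#[derive(Debug)]
enum Token {
    Ident(&'static str),
    Punct(char),
}

#[derive(Debug, Default)]
struct Builder {
    out: Vec<Token>,
}

impl Builder {
    fn push(&mut self, t: Token) {
        self.out.push(t);
    }
}

// Old shape: allocate an intermediate Vec and hand it back.
fn convert_returning(is_doc_comment: bool) -> Option<Vec<Token>> {
    is_doc_comment.then(|| vec![Token::Punct('#'), Token::Ident("doc")])
}

// New shape: borrow the builder and append in place; nothing to splice afterwards.
fn convert_into(is_doc_comment: bool, builder: &mut Builder) {
    if !is_doc_comment {
        return;
    }
    builder.push(Token::Punct('#'));
    builder.push(Token::Ident("doc"));
}

fn main() {
    let mut b = Builder::default();
    if let Some(tokens) = convert_returning(true) {
        tokens.into_iter().for_each(|t| b.push(t));
    }
    convert_into(true, &mut b);
    println!("{b:?}");
}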
@ -567,9 +511,10 @@ where
|
||||||
&self,
|
&self,
|
||||||
&token: &usize,
|
&token: &usize,
|
||||||
span: SpanData<Ctx>,
|
span: SpanData<Ctx>,
|
||||||
) -> Option<Vec<tt::TokenTree<SpanData<Ctx>>>> {
|
builder: &mut tt::TopSubtreeBuilder<SpanData<Ctx>>,
|
||||||
|
) {
|
||||||
let text = self.lexed.text(token);
|
let text = self.lexed.text(token);
|
||||||
convert_doc_comment(&doc_comment(text), span, self.mode)
|
convert_doc_comment(&doc_comment(text), span, self.mode, builder);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
|
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
|
||||||
|
|
@ -606,9 +551,9 @@ where
|
||||||
{
|
{
|
||||||
type Token = usize;
|
type Token = usize;
|
||||||
|
|
||||||
fn convert_doc_comment(&self, &token: &usize, span: S) -> Option<Vec<tt::TokenTree<S>>> {
|
fn convert_doc_comment(&self, &token: &usize, span: S, builder: &mut tt::TopSubtreeBuilder<S>) {
|
||||||
let text = self.lexed.text(token);
|
let text = self.lexed.text(token);
|
||||||
convert_doc_comment(&doc_comment(text), span, self.mode)
|
convert_doc_comment(&doc_comment(text), span, self.mode, builder);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
|
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
|
||||||
|
|
@ -773,8 +718,13 @@ where
|
||||||
SpanMap: SpanMapper<S>,
|
SpanMap: SpanMapper<S>,
|
||||||
{
|
{
|
||||||
type Token = SynToken<S>;
|
type Token = SynToken<S>;
|
||||||
fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>> {
|
fn convert_doc_comment(
|
||||||
convert_doc_comment(token.token(), span, self.mode)
|
&self,
|
||||||
|
token: &Self::Token,
|
||||||
|
span: S,
|
||||||
|
builder: &mut tt::TopSubtreeBuilder<S>,
|
||||||
|
) {
|
||||||
|
convert_doc_comment(token.token(), span, self.mode, builder);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
|
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
|
||||||
|
|
@ -899,15 +849,12 @@ where
|
||||||
/// This occurs when a float literal is used as a field access.
|
/// This occurs when a float literal is used as a field access.
|
||||||
fn float_split(&mut self, has_pseudo_dot: bool) {
|
fn float_split(&mut self, has_pseudo_dot: bool) {
|
||||||
let (text, span) = match self.cursor.token_tree() {
|
let (text, span) = match self.cursor.token_tree() {
|
||||||
Some(tt::buffer::TokenTreeRef::Leaf(
|
Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
|
||||||
tt::Leaf::Literal(tt::Literal {
|
|
||||||
symbol: text,
|
symbol: text,
|
||||||
span,
|
span,
|
||||||
kind: tt::LitKind::Float,
|
kind: tt::LitKind::Float,
|
||||||
suffix: _,
|
suffix: _,
|
||||||
}),
|
}))) => (text.as_str(), *span),
|
||||||
_,
|
|
||||||
)) => (text.as_str(), *span),
|
|
||||||
tt => unreachable!("{tt:?}"),
|
tt => unreachable!("{tt:?}"),
|
||||||
};
|
};
|
||||||
// FIXME: Span splitting
|
// FIXME: Span splitting
|
||||||
|
|
@ -942,7 +889,7 @@ where
|
||||||
}
|
}
|
||||||
None => unreachable!(),
|
None => unreachable!(),
|
||||||
}
|
}
|
||||||
self.cursor = self.cursor.bump();
|
self.cursor.bump();
|
||||||
}
|
}
|
||||||
|
|
||||||
fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
|
fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
|
||||||
|
|
@ -950,24 +897,24 @@ where
|
||||||
n_tokens = 2;
|
n_tokens = 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut last = self.cursor;
|
let mut last_two = self.cursor.peek_two_leaves();
|
||||||
let mut combined_span = None;
|
let mut combined_span = None;
|
||||||
'tokens: for _ in 0..n_tokens {
|
'tokens: for _ in 0..n_tokens {
|
||||||
let tmp: u8;
|
let tmp: u8;
|
||||||
if self.cursor.eof() {
|
if self.cursor.eof() {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
last = self.cursor;
|
last_two = self.cursor.peek_two_leaves();
|
||||||
let (text, span) = loop {
|
let (text, span) = loop {
|
||||||
break match self.cursor.token_tree() {
|
break match self.cursor.token_tree() {
|
||||||
Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => match leaf {
|
Some(tt::TokenTree::Leaf(leaf)) => match leaf {
|
||||||
tt::Leaf::Ident(ident) => {
|
tt::Leaf::Ident(ident) => {
|
||||||
if ident.is_raw.yes() {
|
if ident.is_raw.yes() {
|
||||||
self.buf.push_str("r#");
|
self.buf.push_str("r#");
|
||||||
self.text_pos += TextSize::of("r#");
|
self.text_pos += TextSize::of("r#");
|
||||||
}
|
}
|
||||||
let r = (ident.sym.as_str(), ident.span);
|
let r = (ident.sym.as_str(), ident.span);
|
||||||
self.cursor = self.cursor.bump();
|
self.cursor.bump();
|
||||||
r
|
r
|
||||||
}
|
}
|
||||||
tt::Leaf::Punct(punct) => {
|
tt::Leaf::Punct(punct) => {
|
||||||
|
|
@ -977,7 +924,7 @@ where
|
||||||
std::str::from_utf8(std::slice::from_ref(&tmp)).unwrap(),
|
std::str::from_utf8(std::slice::from_ref(&tmp)).unwrap(),
|
||||||
punct.span,
|
punct.span,
|
||||||
);
|
);
|
||||||
self.cursor = self.cursor.bump();
|
self.cursor.bump();
|
||||||
r
|
r
|
||||||
}
|
}
|
||||||
tt::Leaf::Literal(lit) => {
|
tt::Leaf::Literal(lit) => {
|
||||||
|
|
@ -989,20 +936,19 @@ where
|
||||||
None => Some(lit.span),
|
None => Some(lit.span),
|
||||||
Some(prev_span) => Some(Self::merge_spans(prev_span, lit.span)),
|
Some(prev_span) => Some(Self::merge_spans(prev_span, lit.span)),
|
||||||
};
|
};
|
||||||
self.cursor = self.cursor.bump();
|
self.cursor.bump();
|
||||||
continue 'tokens;
|
continue 'tokens;
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
|
Some(tt::TokenTree::Subtree(subtree)) => {
|
||||||
self.cursor = self.cursor.subtree().unwrap();
|
self.cursor.bump();
|
||||||
match delim_to_str(subtree.delimiter.kind, false) {
|
match delim_to_str(subtree.delimiter.kind, false) {
|
||||||
Some(it) => (it, subtree.delimiter.open),
|
Some(it) => (it, subtree.delimiter.open),
|
||||||
None => continue,
|
None => continue,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
let parent = self.cursor.end().unwrap();
|
let parent = self.cursor.end();
|
||||||
self.cursor = self.cursor.bump();
|
|
||||||
match delim_to_str(parent.delimiter.kind, true) {
|
match delim_to_str(parent.delimiter.kind, true) {
|
||||||
Some(it) => (it, parent.delimiter.close),
|
Some(it) => (it, parent.delimiter.close),
|
||||||
None => continue,
|
None => continue,
|
||||||
|
|
@ -1023,12 +969,7 @@ where
|
||||||
self.buf.clear();
|
self.buf.clear();
|
||||||
// FIXME: Emitting whitespace for this is really just a hack, we should get rid of it.
|
// FIXME: Emitting whitespace for this is really just a hack, we should get rid of it.
|
||||||
// Add whitespace between adjoint puncts
|
// Add whitespace between adjoint puncts
|
||||||
let next = last.bump();
|
if let Some([tt::Leaf::Punct(curr), tt::Leaf::Punct(next)]) = last_two {
|
||||||
if let (
|
|
||||||
Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(curr), _)),
|
|
||||||
Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(next), _)),
|
|
||||||
) = (last.token_tree(), next.token_tree())
|
|
||||||
{
|
|
||||||
// Note: We always assume the semi-colon would be the last token in
|
// Note: We always assume the semi-colon would be the last token in
|
||||||
// other parts of RA such that we don't add whitespace here.
|
// other parts of RA such that we don't add whitespace here.
|
||||||
//
|
//
|
||||||
@ -2,10 +2,7 @@ use rustc_hash::FxHashMap;
use span::Span;
use syntax::{ast, AstNode};
use test_utils::extract_annotations;
use tt::{
use tt::{buffer::Cursor, Leaf, Punct, Spacing};
buffer::{TokenBuffer, TokenTreeRef},
Leaf, Punct, Spacing,
};

use crate::{
dummy_test_span_utils::{DummyTestSpanMap, DUMMY},

@ -32,22 +29,22 @@ fn check_punct_spacing(fixture: &str) {
})
.collect();

let buf = TokenBuffer::from_subtree(&subtree);
let mut cursor = Cursor::new(&subtree.0);
let mut cursor = buf.begin();
while !cursor.eof() {
while let Some(token_tree) = cursor.token_tree() {
if let TokenTreeRef::Leaf(
if let tt::TokenTree::Leaf(Leaf::Punct(Punct {
Leaf::Punct(Punct { spacing, span: Span { range, .. }, .. }),
spacing,
_,
span: Span { range, .. },
) = token_tree
..
})) = token_tree
{
if let Some(expected) = annotations.remove(range) {
assert_eq!(expected, *spacing);
}
}
cursor = cursor.bump_subtree();
cursor.bump();
}
cursor = cursor.bump();
cursor.bump_or_end();
}

assert!(annotations.is_empty(), "unchecked annotations: {annotations:?}");
@ -6,37 +6,34 @@ use std::fmt;
|
||||||
use span::Edition;
|
use span::Edition;
|
||||||
use syntax::{SyntaxKind, SyntaxKind::*, T};
|
use syntax::{SyntaxKind, SyntaxKind::*, T};
|
||||||
|
|
||||||
use tt::buffer::TokenBuffer;
|
|
||||||
|
|
||||||
pub fn to_parser_input<S: Copy + fmt::Debug>(
|
pub fn to_parser_input<S: Copy + fmt::Debug>(
|
||||||
edition: Edition,
|
edition: Edition,
|
||||||
buffer: &TokenBuffer<'_, S>,
|
buffer: tt::TokenTreesView<'_, S>,
|
||||||
) -> parser::Input {
|
) -> parser::Input {
|
||||||
let mut res = parser::Input::default();
|
let mut res = parser::Input::default();
|
||||||
|
|
||||||
let mut current = buffer.begin();
|
let mut current = buffer.cursor();
|
||||||
|
|
||||||
while !current.eof() {
|
while !current.eof() {
|
||||||
let cursor = current;
|
let tt = current.token_tree();
|
||||||
let tt = cursor.token_tree();
|
|
||||||
|
|
||||||
// Check if it is lifetime
|
// Check if it is lifetime
|
||||||
if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = tt {
|
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tt {
|
||||||
if punct.char == '\'' {
|
if punct.char == '\'' {
|
||||||
let next = cursor.bump();
|
current.bump();
|
||||||
match next.token_tree() {
|
match current.token_tree() {
|
||||||
Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(_ident), _)) => {
|
Some(tt::TokenTree::Leaf(tt::Leaf::Ident(_ident))) => {
|
||||||
res.push(LIFETIME_IDENT);
|
res.push(LIFETIME_IDENT);
|
||||||
current = next.bump();
|
current.bump();
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
_ => panic!("Next token must be ident : {:#?}", next.token_tree()),
|
_ => panic!("Next token must be ident"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
current = match tt {
|
match tt {
|
||||||
Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
|
Some(tt::TokenTree::Leaf(leaf)) => {
|
||||||
match leaf {
|
match leaf {
|
||||||
tt::Leaf::Literal(lit) => {
|
tt::Leaf::Literal(lit) => {
|
||||||
let kind = match lit.kind {
|
let kind = match lit.kind {
|
||||||
|
|
@ -83,9 +80,9 @@ pub fn to_parser_input<S: Copy + fmt::Debug>(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
cursor.bump()
|
current.bump();
|
||||||
}
|
}
|
||||||
Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
|
Some(tt::TokenTree::Subtree(subtree)) => {
|
||||||
if let Some(kind) = match subtree.delimiter.kind {
|
if let Some(kind) = match subtree.delimiter.kind {
|
||||||
tt::DelimiterKind::Parenthesis => Some(T!['(']),
|
tt::DelimiterKind::Parenthesis => Some(T!['(']),
|
||||||
tt::DelimiterKind::Brace => Some(T!['{']),
|
tt::DelimiterKind::Brace => Some(T!['{']),
|
||||||
|
|
@ -94,10 +91,10 @@ pub fn to_parser_input<S: Copy + fmt::Debug>(
|
||||||
} {
|
} {
|
||||||
res.push(kind);
|
res.push(kind);
|
||||||
}
|
}
|
||||||
cursor.subtree().unwrap()
|
current.bump();
|
||||||
}
|
}
|
||||||
None => match cursor.end() {
|
None => {
|
||||||
Some(subtree) => {
|
let subtree = current.end();
|
||||||
if let Some(kind) = match subtree.delimiter.kind {
|
if let Some(kind) = match subtree.delimiter.kind {
|
||||||
tt::DelimiterKind::Parenthesis => Some(T![')']),
|
tt::DelimiterKind::Parenthesis => Some(T![')']),
|
||||||
tt::DelimiterKind::Brace => Some(T!['}']),
|
tt::DelimiterKind::Brace => Some(T!['}']),
|
||||||
|
|
@ -106,10 +103,7 @@ pub fn to_parser_input<S: Copy + fmt::Debug>(
|
||||||
} {
|
} {
|
||||||
res.push(kind);
|
res.push(kind);
|
||||||
}
|
}
|
||||||
cursor.bump()
|
|
||||||
}
|
}
|
||||||
None => continue,
|
|
||||||
},
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -13,16 +13,18 @@ use hir_expand::{
|
||||||
proc_macro::{
|
proc_macro::{
|
||||||
ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacrosBuilder,
|
ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacrosBuilder,
|
||||||
},
|
},
|
||||||
quote, FileRange,
|
quote,
|
||||||
|
tt::{Leaf, TokenTree, TopSubtree, TopSubtreeBuilder, TtElement, TtIter},
|
||||||
|
FileRange,
|
||||||
};
|
};
|
||||||
use intern::Symbol;
|
use intern::Symbol;
|
||||||
use rustc_hash::FxHashMap;
|
use rustc_hash::FxHashMap;
|
||||||
use span::{Edition, EditionedFileId, FileId, Span};
|
use span::{Edition, EditionedFileId, FileId, Span};
|
||||||
|
use stdx::itertools::Itertools;
|
||||||
use test_utils::{
|
use test_utils::{
|
||||||
extract_range_or_offset, Fixture, FixtureWithProjectMeta, RangeOrOffset, CURSOR_MARKER,
|
extract_range_or_offset, Fixture, FixtureWithProjectMeta, RangeOrOffset, CURSOR_MARKER,
|
||||||
ESCAPED_CURSOR_MARKER,
|
ESCAPED_CURSOR_MARKER,
|
||||||
};
|
};
|
||||||
use tt::{Leaf, Subtree, TokenTree};
|
|
||||||
|
|
||||||
pub const WORKSPACE: base_db::SourceRootId = base_db::SourceRootId(0);
|
pub const WORKSPACE: base_db::SourceRootId = base_db::SourceRootId(0);
|
||||||
|
|
||||||
|
|
@ -580,14 +582,14 @@ struct IdentityProcMacroExpander;
|
||||||
impl ProcMacroExpander for IdentityProcMacroExpander {
|
impl ProcMacroExpander for IdentityProcMacroExpander {
|
||||||
fn expand(
|
fn expand(
|
||||||
&self,
|
&self,
|
||||||
subtree: &Subtree<Span>,
|
subtree: &TopSubtree,
|
||||||
_: Option<&Subtree<Span>>,
|
_: Option<&TopSubtree>,
|
||||||
_: &Env,
|
_: &Env,
|
||||||
_: Span,
|
_: Span,
|
||||||
_: Span,
|
_: Span,
|
||||||
_: Span,
|
_: Span,
|
||||||
_: Option<String>,
|
_: Option<String>,
|
||||||
) -> Result<Subtree<Span>, ProcMacroExpansionError> {
|
) -> Result<TopSubtree, ProcMacroExpansionError> {
|
||||||
Ok(subtree.clone())
|
Ok(subtree.clone())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -598,15 +600,17 @@ struct Issue18089ProcMacroExpander;
|
||||||
impl ProcMacroExpander for Issue18089ProcMacroExpander {
|
impl ProcMacroExpander for Issue18089ProcMacroExpander {
|
||||||
fn expand(
|
fn expand(
|
||||||
&self,
|
&self,
|
||||||
subtree: &Subtree<Span>,
|
subtree: &TopSubtree,
|
||||||
_: Option<&Subtree<Span>>,
|
_: Option<&TopSubtree>,
|
||||||
_: &Env,
|
_: &Env,
|
||||||
_: Span,
|
_: Span,
|
||||||
call_site: Span,
|
call_site: Span,
|
||||||
_: Span,
|
_: Span,
|
||||||
_: Option<String>,
|
_: Option<String>,
|
||||||
) -> Result<Subtree<Span>, ProcMacroExpansionError> {
|
) -> Result<TopSubtree, ProcMacroExpansionError> {
|
||||||
let macro_name = &subtree.token_trees[1];
|
let tt::TokenTree::Leaf(macro_name) = &subtree.0[2] else {
|
||||||
|
return Err(ProcMacroExpansionError::Panic("incorrect input".to_owned()));
|
||||||
|
};
|
||||||
Ok(quote! { call_site =>
|
Ok(quote! { call_site =>
|
||||||
#[macro_export]
|
#[macro_export]
|
||||||
macro_rules! my_macro___ {
|
macro_rules! my_macro___ {
|
||||||
|
|
@ -627,14 +631,14 @@ struct AttributeInputReplaceProcMacroExpander;
|
||||||
impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
|
impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
|
||||||
fn expand(
|
fn expand(
|
||||||
&self,
|
&self,
|
||||||
_: &Subtree<Span>,
|
_: &TopSubtree,
|
||||||
attrs: Option<&Subtree<Span>>,
|
attrs: Option<&TopSubtree>,
|
||||||
_: &Env,
|
_: &Env,
|
||||||
_: Span,
|
_: Span,
|
||||||
_: Span,
|
_: Span,
|
||||||
_: Span,
|
_: Span,
|
||||||
_: Option<String>,
|
_: Option<String>,
|
||||||
) -> Result<Subtree<Span>, ProcMacroExpansionError> {
|
) -> Result<TopSubtree, ProcMacroExpansionError> {
|
||||||
attrs
|
attrs
|
||||||
.cloned()
|
.cloned()
|
||||||
.ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into()))
|
.ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into()))
|
||||||
|
|
@ -646,26 +650,29 @@ struct MirrorProcMacroExpander;
|
||||||
impl ProcMacroExpander for MirrorProcMacroExpander {
|
impl ProcMacroExpander for MirrorProcMacroExpander {
|
||||||
fn expand(
|
fn expand(
|
||||||
&self,
|
&self,
|
||||||
input: &Subtree<Span>,
|
input: &TopSubtree,
|
||||||
_: Option<&Subtree<Span>>,
|
_: Option<&TopSubtree>,
|
||||||
_: &Env,
|
_: &Env,
|
||||||
_: Span,
|
_: Span,
|
||||||
_: Span,
|
_: Span,
|
||||||
_: Span,
|
_: Span,
|
||||||
_: Option<String>,
|
_: Option<String>,
|
||||||
) -> Result<Subtree<Span>, ProcMacroExpansionError> {
|
) -> Result<TopSubtree, ProcMacroExpansionError> {
|
||||||
fn traverse(input: &Subtree<Span>) -> Subtree<Span> {
|
fn traverse(builder: &mut TopSubtreeBuilder, iter: TtIter<'_>) {
|
||||||
let mut token_trees = vec![];
|
for tt in iter.collect_vec().into_iter().rev() {
|
||||||
for tt in input.token_trees.iter().rev() {
|
match tt {
|
||||||
let tt = match tt {
|
TtElement::Leaf(leaf) => builder.push(leaf.clone()),
|
||||||
tt::TokenTree::Leaf(leaf) => tt::TokenTree::Leaf(leaf.clone()),
|
TtElement::Subtree(subtree, subtree_iter) => {
|
||||||
tt::TokenTree::Subtree(sub) => tt::TokenTree::Subtree(traverse(sub)),
|
builder.open(subtree.delimiter.kind, subtree.delimiter.open);
|
||||||
};
|
traverse(builder, subtree_iter);
|
||||||
token_trees.push(tt);
|
builder.close(subtree.delimiter.close);
|
||||||
}
|
}
|
||||||
Subtree { delimiter: input.delimiter, token_trees: token_trees.into_boxed_slice() }
|
|
||||||
}
|
}
|
||||||
Ok(traverse(input))
|
}
|
||||||
|
}
|
||||||
|
let mut builder = TopSubtreeBuilder::new(input.top_subtree().delimiter);
|
||||||
|
traverse(&mut builder, input.iter());
|
||||||
|
Ok(builder.build())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -677,31 +684,24 @@ struct ShortenProcMacroExpander;
|
||||||
impl ProcMacroExpander for ShortenProcMacroExpander {
|
impl ProcMacroExpander for ShortenProcMacroExpander {
|
||||||
fn expand(
|
fn expand(
|
||||||
&self,
|
&self,
|
||||||
input: &Subtree<Span>,
|
input: &TopSubtree,
|
||||||
_: Option<&Subtree<Span>>,
|
_: Option<&TopSubtree>,
|
||||||
_: &Env,
|
_: &Env,
|
||||||
_: Span,
|
_: Span,
|
||||||
_: Span,
|
_: Span,
|
||||||
_: Span,
|
_: Span,
|
||||||
_: Option<String>,
|
_: Option<String>,
|
||||||
) -> Result<Subtree<Span>, ProcMacroExpansionError> {
|
) -> Result<TopSubtree, ProcMacroExpansionError> {
|
||||||
return Ok(traverse(input));
|
let mut result = input.0.clone();
|
||||||
|
for it in &mut result {
|
||||||
fn traverse(input: &Subtree<Span>) -> Subtree<Span> {
|
if let TokenTree::Leaf(leaf) = it {
|
||||||
let token_trees = input
|
modify_leaf(leaf)
|
||||||
.token_trees
|
|
||||||
.iter()
|
|
||||||
.map(|it| match it {
|
|
||||||
TokenTree::Leaf(leaf) => tt::TokenTree::Leaf(modify_leaf(leaf)),
|
|
||||||
TokenTree::Subtree(subtree) => tt::TokenTree::Subtree(traverse(subtree)),
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
Subtree { delimiter: input.delimiter, token_trees }
|
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
return Ok(tt::TopSubtree(result));
|
||||||
|
|
||||||
fn modify_leaf(leaf: &Leaf<Span>) -> Leaf<Span> {
|
fn modify_leaf(leaf: &mut Leaf) {
|
||||||
let mut leaf = leaf.clone();
|
match leaf {
|
||||||
match &mut leaf {
|
|
||||||
Leaf::Literal(it) => {
|
Leaf::Literal(it) => {
|
||||||
// XXX Currently replaces any literals with an empty string, but supporting
|
// XXX Currently replaces any literals with an empty string, but supporting
|
||||||
// "shortening" other literals would be nice.
|
// "shortening" other literals would be nice.
|
||||||
|
|
@ -712,7 +712,6 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
|
||||||
it.sym = Symbol::intern(&it.sym.as_str().chars().take(1).collect::<String>());
|
it.sym = Symbol::intern(&it.sym.as_str().chars().take(1).collect::<String>());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
leaf
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -1,259 +1,108 @@
//! Stateful iteration over token trees.
//!
//! We use this as the source of tokens for parser.
use crate::{Leaf, Subtree, TokenTree};
use crate::{Leaf, Subtree, TokenTree, TokenTreesView};
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
|
|
||||||
struct EntryId(usize);
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
|
|
||||||
struct EntryPtr(
|
|
||||||
/// The index of the buffer containing the entry.
|
|
||||||
EntryId,
|
|
||||||
/// The index of the entry within the buffer.
|
|
||||||
usize,
|
|
||||||
);
|
|
||||||
|
|
||||||
/// Internal type which is used instead of `TokenTree` to represent a token tree
|
|
||||||
/// within a `TokenBuffer`.
|
|
||||||
#[derive(Debug)]
|
|
||||||
enum Entry<'t, Span> {
|
|
||||||
// Mimicking types from proc-macro.
|
|
||||||
Subtree(Option<&'t TokenTree<Span>>, &'t Subtree<Span>, EntryId),
|
|
||||||
Leaf(&'t TokenTree<Span>),
|
|
||||||
/// End entries contain a pointer to the entry from the containing
|
|
||||||
/// token tree, or [`None`] if this is the outermost level.
|
|
||||||
End(Option<EntryPtr>),
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A token tree buffer
|
|
||||||
/// The safe version of `syn` [`TokenBuffer`](https://github.com/dtolnay/syn/blob/6533607f91686545cb034d2838beea338d9d0742/src/buffer.rs#L41)
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct TokenBuffer<'t, Span> {
|
|
||||||
buffers: Vec<Box<[Entry<'t, Span>]>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
trait TokenList<'a, Span> {
|
|
||||||
fn entries(
|
|
||||||
&self,
|
|
||||||
) -> (Vec<(usize, (&'a Subtree<Span>, Option<&'a TokenTree<Span>>))>, Vec<Entry<'a, Span>>);
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a, Span> TokenList<'a, Span> for &'a [TokenTree<Span>] {
|
|
||||||
fn entries(
|
|
||||||
&self,
|
|
||||||
) -> (Vec<(usize, (&'a Subtree<Span>, Option<&'a TokenTree<Span>>))>, Vec<Entry<'a, Span>>)
|
|
||||||
{
|
|
||||||
// Must contain everything in tokens and then the Entry::End
|
|
||||||
let start_capacity = self.len() + 1;
|
|
||||||
let mut entries = Vec::with_capacity(start_capacity);
|
|
||||||
let mut children = vec![];
|
|
||||||
for (idx, tt) in self.iter().enumerate() {
|
|
||||||
match tt {
|
|
||||||
TokenTree::Leaf(_) => {
|
|
||||||
entries.push(Entry::Leaf(tt));
|
|
||||||
}
|
|
||||||
TokenTree::Subtree(subtree) => {
|
|
||||||
entries.push(Entry::End(None));
|
|
||||||
children.push((idx, (subtree, Some(tt))));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
(children, entries)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a, Span> TokenList<'a, Span> for &'a Subtree<Span> {
|
|
||||||
fn entries(
|
|
||||||
&self,
|
|
||||||
) -> (Vec<(usize, (&'a Subtree<Span>, Option<&'a TokenTree<Span>>))>, Vec<Entry<'a, Span>>)
|
|
||||||
{
|
|
||||||
// Must contain everything in tokens and then the Entry::End
|
|
||||||
let mut entries = vec![];
|
|
||||||
let mut children = vec![];
|
|
||||||
entries.push(Entry::End(None));
|
|
||||||
children.push((0usize, (*self, None)));
|
|
||||||
(children, entries)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'t, Span> TokenBuffer<'t, Span> {
|
|
||||||
pub fn from_tokens(tokens: &'t [TokenTree<Span>]) -> TokenBuffer<'t, Span> {
|
|
||||||
Self::new(tokens)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn from_subtree(subtree: &'t Subtree<Span>) -> TokenBuffer<'t, Span> {
|
|
||||||
Self::new(subtree)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn new<T: TokenList<'t, Span>>(tokens: T) -> TokenBuffer<'t, Span> {
|
|
||||||
let mut buffers = vec![];
|
|
||||||
let idx = TokenBuffer::new_inner(tokens, &mut buffers, None);
|
|
||||||
assert_eq!(idx, 0);
|
|
||||||
TokenBuffer { buffers }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn new_inner<T: TokenList<'t, Span>>(
|
|
||||||
tokens: T,
|
|
||||||
buffers: &mut Vec<Box<[Entry<'t, Span>]>>,
|
|
||||||
next: Option<EntryPtr>,
|
|
||||||
) -> usize {
|
|
||||||
let (children, mut entries) = tokens.entries();
|
|
||||||
|
|
||||||
entries.push(Entry::End(next));
|
|
||||||
let res = buffers.len();
|
|
||||||
buffers.push(entries.into_boxed_slice());
|
|
||||||
|
|
||||||
for (child_idx, (subtree, tt)) in children {
|
|
||||||
let idx = TokenBuffer::new_inner(
|
|
||||||
&*subtree.token_trees,
|
|
||||||
buffers,
|
|
||||||
Some(EntryPtr(EntryId(res), child_idx + 1)),
|
|
||||||
);
|
|
||||||
buffers[res].as_mut()[child_idx] = Entry::Subtree(tt, subtree, EntryId(idx));
|
|
||||||
}
|
|
||||||
|
|
||||||
res
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Creates a cursor referencing the first token in the buffer and able to
|
|
||||||
/// traverse until the end of the buffer.
|
|
||||||
pub fn begin(&self) -> Cursor<'_, Span> {
|
|
||||||
Cursor::create(self, EntryPtr(EntryId(0), 0))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn entry(&self, ptr: &EntryPtr) -> Option<&Entry<'_, Span>> {
|
|
||||||
let id = ptr.0;
|
|
||||||
self.buffers[id.0].get(ptr.1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum TokenTreeRef<'a, Span> {
|
|
||||||
Subtree(&'a Subtree<Span>, Option<&'a TokenTree<Span>>),
|
|
||||||
Leaf(&'a Leaf<Span>, &'a TokenTree<Span>),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<Span: Copy> TokenTreeRef<'_, Span> {
|
|
||||||
pub fn span(&self) -> Span {
|
|
||||||
match self {
|
|
||||||
TokenTreeRef::Subtree(subtree, _) => subtree.delimiter.open,
|
|
||||||
TokenTreeRef::Leaf(leaf, _) => *leaf.span(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<Span: Clone> TokenTreeRef<'_, Span> {
|
|
||||||
pub fn cloned(&self) -> TokenTree<Span> {
|
|
||||||
match self {
|
|
||||||
TokenTreeRef::Subtree(subtree, tt) => match tt {
|
|
||||||
Some(it) => (*it).clone(),
|
|
||||||
None => (*subtree).clone().into(),
|
|
||||||
},
|
|
||||||
TokenTreeRef::Leaf(_, tt) => (*tt).clone(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A safe version of `Cursor` from `syn` crate <https://github.com/dtolnay/syn/blob/6533607f91686545cb034d2838beea338d9d0742/src/buffer.rs#L125>
|
|
||||||
#[derive(Copy, Clone, Debug)]
|
|
||||||
pub struct Cursor<'a, Span> {
|
pub struct Cursor<'a, Span> {
|
||||||
buffer: &'a TokenBuffer<'a, Span>,
|
buffer: &'a [TokenTree<Span>],
|
||||||
ptr: EntryPtr,
|
index: usize,
|
||||||
|
subtrees_stack: Vec<usize>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<Span> PartialEq for Cursor<'_, Span> {
|
impl<'a, Span: Copy> Cursor<'a, Span> {
|
||||||
fn eq(&self, other: &Cursor<'_, Span>) -> bool {
|
pub fn new(buffer: &'a [TokenTree<Span>]) -> Self {
|
||||||
self.ptr == other.ptr && std::ptr::eq(self.buffer, other.buffer)
|
Self { buffer, index: 0, subtrees_stack: Vec::new() }
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
impl<Span> Eq for Cursor<'_, Span> {}
|
|
||||||
|
|
||||||
impl<'a, Span> Cursor<'a, Span> {
|
|
||||||
/// Check whether it is eof
|
/// Check whether it is eof
|
||||||
pub fn eof(self) -> bool {
|
pub fn eof(&self) -> bool {
|
||||||
matches!(self.buffer.entry(&self.ptr), None | Some(Entry::End(None)))
|
self.index == self.buffer.len() && self.subtrees_stack.is_empty()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// If the cursor is pointing at the end of a subtree, returns
|
|
||||||
/// the parent subtree
|
|
||||||
pub fn end(self) -> Option<&'a Subtree<Span>> {
|
|
||||||
match self.entry() {
|
|
||||||
Some(Entry::End(Some(ptr))) => {
|
|
||||||
let idx = ptr.1;
|
|
||||||
if let Some(Entry::Subtree(_, subtree, _)) =
|
|
||||||
self.buffer.entry(&EntryPtr(ptr.0, idx - 1))
|
|
||||||
{
|
|
||||||
return Some(subtree);
|
|
||||||
}
|
|
||||||
None
|
|
||||||
}
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn entry(&self) -> Option<&'a Entry<'a, Span>> {
|
|
||||||
self.buffer.entry(&self.ptr)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// If the cursor is pointing at a `Subtree`, returns
|
|
||||||
/// a cursor into that subtree
|
|
||||||
pub fn subtree(self) -> Option<Cursor<'a, Span>> {
|
|
||||||
match self.entry() {
|
|
||||||
Some(Entry::Subtree(_, _, entry_id)) => {
|
|
||||||
Some(Cursor::create(self.buffer, EntryPtr(*entry_id, 0)))
|
|
||||||
}
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// If the cursor is pointing at a `TokenTree`, returns it
|
|
||||||
pub fn token_tree(self) -> Option<TokenTreeRef<'a, Span>> {
|
|
||||||
match self.entry() {
|
|
||||||
Some(Entry::Leaf(tt)) => match tt {
|
|
||||||
TokenTree::Leaf(leaf) => Some(TokenTreeRef::Leaf(leaf, tt)),
|
|
||||||
TokenTree::Subtree(subtree) => Some(TokenTreeRef::Subtree(subtree, Some(tt))),
|
|
||||||
},
|
|
||||||
Some(Entry::Subtree(tt, subtree, _)) => Some(TokenTreeRef::Subtree(subtree, *tt)),
|
|
||||||
Some(Entry::End(_)) | None => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn create(buffer: &'a TokenBuffer<'_, Span>, ptr: EntryPtr) -> Cursor<'a, Span> {
|
|
||||||
Cursor { buffer, ptr }
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Bump the cursor
|
|
||||||
pub fn bump(self) -> Cursor<'a, Span> {
|
|
||||||
if let Some(Entry::End(exit)) = self.buffer.entry(&self.ptr) {
|
|
||||||
match exit {
|
|
||||||
Some(exit) => Cursor::create(self.buffer, *exit),
|
|
||||||
None => self,
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Cursor::create(self.buffer, EntryPtr(self.ptr.0, self.ptr.1 + 1))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Bump the cursor, if it is a subtree, returns
|
|
||||||
/// a cursor into that subtree
|
|
||||||
pub fn bump_subtree(self) -> Cursor<'a, Span> {
|
|
||||||
match self.entry() {
|
|
||||||
Some(&Entry::Subtree(_, _, entry_id)) => {
|
|
||||||
Cursor::create(self.buffer, EntryPtr(entry_id, 0))
|
|
||||||
}
|
|
||||||
Some(Entry::End(exit)) => match exit {
|
|
||||||
Some(exit) => Cursor::create(self.buffer, *exit),
|
|
||||||
None => self,
|
|
||||||
},
|
|
||||||
_ => Cursor::create(self.buffer, EntryPtr(self.ptr.0, self.ptr.1 + 1)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Check whether it is a top level
|
|
||||||
pub fn is_root(&self) -> bool {
|
pub fn is_root(&self) -> bool {
|
||||||
let entry_id = self.ptr.0;
|
self.subtrees_stack.is_empty()
|
||||||
entry_id.0 == 0
|
}
|
||||||
|
|
||||||
|
fn last_subtree(&self) -> Option<(usize, &'a Subtree<Span>)> {
|
||||||
|
self.subtrees_stack.last().map(|&subtree_idx| {
|
||||||
|
let TokenTree::Subtree(subtree) = &self.buffer[subtree_idx] else {
|
||||||
|
panic!("subtree pointing to non-subtree");
|
||||||
|
};
|
||||||
|
(subtree_idx, subtree)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn end(&mut self) -> &'a Subtree<Span> {
|
||||||
|
let (last_subtree_idx, last_subtree) =
|
||||||
|
self.last_subtree().expect("called `Cursor::end()` without an open subtree");
|
||||||
|
// +1 because `Subtree.len` excludes the subtree itself.
|
||||||
|
assert_eq!(
|
||||||
|
last_subtree_idx + last_subtree.usize_len() + 1,
|
||||||
|
self.index,
|
||||||
|
"called `Cursor::end()` without finishing a subtree"
|
||||||
|
);
|
||||||
|
self.subtrees_stack.pop();
|
||||||
|
last_subtree
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the `TokenTree` at the cursor if it is not at the end of a subtree.
|
||||||
|
pub fn token_tree(&self) -> Option<&'a TokenTree<Span>> {
|
||||||
|
if let Some((last_subtree_idx, last_subtree)) = self.last_subtree() {
|
||||||
|
// +1 because `Subtree.len` excludes the subtree itself.
|
||||||
|
if last_subtree_idx + last_subtree.usize_len() + 1 == self.index {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.buffer.get(self.index)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Bump the cursor, and enters a subtree if it is on one.
|
||||||
|
pub fn bump(&mut self) {
|
||||||
|
if let Some((last_subtree_idx, last_subtree)) = self.last_subtree() {
|
||||||
|
// +1 because `Subtree.len` excludes the subtree itself.
|
||||||
|
assert_ne!(
|
||||||
|
last_subtree_idx + last_subtree.usize_len() + 1,
|
||||||
|
self.index,
|
||||||
|
"called `Cursor::bump()` when at the end of a subtree"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if let TokenTree::Subtree(_) = self.buffer[self.index] {
|
||||||
|
self.subtrees_stack.push(self.index);
|
||||||
|
}
|
||||||
|
self.index += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn bump_or_end(&mut self) {
|
||||||
|
if let Some((last_subtree_idx, last_subtree)) = self.last_subtree() {
|
||||||
|
// +1 because `Subtree.len` excludes the subtree itself.
|
||||||
|
if last_subtree_idx + last_subtree.usize_len() + 1 == self.index {
|
||||||
|
self.subtrees_stack.pop();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// +1 because `Subtree.len` excludes the subtree itself.
|
||||||
|
if let TokenTree::Subtree(_) = self.buffer[self.index] {
|
||||||
|
self.subtrees_stack.push(self.index);
|
||||||
|
}
|
||||||
|
self.index += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn peek_two_leaves(&self) -> Option<[&'a Leaf<Span>; 2]> {
|
||||||
|
if let Some((last_subtree_idx, last_subtree)) = self.last_subtree() {
|
||||||
|
// +1 because `Subtree.len` excludes the subtree itself.
|
||||||
|
let last_end = last_subtree_idx + last_subtree.usize_len() + 1;
|
||||||
|
if last_end == self.index || last_end == self.index + 1 {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.buffer.get(self.index..self.index + 2).and_then(|it| match it {
|
||||||
|
[TokenTree::Leaf(a), TokenTree::Leaf(b)] => Some([a, b]),
|
||||||
|
_ => None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn crossed(&self) -> TokenTreesView<'a, Span> {
|
||||||
|
assert!(self.is_root());
|
||||||
|
TokenTreesView::new(&self.buffer[..self.index])
|
||||||
}
|
}
|
||||||
}
|
}
|
@ -1,51 +1,64 @@
|
||||||
//! A "Parser" structure for token trees. We use this when parsing a declarative
|
//! A "Parser" structure for token trees. We use this when parsing a declarative
|
||||||
//! macro definition into a list of patterns and templates.
|
//! macro definition into a list of patterns and templates.
|
||||||
|
|
||||||
|
use std::fmt;
|
||||||
|
|
||||||
use arrayvec::ArrayVec;
|
use arrayvec::ArrayVec;
|
||||||
use intern::sym;
|
use intern::sym;
|
||||||
|
|
||||||
use crate::{Ident, Leaf, Punct, Spacing, Subtree, TokenTree};
|
use crate::{Ident, Leaf, Punct, Spacing, Subtree, TokenTree, TokenTreesView};
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Clone)]
|
||||||
pub struct TtIter<'a, S> {
|
pub struct TtIter<'a, S> {
|
||||||
inner: std::slice::Iter<'a, TokenTree<S>>,
|
inner: std::slice::Iter<'a, TokenTree<S>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, S: Copy> TtIter<'a, S> {
|
impl<S: Copy + fmt::Debug> fmt::Debug for TtIter<'_, S> {
|
||||||
pub fn new(subtree: &'a Subtree<S>) -> TtIter<'a, S> {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
TtIter { inner: subtree.token_trees.iter() }
|
f.debug_struct("TtIter").field("remaining", &self.remaining()).finish()
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn new_iter(iter: std::slice::Iter<'a, TokenTree<S>>) -> TtIter<'a, S> {
|
#[derive(Clone, Copy)]
|
||||||
TtIter { inner: iter }
|
pub struct TtIterSavepoint<'a, S>(&'a [TokenTree<S>]);
+impl<'a, S: Copy> TtIterSavepoint<'a, S> {
+    pub fn remaining(self) -> TokenTreesView<'a, S> {
+        TokenTreesView::new(self.0)
+    }
+}
+
+impl<'a, S: Copy> TtIter<'a, S> {
+    pub(crate) fn new(tt: &'a [TokenTree<S>]) -> TtIter<'a, S> {
+        TtIter { inner: tt.iter() }
     }

     pub fn expect_char(&mut self, char: char) -> Result<(), ()> {
         match self.next() {
-            Some(&TokenTree::Leaf(Leaf::Punct(Punct { char: c, .. }))) if c == char => Ok(()),
+            Some(TtElement::Leaf(&Leaf::Punct(Punct { char: c, .. }))) if c == char => Ok(()),
             _ => Err(()),
         }
     }

     pub fn expect_any_char(&mut self, chars: &[char]) -> Result<(), ()> {
         match self.next() {
-            Some(TokenTree::Leaf(Leaf::Punct(Punct { char: c, .. }))) if chars.contains(c) => {
+            Some(TtElement::Leaf(Leaf::Punct(Punct { char: c, .. }))) if chars.contains(c) => {
                 Ok(())
             }
             _ => Err(()),
         }
     }

-    pub fn expect_subtree(&mut self) -> Result<&'a Subtree<S>, ()> {
+    pub fn expect_subtree(&mut self) -> Result<(&'a Subtree<S>, TtIter<'a, S>), ()> {
         match self.next() {
-            Some(TokenTree::Subtree(it)) => Ok(it),
+            Some(TtElement::Subtree(subtree, iter)) => Ok((subtree, iter)),
             _ => Err(()),
         }
     }

     pub fn expect_leaf(&mut self) -> Result<&'a Leaf<S>, ()> {
         match self.next() {
-            Some(TokenTree::Leaf(it)) => Ok(it),
+            Some(TtElement::Leaf(it)) => Ok(it),
             _ => Err(()),
         }
     }

@@ -99,7 +112,7 @@ impl<'a, S: Copy> TtIter<'a, S> {
     /// This method currently may return a single quotation, which is part of lifetime ident and
     /// conceptually not a punct in the context of mbe. Callers should handle this.
     pub fn expect_glued_punct(&mut self) -> Result<ArrayVec<Punct<S>, 3>, ()> {
-        let TokenTree::Leaf(Leaf::Punct(first)) = self.next().ok_or(())?.clone() else {
+        let TtElement::Leaf(&Leaf::Punct(first)) = self.next().ok_or(())? else {
             return Err(());
         };

@@ -147,28 +160,84 @@ impl<'a, S: Copy> TtIter<'a, S> {
         }
         Ok(res)
     }
-    pub fn peek_n(&self, n: usize) -> Option<&'a TokenTree<S>> {
+    /// This method won't check for subtrees, so the nth token tree may not be the nth sibling of the current tree.
+    fn peek_n(&self, n: usize) -> Option<&'a TokenTree<S>> {
         self.inner.as_slice().get(n)
     }

+    pub fn peek(&self) -> Option<TtElement<'a, S>> {
+        match self.inner.as_slice().first()? {
+            TokenTree::Leaf(leaf) => Some(TtElement::Leaf(leaf)),
+            TokenTree::Subtree(subtree) => {
+                let nested_iter =
+                    TtIter { inner: self.inner.as_slice()[1..][..subtree.usize_len()].iter() };
+                Some(TtElement::Subtree(subtree, nested_iter))
+            }
+        }
+    }
+
+    /// Equivalent to `peek().is_none()`, but a bit faster.
+    pub fn is_empty(&self) -> bool {
+        self.inner.len() == 0
+    }
+
     pub fn next_span(&self) -> Option<S> {
         Some(self.inner.as_slice().first()?.first_span())
     }

-    pub fn as_slice(&self) -> &'a [TokenTree<S>] {
+    pub fn remaining(&self) -> TokenTreesView<'a, S> {
-        self.inner.as_slice()
+        TokenTreesView::new(self.inner.as_slice())
+    }
+
+    /// **Warning**: This advances `skip` **flat** token trees, subtrees account for children+1!
+    pub fn flat_advance(&mut self, skip: usize) {
+        self.inner = self.inner.as_slice()[skip..].iter();
+    }
+
+    pub fn savepoint(&self) -> TtIterSavepoint<'a, S> {
+        TtIterSavepoint(self.inner.as_slice())
+    }
+
+    pub fn from_savepoint(&self, savepoint: TtIterSavepoint<'a, S>) -> TokenTreesView<'a, S> {
+        let len = (self.inner.as_slice().as_ptr() as usize - savepoint.0.as_ptr() as usize)
+            / size_of::<TokenTree<S>>();
+        TokenTreesView::new(&savepoint.0[..len])
+    }
+
+    pub fn next_as_view(&mut self) -> Option<TokenTreesView<'a, S>> {
+        let savepoint = self.savepoint();
+        self.next()?;
+        Some(self.from_savepoint(savepoint))
+    }
+}
+
+pub enum TtElement<'a, S> {
+    Leaf(&'a Leaf<S>),
+    Subtree(&'a Subtree<S>, TtIter<'a, S>),
+}
+
+impl<S: Copy> TtElement<'_, S> {
+    #[inline]
+    pub fn first_span(&self) -> S {
+        match self {
+            TtElement::Leaf(it) => *it.span(),
+            TtElement::Subtree(it, _) => it.delimiter.open,
+        }
     }
 }

 impl<'a, S> Iterator for TtIter<'a, S> {
-    type Item = &'a TokenTree<S>;
+    type Item = TtElement<'a, S>;
     fn next(&mut self) -> Option<Self::Item> {
-        self.inner.next()
+        match self.inner.next()? {
+            TokenTree::Leaf(leaf) => Some(TtElement::Leaf(leaf)),
+            TokenTree::Subtree(subtree) => {
+                let nested_iter =
+                    TtIter { inner: self.inner.as_slice()[..subtree.usize_len()].iter() };
+                self.inner = self.inner.as_slice()[subtree.usize_len()..].iter();
+                Some(TtElement::Subtree(subtree, nested_iter))
+            }
         }

-    fn size_hint(&self) -> (usize, Option<usize>) {
-        self.inner.size_hint()
     }
 }

-impl<S> std::iter::ExactSizeIterator for TtIter<'_, S> {}
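
A minimal usage sketch (not part of the diff) of the cursor API above: `TtIter` now yields `TtElement`s, and a subtree arrives together with a nested iterator over exactly its own children instead of exposing a `token_trees` slice. The helper below is hypothetical and only illustrates the intended traversal pattern.

    fn count_leaves<S: Copy>(tt: &TopSubtree<S>) -> usize {
        fn go<S: Copy>(mut iter: TtIter<'_, S>) -> usize {
            let mut n = 0;
            while let Some(element) = iter.next() {
                match element {
                    TtElement::Leaf(_) => n += 1,
                    // The outer iterator has already stepped over the subtree's
                    // children; `nested` walks exactly those children.
                    TtElement::Subtree(_, nested) => n += go(nested),
                }
            }
            n
        }
        go(tt.iter())
    }
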
@@ -1,6 +1,7 @@
 //! `tt` crate defines a `TokenTree` data structure: this is the interface (both
-//! input and output) of macros. It closely mirrors `proc_macro` crate's
+//! input and output) of macros.
-//! `TokenTree`.
+//!
+//! The `TokenTree` is semantically a tree, but for performance reasons it is stored as a flat structure.

 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
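
To make the new module-level comment concrete, here is an illustration (mine, not from the diff) of how a stream like `a (b c) d` ends up in the contiguous buffer, with each `Subtree` node recording how many following nodes belong to it:

    // index 0: Subtree { len: 5, .. }   top-level (invisible) delimiter
    // index 1: Ident("a")
    // index 2: Subtree { len: 2, .. }   the `( ... )` group
    // index 3: Ident("b")
    // index 4: Ident("c")
    // index 5: Ident("d")
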
@@ -14,7 +15,9 @@ pub mod iter;

 use std::fmt;

+use buffer::Cursor;
 use intern::Symbol;
+use iter::{TtElement, TtIter};
 use stdx::{impl_from, itertools::Itertools as _};

 pub use text_size::{TextRange, TextSize};

@@ -75,23 +78,6 @@ pub enum TokenTree<S = u32> {
 }
 impl_from!(Leaf<S>, Subtree<S> for TokenTree);
 impl<S: Copy> TokenTree<S> {
-    pub fn empty(span: S) -> Self {
-        Self::Subtree(Subtree {
-            delimiter: Delimiter::invisible_spanned(span),
-            token_trees: Box::new([]),
-        })
-    }
-
-    pub fn subtree_or_wrap(self, span: DelimSpan<S>) -> Subtree<S> {
-        match self {
-            TokenTree::Leaf(_) => Subtree {
-                delimiter: Delimiter::invisible_delim_spanned(span),
-                token_trees: Box::new([self]),
-            },
-            TokenTree::Subtree(s) => s,
-        }
-    }
-
     pub fn first_span(&self) -> S {
         match self {
             TokenTree::Leaf(l) => *l.span(),

@@ -118,38 +104,422 @@ impl<S> Leaf<S> {
 }
 impl_from!(Literal<S>, Punct<S>, Ident<S> for Leaf);

-#[derive(Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Subtree<S> {
     pub delimiter: Delimiter<S>,
-    pub token_trees: Box<[TokenTree<S>]>,
+    /// Number of following token trees that belong to this subtree, excluding this subtree.
+    pub len: u32,
 }

-impl<S: Copy> Subtree<S> {
+impl<S> Subtree<S> {
-    pub fn empty(span: DelimSpan<S>) -> Self {
+    pub fn usize_len(&self) -> usize {
-        Subtree { delimiter: Delimiter::invisible_delim_spanned(span), token_trees: Box::new([]) }
+        self.len as usize
-    }
-
-    /// This is slow, and should be avoided, as it will always reallocate!
-    pub fn push(&mut self, subtree: TokenTree<S>) {
-        let mut mutable_trees = std::mem::take(&mut self.token_trees).into_vec();
-
-        // Reserve exactly space for one element, to avoid `into_boxed_slice` having to reallocate again.
-        mutable_trees.reserve_exact(1);
-        mutable_trees.push(subtree);
-
-        self.token_trees = mutable_trees.into_boxed_slice();
     }
 }

 #[derive(Clone, PartialEq, Eq, Hash)]
-pub struct SubtreeBuilder<S> {
+pub struct TopSubtree<S>(pub Box<[TokenTree<S>]>);
-    pub delimiter: Delimiter<S>,
-    pub token_trees: Vec<TokenTree<S>>,
+
+impl<S: Copy> TopSubtree<S> {
+    pub fn empty(span: DelimSpan<S>) -> Self {
+        Self(Box::new([TokenTree::Subtree(Subtree {
+            delimiter: Delimiter::invisible_delim_spanned(span),
+            len: 0,
+        })]))
+    }
+
+    pub fn invisible_from_leaves<const N: usize>(delim_span: S, leaves: [Leaf<S>; N]) -> Self {
+        let mut builder = TopSubtreeBuilder::new(Delimiter::invisible_spanned(delim_span));
+        builder.extend(leaves);
+        builder.build()
+    }
+
+    pub fn from_token_trees(delimiter: Delimiter<S>, token_trees: TokenTreesView<'_, S>) -> Self {
+        let mut builder = TopSubtreeBuilder::new(delimiter);
+        builder.extend_with_tt(token_trees);
+        builder.build()
+    }
+
+    pub fn from_subtree(subtree: SubtreeView<'_, S>) -> Self {
+        Self(subtree.0.into())
+    }
+
+    pub fn view(&self) -> SubtreeView<'_, S> {
+        SubtreeView::new(&self.0)
+    }
+
+    pub fn iter(&self) -> TtIter<'_, S> {
+        self.view().iter()
+    }
+
+    pub fn top_subtree(&self) -> &Subtree<S> {
+        self.view().top_subtree()
+    }
+
+    pub fn top_subtree_delimiter_mut(&mut self) -> &mut Delimiter<S> {
+        let TokenTree::Subtree(subtree) = &mut self.0[0] else {
+            unreachable!("the first token tree is always the top subtree");
+        };
+        &mut subtree.delimiter
+    }
+
+    pub fn token_trees(&self) -> TokenTreesView<'_, S> {
+        self.view().token_trees()
+    }
 }

-impl<S> SubtreeBuilder<S> {
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-    pub fn build(self) -> Subtree<S> {
+pub struct TopSubtreeBuilder<S> {
-        Subtree { delimiter: self.delimiter, token_trees: self.token_trees.into_boxed_slice() }
+    unclosed_subtree_indices: Vec<usize>,
+    token_trees: Vec<TokenTree<S>>,
+    last_closed_subtree: Option<usize>,
+}
+
+impl<S: Copy> TopSubtreeBuilder<S> {
+    pub fn new(top_delimiter: Delimiter<S>) -> Self {
+        let mut result = Self {
+            unclosed_subtree_indices: Vec::new(),
+            token_trees: Vec::new(),
+            last_closed_subtree: None,
+        };
+        let top_subtree = TokenTree::Subtree(Subtree { delimiter: top_delimiter, len: 0 });
+        result.token_trees.push(top_subtree);
+        result
+    }
+
+    pub fn open(&mut self, delimiter_kind: DelimiterKind, open_span: S) {
+        self.unclosed_subtree_indices.push(self.token_trees.len());
+        self.token_trees.push(TokenTree::Subtree(Subtree {
+            delimiter: Delimiter {
+                open: open_span,
+                close: open_span, // Will be overwritten on close.
+                kind: delimiter_kind,
+            },
+            len: 0,
+        }));
+    }
+
+    pub fn close(&mut self, close_span: S) {
+        let last_unclosed_index = self
+            .unclosed_subtree_indices
+            .pop()
+            .expect("attempt to close a `tt::Subtree` when none is open");
+        let subtree_len = (self.token_trees.len() - last_unclosed_index - 1) as u32;
+        let TokenTree::Subtree(subtree) = &mut self.token_trees[last_unclosed_index] else {
+            unreachable!("unclosed token tree is always a subtree");
+        };
+        subtree.len = subtree_len;
+        subtree.delimiter.close = close_span;
+        self.last_closed_subtree = Some(last_unclosed_index);
+    }
+
+    /// You cannot call this consecutively, it will only work once after close.
+    pub fn remove_last_subtree_if_invisible(&mut self) {
+        let Some(last_subtree_idx) = self.last_closed_subtree else { return };
+        if let TokenTree::Subtree(Subtree {
+            delimiter: Delimiter { kind: DelimiterKind::Invisible, .. },
+            ..
+        }) = self.token_trees[last_subtree_idx]
+        {
+            self.token_trees.remove(last_subtree_idx);
+            self.last_closed_subtree = None;
+        }
+    }
+
+    pub fn push(&mut self, leaf: Leaf<S>) {
+        self.token_trees.push(TokenTree::Leaf(leaf));
+    }
+
+    pub fn extend(&mut self, leaves: impl IntoIterator<Item = Leaf<S>>) {
+        self.token_trees.extend(leaves.into_iter().map(TokenTree::Leaf));
+    }
+
+    /// This does not check the token trees are valid, beware!
+    pub fn extend_tt_dangerous(&mut self, tt: impl IntoIterator<Item = TokenTree<S>>) {
+        self.token_trees.extend(tt);
+    }
+
+    pub fn extend_with_tt(&mut self, tt: TokenTreesView<'_, S>) {
+        self.token_trees.extend(tt.0.iter().cloned());
+    }
+
+    pub fn expected_delimiter(&self) -> Option<&Delimiter<S>> {
+        self.unclosed_subtree_indices.last().map(|&subtree_idx| {
+            let TokenTree::Subtree(subtree) = &self.token_trees[subtree_idx] else {
+                unreachable!("unclosed token tree is always a subtree")
+            };
+            &subtree.delimiter
+        })
+    }
+
+    /// Converts unclosed subtree to a punct of their open delimiter.
+    // FIXME: This is incorrect to do, delimiters can never be puncts. See #18244.
+    pub fn flatten_unclosed_subtrees(&mut self) {
+        for &subtree_idx in &self.unclosed_subtree_indices {
+            let TokenTree::Subtree(subtree) = &self.token_trees[subtree_idx] else {
+                unreachable!("unclosed token tree is always a subtree")
+            };
+            let char = match subtree.delimiter.kind {
+                DelimiterKind::Parenthesis => '(',
+                DelimiterKind::Brace => '{',
+                DelimiterKind::Bracket => '[',
+                DelimiterKind::Invisible => '$',
+            };
+            self.token_trees[subtree_idx] = TokenTree::Leaf(Leaf::Punct(Punct {
+                char,
+                spacing: Spacing::Alone,
+                span: subtree.delimiter.open,
+            }));
+        }
+        self.unclosed_subtree_indices.clear();
+    }
+
+    /// Builds, and remove the top subtree if it has only one subtree child.
+    pub fn build_skip_top_subtree(mut self) -> TopSubtree<S> {
+        let top_tts = TokenTreesView::new(&self.token_trees[1..]);
+        match top_tts.try_into_subtree() {
+            Some(_) => {
+                assert!(
+                    self.unclosed_subtree_indices.is_empty(),
+                    "attempt to build an unbalanced `TopSubtreeBuilder`"
+                );
+                TopSubtree(self.token_trees.drain(1..).collect())
+            }
+            None => self.build(),
+        }
+    }
+
+    pub fn build(mut self) -> TopSubtree<S> {
+        assert!(
+            self.unclosed_subtree_indices.is_empty(),
+            "attempt to build an unbalanced `TopSubtreeBuilder`"
+        );
+        let total_len = self.token_trees.len() as u32;
+        let TokenTree::Subtree(top_subtree) = &mut self.token_trees[0] else {
+            unreachable!("first token tree is always a subtree");
+        };
+        top_subtree.len = total_len - 1;
+        TopSubtree(self.token_trees.into_boxed_slice())
+    }
+
+    pub fn restore_point(&self) -> SubtreeBuilderRestorePoint {
+        SubtreeBuilderRestorePoint {
+            unclosed_subtree_indices_len: self.unclosed_subtree_indices.len(),
+            token_trees_len: self.token_trees.len(),
+            last_closed_subtree: self.last_closed_subtree,
+        }
+    }
+
+    pub fn restore(&mut self, restore_point: SubtreeBuilderRestorePoint) {
+        self.unclosed_subtree_indices.truncate(restore_point.unclosed_subtree_indices_len);
+        self.token_trees.truncate(restore_point.token_trees_len);
+        self.last_closed_subtree = restore_point.last_closed_subtree;
+    }
+}
+
+#[derive(Clone, Copy)]
+pub struct SubtreeBuilderRestorePoint {
+    unclosed_subtree_indices_len: usize,
+    token_trees_len: usize,
+    last_closed_subtree: Option<usize>,
+}
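
A hedged sketch of how `TopSubtreeBuilder` is meant to be driven (assuming plain `u32` spans and that this sits inside the `tt` crate; the test name and values are made up): `open`/`close` bracket a subtree, `close` back-patches its `len` and close span, and `build` finalizes the top-level length.

    #[test]
    fn builder_sketch() {
        let mut builder = TopSubtreeBuilder::new(Delimiter::invisible_spanned(0u32));
        builder.open(DelimiterKind::Parenthesis, 0);
        builder.push(Leaf::Punct(Punct { char: '+', spacing: Spacing::Alone, span: 0 }));
        builder.close(0);
        let top: TopSubtree<u32> = builder.build();
        // Flat layout: [Subtree { len: 2 }, Subtree { len: 1 }, Punct('+')]
        assert_eq!(top.top_subtree().usize_len(), 2);
    }
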
+#[derive(Clone, Copy)]
+pub struct TokenTreesView<'a, S>(&'a [TokenTree<S>]);
+
+impl<'a, S: Copy> TokenTreesView<'a, S> {
+    pub fn new(tts: &'a [TokenTree<S>]) -> Self {
+        if cfg!(debug_assertions) {
+            tts.iter().enumerate().for_each(|(idx, tt)| {
+                if let TokenTree::Subtree(tt) = &tt {
+                    // `<` and not `<=` because `Subtree.len` does not include the subtree node itself.
+                    debug_assert!(
+                        idx + tt.usize_len() < tts.len(),
+                        "`TokenTreeView::new()` was given a cut-in-half list"
+                    );
+                }
+            });
+        }
+        Self(tts)
+    }
+
+    pub fn iter(&self) -> TtIter<'a, S> {
+        TtIter::new(self.0)
+    }
+
+    pub fn cursor(&self) -> Cursor<'a, S> {
+        Cursor::new(self.0)
+    }
+
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+
+    pub fn try_into_subtree(self) -> Option<SubtreeView<'a, S>> {
+        if let Some(TokenTree::Subtree(subtree)) = self.0.first() {
+            if subtree.usize_len() == (self.0.len() - 1) {
+                return Some(SubtreeView::new(self.0));
+            }
+        }
+        None
+    }
+
+    pub fn strip_invisible(self) -> TokenTreesView<'a, S> {
+        self.try_into_subtree().map(|subtree| subtree.strip_invisible()).unwrap_or(self)
+    }
+
+    /// This returns a **flat** structure of tokens (subtrees will be represented by a single node
+    /// preceding their children), so it isn't suited for most use cases, only for matching leaves
+    /// at the beginning/end with no subtrees before them. If you need a structured pass, use [`TtIter`].
+    pub fn flat_tokens(&self) -> &'a [TokenTree<S>] {
+        self.0
+    }
+
+    pub fn split(
+        self,
+        mut split_fn: impl FnMut(TtElement<'a, S>) -> bool,
+    ) -> impl Iterator<Item = TokenTreesView<'a, S>> {
+        let mut subtree_iter = self.iter();
+        let mut need_to_yield_even_if_empty = true;
+        let result = std::iter::from_fn(move || {
+            if subtree_iter.is_empty() && !need_to_yield_even_if_empty {
+                return None;
+            };
+
+            need_to_yield_even_if_empty = false;
+            let savepoint = subtree_iter.savepoint();
+            let mut result = subtree_iter.from_savepoint(savepoint);
+            while let Some(tt) = subtree_iter.next() {
+                if split_fn(tt) {
+                    need_to_yield_even_if_empty = true;
+                    break;
+                }
+                result = subtree_iter.from_savepoint(savepoint);
+            }
+            Some(result)
+        });
+        result
+    }
+}
+
+impl<S: fmt::Debug + Copy> fmt::Debug for TokenTreesView<'_, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let mut iter = self.iter();
+        while let Some(tt) = iter.next() {
+            print_debug_token(f, 0, tt)?;
+            if !iter.is_empty() {
+                writeln!(f)?;
+            }
+        }
+        Ok(())
+    }
+}
+
+impl<S: Copy> fmt::Display for TokenTreesView<'_, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        return token_trees_display(f, self.iter());
+
+        fn subtree_display<S>(
+            subtree: &Subtree<S>,
+            f: &mut fmt::Formatter<'_>,
+            iter: TtIter<'_, S>,
+        ) -> fmt::Result {
+            let (l, r) = match subtree.delimiter.kind {
+                DelimiterKind::Parenthesis => ("(", ")"),
+                DelimiterKind::Brace => ("{", "}"),
+                DelimiterKind::Bracket => ("[", "]"),
+                DelimiterKind::Invisible => ("", ""),
+            };
+            f.write_str(l)?;
+            token_trees_display(f, iter)?;
+            f.write_str(r)?;
+            Ok(())
+        }
+
+        fn token_trees_display<S>(f: &mut fmt::Formatter<'_>, iter: TtIter<'_, S>) -> fmt::Result {
+            let mut needs_space = false;
+            for child in iter {
+                if needs_space {
+                    f.write_str(" ")?;
+                }
+                needs_space = true;
+
+                match child {
+                    TtElement::Leaf(Leaf::Punct(p)) => {
+                        needs_space = p.spacing == Spacing::Alone;
+                        fmt::Display::fmt(p, f)?;
+                    }
+                    TtElement::Leaf(leaf) => fmt::Display::fmt(leaf, f)?,
+                    TtElement::Subtree(subtree, subtree_iter) => {
+                        subtree_display(subtree, f, subtree_iter)?
+                    }
+                }
+            }
+            Ok(())
+        }
+    }
+}
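
An assumed usage example for `TokenTreesView::split` (not taken from the diff): splitting an argument list on top-level commas. Commas inside nested subtrees are never seen by the predicate, because the outer `TtIter` steps over a whole subtree in one go.

    fn comma_separated<'a, S: Copy>(
        args: TokenTreesView<'a, S>,
    ) -> impl Iterator<Item = TokenTreesView<'a, S>> {
        args.split(|element| {
            matches!(element, TtElement::Leaf(Leaf::Punct(Punct { char: ',', .. })))
        })
    }
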
+#[derive(Clone, Copy)]
+// Invariant: always starts with `Subtree` that covers the entire thing.
+pub struct SubtreeView<'a, S>(&'a [TokenTree<S>]);
+
+impl<'a, S: Copy> SubtreeView<'a, S> {
+    pub fn new(tts: &'a [TokenTree<S>]) -> Self {
+        if cfg!(debug_assertions) {
+            let TokenTree::Subtree(subtree) = &tts[0] else {
+                panic!("first token tree must be a subtree in `SubtreeView`");
+            };
+            assert_eq!(
+                subtree.usize_len(),
+                tts.len() - 1,
+                "subtree must cover the entire `SubtreeView`"
+            );
+        }
+        Self(tts)
+    }
+
+    pub fn as_token_trees(self) -> TokenTreesView<'a, S> {
+        TokenTreesView::new(self.0)
+    }
+
+    pub fn iter(&self) -> TtIter<'a, S> {
+        TtIter::new(&self.0[1..])
+    }
+
+    pub fn top_subtree(&self) -> &'a Subtree<S> {
+        let TokenTree::Subtree(subtree) = &self.0[0] else {
+            unreachable!("the first token tree is always the top subtree");
+        };
+        subtree
+    }
+
+    pub fn strip_invisible(&self) -> TokenTreesView<'a, S> {
+        if self.top_subtree().delimiter.kind == DelimiterKind::Invisible {
+            TokenTreesView::new(&self.0[1..])
+        } else {
+            TokenTreesView::new(self.0)
+        }
+    }
+
+    pub fn token_trees(&self) -> TokenTreesView<'a, S> {
+        TokenTreesView::new(&self.0[1..])
+    }
+}
+
+impl<S: fmt::Debug + Copy> fmt::Debug for SubtreeView<'_, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Debug::fmt(&TokenTreesView(self.0), f)
+    }
+}
+
+impl<S: Copy> fmt::Display for SubtreeView<'_, S> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(&TokenTreesView(self.0), f)
     }
 }

@@ -348,6 +718,7 @@ fn print_debug_subtree<S: fmt::Debug>(
     f: &mut fmt::Formatter<'_>,
     subtree: &Subtree<S>,
     level: usize,
+    iter: TtIter<'_, S>,
 ) -> fmt::Result {
     let align = " ".repeat(level);

@@ -363,14 +734,9 @@ fn print_debug_subtree<S: fmt::Debug>(
     fmt::Debug::fmt(&open, f)?;
     write!(f, " ")?;
     fmt::Debug::fmt(&close, f)?;
-    if !subtree.token_trees.is_empty() {
+    for child in iter {
         writeln!(f)?;
-        for (idx, child) in subtree.token_trees.iter().enumerate() {
+        print_debug_token(f, level + 1, child)?;
-            print_debug_token(f, child, level + 1)?;
-            if idx != subtree.token_trees.len() - 1 {
-                writeln!(f)?;
-            }
-        }
     }

     Ok(())

@@ -378,13 +744,13 @@ fn print_debug_subtree<S: fmt::Debug>(

 fn print_debug_token<S: fmt::Debug>(
     f: &mut fmt::Formatter<'_>,
-    tkn: &TokenTree<S>,
     level: usize,
+    tt: TtElement<'_, S>,
 ) -> fmt::Result {
     let align = " ".repeat(level);

-    match tkn {
+    match tt {
-        TokenTree::Leaf(leaf) => match leaf {
+        TtElement::Leaf(leaf) => match leaf {
             Leaf::Literal(lit) => {
                 write!(
                     f,

@@ -417,54 +783,23 @@ fn print_debug_token<S: fmt::Debug>(
                 )?;
             }
         },
-        TokenTree::Subtree(subtree) => {
+        TtElement::Subtree(subtree, subtree_iter) => {
-            print_debug_subtree(f, subtree, level)?;
+            print_debug_subtree(f, subtree, level, subtree_iter)?;
         }
     }

     Ok(())
 }

-impl<S: fmt::Debug> fmt::Debug for Subtree<S> {
+impl<S: fmt::Debug + Copy> fmt::Debug for TopSubtree<S> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        print_debug_subtree(f, self, 0)
+        fmt::Debug::fmt(&self.view(), f)
     }
 }

-impl<S> fmt::Display for TokenTree<S> {
+impl<S: fmt::Display + Copy> fmt::Display for TopSubtree<S> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
+        fmt::Display::fmt(&self.view(), f)
-            TokenTree::Leaf(it) => fmt::Display::fmt(it, f),
-            TokenTree::Subtree(it) => fmt::Display::fmt(it, f),
-        }
-    }
-}

-impl<S> fmt::Display for Subtree<S> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let (l, r) = match self.delimiter.kind {
-            DelimiterKind::Parenthesis => ("(", ")"),
-            DelimiterKind::Brace => ("{", "}"),
-            DelimiterKind::Bracket => ("[", "]"),
-            DelimiterKind::Invisible => ("", ""),
-        };
-        f.write_str(l)?;
-        let mut needs_space = false;
-        for tt in self.token_trees.iter() {
-            if needs_space {
-                f.write_str(" ")?;
-            }
-            needs_space = true;
-            match tt {
-                TokenTree::Leaf(Leaf::Punct(p)) => {
-                    needs_space = p.spacing == Spacing::Alone;
-                    fmt::Display::fmt(p, f)?;
-                }
-                tt => fmt::Display::fmt(tt, f)?,
-            }
-        }
-        f.write_str(r)?;
-        Ok(())
     }
 }

@@ -538,34 +873,45 @@ impl<S> fmt::Display for Punct<S> {
 impl<S> Subtree<S> {
     /// Count the number of tokens recursively
     pub fn count(&self) -> usize {
-        let children_count = self
+        self.usize_len()
-            .token_trees
-            .iter()
-            .map(|c| match c {
-                TokenTree::Subtree(c) => c.count(),
-                TokenTree::Leaf(_) => 0,
-            })
-            .sum::<usize>();
-
-        self.token_trees.len() + children_count
     }
 }

-impl<S> Subtree<S> {
+impl<S> TopSubtree<S> {
     /// A simple line string used for debugging
-    pub fn as_debug_string(&self) -> String {
+    pub fn subtree_as_debug_string(&self, subtree_idx: usize) -> String {
-        let delim = match self.delimiter.kind {
+        fn debug_subtree<S>(
+            output: &mut String,
+            subtree: &Subtree<S>,
+            iter: &mut std::slice::Iter<'_, TokenTree<S>>,
+        ) {
+            let delim = match subtree.delimiter.kind {
                 DelimiterKind::Brace => ("{", "}"),
                 DelimiterKind::Bracket => ("[", "]"),
                 DelimiterKind::Parenthesis => ("(", ")"),
                 DelimiterKind::Invisible => ("$", "$"),
             };

-        let mut res = String::new();
+            output.push_str(delim.0);
-        res.push_str(delim.0);
             let mut last = None;
-        for child in self.token_trees.iter() {
+            let mut idx = 0;
-            let s = match child {
+            while idx < subtree.len {
+                let child = iter.next().unwrap();
+                debug_token_tree(output, child, last, iter);
+                last = Some(child);
+                idx += 1;
+            }
+
+            output.push_str(delim.1);
+        }
+
+        fn debug_token_tree<S>(
+            output: &mut String,
+            tt: &TokenTree<S>,
+            last: Option<&TokenTree<S>>,
+            iter: &mut std::slice::Iter<'_, TokenTree<S>>,
+        ) {
+            match tt {
                 TokenTree::Leaf(it) => {
                     let s = match it {
                         Leaf::Literal(it) => it.symbol.to_string(),

@@ -574,31 +920,37 @@ impl<S> Subtree<S> {
                     };
                     match (it, last) {
                         (Leaf::Ident(_), Some(&TokenTree::Leaf(Leaf::Ident(_)))) => {
-                            " ".to_owned() + &s
+                            output.push(' ');
+                            output.push_str(&s);
                         }
                         (Leaf::Punct(_), Some(TokenTree::Leaf(Leaf::Punct(punct)))) => {
                             if punct.spacing == Spacing::Alone {
-                                " ".to_owned() + &s
+                                output.push(' ');
+                                output.push_str(&s);
                             } else {
-                                s
+                                output.push_str(&s);
                             }
                         }
-                        _ => s,
+                        _ => output.push_str(&s),
                     }
                 }
-                TokenTree::Subtree(it) => it.as_debug_string(),
+                TokenTree::Subtree(it) => debug_subtree(output, it, iter),
-            };
+            }
-            res.push_str(&s);
-            last = Some(child);
         }

-        res.push_str(delim.1);
+        let mut res = String::new();
+        debug_token_tree(
+            &mut res,
+            &self.0[subtree_idx],
+            None,
+            &mut self.0[subtree_idx + 1..].iter(),
+        );
         res
     }
 }

-pub fn pretty<S>(tkns: &[TokenTree<S>]) -> String {
+pub fn pretty<S>(mut tkns: &[TokenTree<S>]) -> String {
-    fn tokentree_to_text<S>(tkn: &TokenTree<S>) -> String {
+    fn tokentree_to_text<S>(tkn: &TokenTree<S>, tkns: &mut &[TokenTree<S>]) -> String {
         match tkn {
             TokenTree::Leaf(Leaf::Ident(ident)) => {
                 format!("{}{}", ident.is_raw.as_str(), ident.sym)

@@ -606,7 +958,9 @@ pub fn pretty<S>(tkns: &[TokenTree<S>]) -> String {
             TokenTree::Leaf(Leaf::Literal(literal)) => format!("{literal}"),
             TokenTree::Leaf(Leaf::Punct(punct)) => format!("{}", punct.char),
             TokenTree::Subtree(subtree) => {
-                let content = pretty(&subtree.token_trees);
+                let (subtree_content, rest) = tkns.split_at(subtree.usize_len());
+                let content = pretty(subtree_content);
+                *tkns = rest;
                 let (open, close) = match subtree.delimiter.kind {
                     DelimiterKind::Brace => ("{", "}"),
                     DelimiterKind::Bracket => ("[", "]"),

@@ -618,16 +972,18 @@ pub fn pretty<S>(tkns: &[TokenTree<S>]) -> String {
         }
     }

-    tkns.iter()
+    let mut last = String::new();
-        .fold((String::new(), true), |(last, last_to_joint), tkn| {
+    let mut last_to_joint = true;
-            let s = [last, tokentree_to_text(tkn)].join(if last_to_joint { "" } else { " " });
-            let mut is_joint = false;
+    while let Some((tkn, rest)) = tkns.split_first() {
+        tkns = rest;
+        last = [last, tokentree_to_text(tkn, &mut tkns)].join(if last_to_joint { "" } else { " " });
+        last_to_joint = false;
         if let TokenTree::Leaf(Leaf::Punct(punct)) = tkn {
             if punct.spacing == Spacing::Joint {
-                is_joint = true;
+                last_to_joint = true;
             }
         }
-            (s, is_joint)
+    }
-        })
+    last
-        .0
 }
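
With the flat representation, `pretty` now consumes a slice of flat tokens; one plausible way to render a whole `TopSubtree` (my assumption, not shown in the diff) is to hand it the flat children of the top subtree:

    fn render<S: Copy>(tt: &TopSubtree<S>) -> String {
        pretty(tt.token_trees().flat_tokens())
    }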