Merge pull request #18327 from ChayimFriedman2/flat-tt

Store token trees in contiguous `Vec` instead of as a tree
Lukas Wirth 2025-01-03 11:31:58 +00:00 committed by GitHub
commit b6910ed1b2
50 changed files with 2356 additions and 2286 deletions
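The change replaces the recursive `tt::Subtree { delimiter, token_trees: Box<[TokenTree]> }` layout with a single contiguous `Vec<tt::TokenTree>`: a subtree becomes a header entry that stores only its delimiter and a `len`, and the trees making up its contents (including nested descendants, judging by `from_bridge`/`into_bridge` below) follow it in the same buffer. A minimal sketch of that layout, using hypothetical `FlatTokenTree`/`children` names rather than the real `tt` crate API:

```rust
// Minimal sketch of the flattened layout (hypothetical names, not the real
// `tt` crate API). A subtree is a header entry; the trees that make up its
// contents are stored right after it in the same Vec, and `len` counts all
// of them, including nested descendants.
#[derive(Debug, Clone, PartialEq)]
enum FlatTokenTree {
    Leaf(char),           // stand-in for Ident / Literal / Punct
    Subtree { len: u32 }, // delimiter omitted for brevity
}

/// Slice of the trees belonging to the subtree header at `idx`.
fn children(trees: &[FlatTokenTree], idx: usize) -> &[FlatTokenTree] {
    match trees[idx] {
        FlatTokenTree::Subtree { len } => &trees[idx + 1..idx + 1 + len as usize],
        FlatTokenTree::Leaf(_) => &[],
    }
}

fn main() {
    // `(a)` as one contiguous buffer: the paren subtree header, then its leaf.
    let flat = vec![
        FlatTokenTree::Subtree { len: 1 },
        FlatTokenTree::Leaf('a'),
    ];
    assert_eq!(children(&flat, 0), &flat[1..]);
}
```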

@@ -9,7 +9,7 @@ use libloading::Library;
use object::Object;
use paths::{Utf8Path, Utf8PathBuf};
use crate::{proc_macros::ProcMacros, ProcMacroKind, ProcMacroSrvSpan};
use crate::{proc_macros::ProcMacros, server_impl::TopSubtree, ProcMacroKind, ProcMacroSrvSpan};
/// Loads dynamic library in platform dependent manner.
///
@@ -125,12 +125,12 @@ impl Expander {
pub(crate) fn expand<S: ProcMacroSrvSpan>(
&self,
macro_name: &str,
macro_body: tt::Subtree<S>,
attributes: Option<tt::Subtree<S>>,
macro_body: TopSubtree<S>,
attributes: Option<TopSubtree<S>>,
def_site: S,
call_site: S,
mixed_site: S,
) -> Result<tt::Subtree<S>, String>
) -> Result<TopSubtree<S>, String>
where
<S::Server as bridge::server::Types>::TokenStream: Default,
{

@@ -72,12 +72,12 @@ impl ProcMacroSrv<'_> {
env: Vec<(String, String)>,
current_dir: Option<impl AsRef<Path>>,
macro_name: String,
macro_body: tt::Subtree<S>,
attribute: Option<tt::Subtree<S>>,
macro_body: tt::TopSubtree<S>,
attribute: Option<tt::TopSubtree<S>>,
def_site: S,
call_site: S,
mixed_site: S,
) -> Result<tt::Subtree<S>, String> {
) -> Result<Vec<tt::TokenTree<S>>, String> {
let snapped_env = self.env;
let expander =
self.expander(lib.as_ref()).map_err(|err| format!("failed to load macro: {err}"))?;
@@ -91,14 +91,16 @@ impl ProcMacroSrv<'_> {
.stack_size(EXPANDER_STACK_SIZE)
.name(macro_name.clone())
.spawn_scoped(s, move || {
expander.expand(
&macro_name,
macro_body,
attribute,
def_site,
call_site,
mixed_site,
)
expander
.expand(
&macro_name,
server_impl::TopSubtree(macro_body.0.into_vec()),
attribute.map(|it| server_impl::TopSubtree(it.0.into_vec())),
def_site,
call_site,
mixed_site,
)
.map(|tt| tt.0)
});
let res = match thread {
Ok(handle) => handle.join(),

@@ -4,7 +4,9 @@ use proc_macro::bridge;
use libloading::Library;
use crate::{dylib::LoadProcMacroDylibError, ProcMacroKind, ProcMacroSrvSpan};
use crate::{
dylib::LoadProcMacroDylibError, server_impl::TopSubtree, ProcMacroKind, ProcMacroSrvSpan,
};
#[repr(transparent)]
pub(crate) struct ProcMacros([bridge::client::ProcMacro]);
@@ -41,12 +43,12 @@ impl ProcMacros {
pub(crate) fn expand<S: ProcMacroSrvSpan>(
&self,
macro_name: &str,
macro_body: tt::Subtree<S>,
attributes: Option<tt::Subtree<S>>,
macro_body: TopSubtree<S>,
attributes: Option<TopSubtree<S>>,
def_site: S,
call_site: S,
mixed_site: S,
) -> Result<tt::Subtree<S>, crate::PanicMessage> {
) -> Result<TopSubtree<S>, crate::PanicMessage> {
let parsed_body = crate::server_impl::TokenStream::with_subtree(macro_body);
let parsed_attributes = attributes

@@ -8,6 +8,8 @@
//!
//! FIXME: No span and source file information is implemented yet
use std::fmt;
use proc_macro::bridge;
mod token_stream;
@@ -19,6 +21,32 @@ pub mod token_id;
// pub use symbol::*;
use tt::Spacing;
#[derive(Clone)]
pub(crate) struct TopSubtree<S>(pub(crate) Vec<tt::TokenTree<S>>);
impl<S: Copy + fmt::Debug> fmt::Debug for TopSubtree<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&tt::TokenTreesView::new(&self.0), f)
}
}
impl<S: Copy> TopSubtree<S> {
pub(crate) fn top_subtree(&self) -> &tt::Subtree<S> {
let tt::TokenTree::Subtree(subtree) = &self.0[0] else {
unreachable!("the first token tree is always the top subtree");
};
subtree
}
pub(crate) fn from_bridge(group: bridge::Group<TokenStream<S>, S>) -> Self {
let delimiter = delim_to_internal(group.delimiter, group.span);
let mut tts =
group.stream.map(|it| it.token_trees).unwrap_or_else(|| Vec::with_capacity(1));
tts.insert(0, tt::TokenTree::Subtree(tt::Subtree { delimiter, len: tts.len() as u32 }));
TopSubtree(tts)
}
}
fn delim_to_internal<S>(d: proc_macro::Delimiter, span: bridge::DelimSpan<S>) -> tt::Delimiter<S> {
let kind = match d {
proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,

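The `Debug` impl above delegates to `tt::TokenTreesView::new(&self.0)`, which presumably knows how to walk this flat encoding. One consequence of the encoding is that iterating the direct children of a stream means skipping over each subtree's descendants instead of recursing into boxed slices. A simplified, hypothetical top-level iterator (not the actual `TokenTreesView` implementation) might look like this:

```rust
// Hedged sketch: iterate only the top-level trees of a flat buffer, skipping
// each subtree's `len` descendants. Types and names are illustrative only.
#[derive(Debug, PartialEq)]
enum FlatTokenTree {
    Leaf(&'static str),
    Subtree { len: u32 },
}

fn top_level<'a>(trees: &'a [FlatTokenTree]) -> impl Iterator<Item = &'a FlatTokenTree> + 'a {
    let mut idx = 0;
    std::iter::from_fn(move || {
        let tree = trees.get(idx)?;
        idx += 1 + match tree {
            FlatTokenTree::Subtree { len } => *len as usize, // jump over descendants
            FlatTokenTree::Leaf(_) => 0,
        };
        Some(tree)
    })
}

fn main() {
    // `(a);` flattened: paren header, its leaf, then the trailing semicolon.
    let flat = [
        FlatTokenTree::Subtree { len: 1 },
        FlatTokenTree::Leaf("a"),
        FlatTokenTree::Leaf(";"),
    ];
    // Two top-level trees: the parenthesized group and the `;`.
    assert_eq!(top_level(&flat).count(), 2);
}
```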

@@ -6,7 +6,6 @@
//! change their representation to be compatible with rust-analyzer's.
use std::{
collections::{HashMap, HashSet},
iter,
ops::{Bound, Range},
};
@@ -15,14 +14,10 @@ use proc_macro::bridge::{self, server};
use span::{FileId, Span, FIXUP_ERASED_FILE_AST_ID_MARKER};
use tt::{TextRange, TextSize};
use crate::server_impl::{
delim_to_external, delim_to_internal, literal_kind_to_external, literal_kind_to_internal,
token_stream::TokenStreamBuilder,
};
use crate::server_impl::{literal_kind_to_internal, token_stream::TokenStreamBuilder, TopSubtree};
mod tt {
pub use tt::*;
pub type Subtree = ::tt::Subtree<super::Span>;
pub type TokenTree = ::tt::TokenTree<super::Span>;
pub type Leaf = ::tt::Leaf<super::Span>;
pub type Literal = ::tt::Literal<super::Span>;
@@ -161,15 +156,8 @@ impl server::TokenStream for RaSpanServer {
) -> Self::TokenStream {
match tree {
bridge::TokenTree::Group(group) => {
let group = tt::Subtree {
delimiter: delim_to_internal(group.delimiter, group.span),
token_trees: match group.stream {
Some(stream) => stream.into_iter().collect(),
None => Box::new([]),
},
};
let tree = tt::TokenTree::from(group);
Self::TokenStream::from_iter(iter::once(tree))
let group = TopSubtree::from_bridge(group);
TokenStream { token_trees: group.0 }
}
bridge::TokenTree::Ident(ident) => {
@@ -181,7 +169,7 @@ impl server::TokenStream for RaSpanServer {
};
let leaf = tt::Leaf::from(ident);
let tree = tt::TokenTree::from(leaf);
Self::TokenStream::from_iter(iter::once(tree))
TokenStream { token_trees: vec![tree] }
}
bridge::TokenTree::Literal(literal) => {
@@ -194,7 +182,7 @@ impl server::TokenStream for RaSpanServer {
let leaf: tt::Leaf = tt::Leaf::from(literal);
let tree = tt::TokenTree::from(leaf);
Self::TokenStream::from_iter(iter::once(tree))
TokenStream { token_trees: vec![tree] }
}
bridge::TokenTree::Punct(p) => {
@@ -205,7 +193,7 @@ impl server::TokenStream for RaSpanServer {
};
let leaf = tt::Leaf::from(punct);
let tree = tt::TokenTree::from(leaf);
Self::TokenStream::from_iter(iter::once(tree))
TokenStream { token_trees: vec![tree] }
}
}
}
@@ -253,42 +241,7 @@ impl server::TokenStream for RaSpanServer {
&mut self,
stream: Self::TokenStream,
) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
stream
.into_iter()
.map(|tree| match tree {
tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
bridge::TokenTree::Ident(bridge::Ident {
sym: ident.sym,
is_raw: ident.is_raw.yes(),
span: ident.span,
})
}
tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
bridge::TokenTree::Literal(bridge::Literal {
span: lit.span,
kind: literal_kind_to_external(lit.kind),
symbol: lit.symbol,
suffix: lit.suffix,
})
}
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
bridge::TokenTree::Punct(bridge::Punct {
ch: punct.char as u8,
joint: punct.spacing == tt::Spacing::Joint,
span: punct.span,
})
}
tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
delimiter: delim_to_external(subtree.delimiter),
stream: if subtree.token_trees.is_empty() {
None
} else {
Some(subtree.token_trees.into_vec().into_iter().collect())
},
span: bridge::DelimSpan::from_single(subtree.delimiter.open),
}),
})
.collect()
stream.into_bridge()
}
}
@@ -507,13 +460,14 @@ mod tests {
close: span,
kind: tt::DelimiterKind::Brace,
},
token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
kind: tt::LitKind::Str,
symbol: Symbol::intern("string"),
suffix: None,
span,
}))]),
len: 1,
}),
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
kind: tt::LitKind::Str,
symbol: Symbol::intern("string"),
suffix: None,
span,
})),
],
};
@@ -530,35 +484,38 @@
},
ctx: SyntaxContextId::ROOT,
};
let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
open: span,
close: span,
kind: tt::DelimiterKind::Parenthesis,
},
token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
let subtree_paren_a = vec![
tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
open: span,
close: span,
kind: tt::DelimiterKind::Parenthesis,
},
len: 1,
}),
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
is_raw: tt::IdentIsRaw::No,
sym: Symbol::intern("a"),
span,
}))]),
});
})),
];
let t1 = TokenStream::from_str("(a)", span).unwrap();
assert_eq!(t1.token_trees.len(), 1);
assert_eq!(t1.token_trees[0], subtree_paren_a);
assert_eq!(t1.token_trees.len(), 2);
assert!(t1.token_trees == subtree_paren_a);
let t2 = TokenStream::from_str("(a);", span).unwrap();
assert_eq!(t2.token_trees.len(), 2);
assert_eq!(t2.token_trees[0], subtree_paren_a);
assert_eq!(t2.token_trees.len(), 3);
assert!(t2.token_trees[0..2] == subtree_paren_a);
let underscore = TokenStream::from_str("_", span).unwrap();
assert_eq!(
underscore.token_trees[0],
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
sym: Symbol::intern("_"),
span,
is_raw: tt::IdentIsRaw::No,
}))
assert!(
underscore.token_trees[0]
== tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
sym: Symbol::intern("_"),
span,
is_raw: tt::IdentIsRaw::No,
}))
);
}
}

@@ -1,30 +1,22 @@
//! proc-macro server backend based on [`proc_macro_api::msg::TokenId`] as the backing span.
//! This backend is rather inflexible, used by RustRover and older rust-analyzer versions.
use std::{
iter,
ops::{Bound, Range},
};
use std::ops::{Bound, Range};
use intern::Symbol;
use proc_macro::bridge::{self, server};
use crate::server_impl::{
delim_to_external, delim_to_internal, literal_kind_to_external, literal_kind_to_internal,
token_stream::TokenStreamBuilder,
};
use crate::server_impl::{literal_kind_to_internal, token_stream::TokenStreamBuilder, TopSubtree};
mod tt {
pub use span::TokenId;
pub use tt::*;
pub type Subtree = ::tt::Subtree<TokenId>;
pub type TokenTree = ::tt::TokenTree<TokenId>;
pub type Leaf = ::tt::Leaf<TokenId>;
pub type Literal = ::tt::Literal<TokenId>;
pub type Punct = ::tt::Punct<TokenId>;
pub type Ident = ::tt::Ident<TokenId>;
}
type Group = tt::Subtree;
type TokenTree = tt::TokenTree;
type Punct = tt::Punct;
type Spacing = tt::Spacing;
@@ -148,15 +140,8 @@ impl server::TokenStream for TokenIdServer {
) -> Self::TokenStream {
match tree {
bridge::TokenTree::Group(group) => {
let group = Group {
delimiter: delim_to_internal(group.delimiter, group.span),
token_trees: match group.stream {
Some(stream) => stream.into_iter().collect(),
None => Box::new([]),
},
};
let tree = TokenTree::from(group);
Self::TokenStream::from_iter(iter::once(tree))
let group = TopSubtree::from_bridge(group);
TokenStream { token_trees: group.0 }
}
bridge::TokenTree::Ident(ident) => {
@@ -167,7 +152,7 @@ impl server::TokenStream for TokenIdServer {
};
let leaf = tt::Leaf::from(ident);
let tree = TokenTree::from(leaf);
Self::TokenStream::from_iter(iter::once(tree))
TokenStream { token_trees: vec![tree] }
}
bridge::TokenTree::Literal(literal) => {
@@ -180,7 +165,7 @@ impl server::TokenStream for TokenIdServer {
let leaf = tt::Leaf::from(literal);
let tree = TokenTree::from(leaf);
Self::TokenStream::from_iter(iter::once(tree))
TokenStream { token_trees: vec![tree] }
}
bridge::TokenTree::Punct(p) => {
@@ -191,7 +176,7 @@ impl server::TokenStream for TokenIdServer {
};
let leaf = tt::Leaf::from(punct);
let tree = TokenTree::from(leaf);
Self::TokenStream::from_iter(iter::once(tree))
TokenStream { token_trees: vec![tree] }
}
}
}
@@ -234,42 +219,7 @@ impl server::TokenStream for TokenIdServer {
&mut self,
stream: Self::TokenStream,
) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
stream
.into_iter()
.map(|tree| match tree {
tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
bridge::TokenTree::Ident(bridge::Ident {
sym: ident.sym,
is_raw: ident.is_raw.yes(),
span: ident.span,
})
}
tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
bridge::TokenTree::Literal(bridge::Literal {
span: lit.span,
kind: literal_kind_to_external(lit.kind),
symbol: lit.symbol,
suffix: lit.suffix,
})
}
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
bridge::TokenTree::Punct(bridge::Punct {
ch: punct.char as u8,
joint: punct.spacing == Spacing::Joint,
span: punct.span,
})
}
tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
delimiter: delim_to_external(subtree.delimiter),
stream: if subtree.token_trees.is_empty() {
None
} else {
Some(TokenStream { token_trees: subtree.token_trees.into_vec() })
},
span: bridge::DelimSpan::from_single(subtree.delimiter.open),
}),
})
.collect()
stream.into_bridge()
}
}
@@ -398,7 +348,7 @@ mod tests {
close: tt::TokenId(0),
kind: tt::DelimiterKind::Brace,
},
token_trees: Box::new([]),
len: 0,
}),
],
};
@@ -408,35 +358,38 @@
#[test]
fn test_ra_server_from_str() {
let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
open: tt::TokenId(0),
close: tt::TokenId(0),
kind: tt::DelimiterKind::Parenthesis,
},
token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
let subtree_paren_a = vec![
tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
open: tt::TokenId(0),
close: tt::TokenId(0),
kind: tt::DelimiterKind::Parenthesis,
},
len: 1,
}),
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
is_raw: tt::IdentIsRaw::No,
sym: Symbol::intern("a"),
span: tt::TokenId(0),
}))]),
});
})),
];
let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap();
assert_eq!(t1.token_trees.len(), 1);
assert_eq!(t1.token_trees[0], subtree_paren_a);
assert_eq!(t1.token_trees.len(), 2);
assert!(t1.token_trees[0..2] == subtree_paren_a);
let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap();
assert_eq!(t2.token_trees.len(), 2);
assert_eq!(t2.token_trees[0], subtree_paren_a);
assert_eq!(t2.token_trees.len(), 3);
assert!(t2.token_trees[0..2] == subtree_paren_a);
let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap();
assert_eq!(
underscore.token_trees[0],
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
sym: Symbol::intern("_"),
span: tt::TokenId(0),
is_raw: tt::IdentIsRaw::No,
}))
assert!(
underscore.token_trees[0]
== tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
sym: Symbol::intern("_"),
span: tt::TokenId(0),
is_raw: tt::IdentIsRaw::No,
}))
);
}
}

@@ -1,10 +1,20 @@
//! TokenStream implementation used by sysroot ABI
use tt::TokenTree;
use proc_macro::bridge;
#[derive(Debug, Clone)]
use crate::server_impl::{delim_to_external, literal_kind_to_external, TopSubtree};
#[derive(Clone)]
pub struct TokenStream<S> {
pub(super) token_trees: Vec<TokenTree<S>>,
pub(super) token_trees: Vec<tt::TokenTree<S>>,
}
impl<S: std::fmt::Debug + Copy> std::fmt::Debug for TokenStream<S> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("TokenStream")
.field("token_trees", &tt::TokenTreesView::new(&self.token_trees))
.finish()
}
}
impl<S> Default for TokenStream<S> {
@@ -13,84 +23,85 @@ impl<S> Default for TokenStream<S> {
}
}
impl<S> TokenStream<S> {
impl<S: Copy> TokenStream<S> {
pub(crate) fn new() -> Self {
TokenStream::default()
}
pub(crate) fn with_subtree(subtree: tt::Subtree<S>) -> Self {
if subtree.delimiter.kind != tt::DelimiterKind::Invisible {
TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
} else {
TokenStream { token_trees: subtree.token_trees.into_vec() }
pub(crate) fn with_subtree(subtree: TopSubtree<S>) -> Self {
let delimiter_kind = subtree.top_subtree().delimiter.kind;
let mut token_trees = subtree.0;
if delimiter_kind == tt::DelimiterKind::Invisible {
token_trees.remove(0);
}
TokenStream { token_trees }
}
pub(crate) fn into_subtree(self, call_site: S) -> tt::Subtree<S>
pub(crate) fn into_subtree(mut self, call_site: S) -> TopSubtree<S>
where
S: Copy,
{
tt::Subtree {
delimiter: tt::Delimiter {
open: call_site,
close: call_site,
kind: tt::DelimiterKind::Invisible,
},
token_trees: self.token_trees.into_boxed_slice(),
}
self.token_trees.insert(
0,
tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
open: call_site,
close: call_site,
kind: tt::DelimiterKind::Invisible,
},
len: self.token_trees.len() as u32,
}),
);
TopSubtree(self.token_trees)
}
pub(super) fn is_empty(&self) -> bool {
self.token_trees.is_empty()
}
}
/// Creates a token stream containing a single token tree.
impl<S> From<TokenTree<S>> for TokenStream<S> {
fn from(tree: TokenTree<S>) -> TokenStream<S> {
TokenStream { token_trees: vec![tree] }
}
}
/// Collects a number of token trees into a single stream.
impl<S> FromIterator<TokenTree<S>> for TokenStream<S> {
fn from_iter<I: IntoIterator<Item = TokenTree<S>>>(trees: I) -> Self {
trees.into_iter().map(TokenStream::from).collect()
}
}
/// A "flattening" operation on token streams, collects token trees
/// from multiple token streams into a single stream.
impl<S> FromIterator<TokenStream<S>> for TokenStream<S> {
fn from_iter<I: IntoIterator<Item = TokenStream<S>>>(streams: I) -> Self {
let mut builder = TokenStreamBuilder::new();
streams.into_iter().for_each(|stream| builder.push(stream));
builder.build()
}
}
impl<S> Extend<TokenTree<S>> for TokenStream<S> {
fn extend<I: IntoIterator<Item = TokenTree<S>>>(&mut self, trees: I) {
self.extend(trees.into_iter().map(TokenStream::from));
}
}
impl<S> Extend<TokenStream<S>> for TokenStream<S> {
fn extend<I: IntoIterator<Item = TokenStream<S>>>(&mut self, streams: I) {
for item in streams {
for tkn in item {
match tkn {
tt::TokenTree::Subtree(subtree)
if subtree.delimiter.kind == tt::DelimiterKind::Invisible =>
{
self.token_trees.extend(subtree.token_trees.into_vec().into_iter());
}
_ => {
self.token_trees.push(tkn);
}
pub(crate) fn into_bridge(self) -> Vec<bridge::TokenTree<Self, S, intern::Symbol>> {
let mut result = Vec::new();
let mut iter = self.token_trees.into_iter();
while let Some(tree) = iter.next() {
match tree {
tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
result.push(bridge::TokenTree::Ident(bridge::Ident {
sym: ident.sym,
is_raw: ident.is_raw.yes(),
span: ident.span,
}))
}
tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
result.push(bridge::TokenTree::Literal(bridge::Literal {
span: lit.span,
kind: literal_kind_to_external(lit.kind),
symbol: lit.symbol,
suffix: lit.suffix,
}))
}
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
result.push(bridge::TokenTree::Punct(bridge::Punct {
ch: punct.char as u8,
joint: punct.spacing == tt::Spacing::Joint,
span: punct.span,
}))
}
tt::TokenTree::Subtree(subtree) => {
result.push(bridge::TokenTree::Group(bridge::Group {
delimiter: delim_to_external(subtree.delimiter),
stream: if subtree.len == 0 {
None
} else {
Some(TokenStream {
token_trees: iter.by_ref().take(subtree.usize_len()).collect(),
})
},
span: bridge::DelimSpan::from_single(subtree.delimiter.open),
}))
}
}
}
result
}
}
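Both `TopSubtree::from_bridge` and `into_subtree` build the flat form by collecting the contents first and then placing the subtree header in front of them, with `len` set to the number of trees that follow. The sketch below shows the same idea for a nested input, using a placeholder header that is patched once the children have been appended; the `Nested`/`Flat` types are illustrative only, not the crate's actual builder:

```rust
// Hedged sketch of flattening a nested token tree into one contiguous Vec:
// children are flattened after a reserved header slot, then the header's
// `len` is patched to the number of trees that were appended after it.
#[derive(Debug, Clone, PartialEq)]
enum Nested {
    Leaf(char),
    Group(Vec<Nested>),
}

#[derive(Debug, Clone, PartialEq)]
enum Flat {
    Leaf(char),
    Subtree { len: u32 },
}

fn flatten_into(tree: &Nested, out: &mut Vec<Flat>) {
    match tree {
        Nested::Leaf(c) => out.push(Flat::Leaf(*c)),
        Nested::Group(children) => {
            let header = out.len();
            out.push(Flat::Subtree { len: 0 }); // placeholder, patched below
            for child in children {
                flatten_into(child, out);
            }
            let len = (out.len() - header - 1) as u32;
            out[header] = Flat::Subtree { len };
        }
    }
}

fn main() {
    // `{ (a) b }`
    let nested = Nested::Group(vec![
        Nested::Group(vec![Nested::Leaf('a')]),
        Nested::Leaf('b'),
    ]);
    let mut flat = Vec::new();
    flatten_into(&nested, &mut flat);
    assert_eq!(
        flat,
        vec![
            Flat::Subtree { len: 3 }, // outer brace: all 3 descendants follow
            Flat::Subtree { len: 1 }, // inner paren
            Flat::Leaf('a'),
            Flat::Leaf('b'),
        ]
    );
}
```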
@@ -103,19 +114,7 @@ pub(super) mod token_stream_impls {
use core::fmt;
use super::{TokenStream, TokenTree};
/// An iterator over `TokenStream`'s `TokenTree`s.
/// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
/// and returns whole groups as token trees.
impl<S> IntoIterator for TokenStream<S> {
type Item = TokenTree<S>;
type IntoIter = std::vec::IntoIter<TokenTree<S>>;
fn into_iter(self) -> Self::IntoIter {
self.token_trees.into_iter()
}
}
use super::{TokenStream, TopSubtree};
/// Attempts to break the string into tokens and parse those tokens into a token stream.
/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
@@ -133,7 +132,7 @@ pub(super) mod token_stream_impls {
)
.ok_or_else(|| format!("lexing error: {src}"))?;
Ok(TokenStream::with_subtree(subtree))
Ok(TokenStream::with_subtree(TopSubtree(subtree.0.into_vec())))
}
}
@@ -145,13 +144,13 @@
}
}
impl<S> TokenStreamBuilder<S> {
impl<S: Copy> TokenStreamBuilder<S> {
pub(super) fn new() -> TokenStreamBuilder<S> {
TokenStreamBuilder { acc: TokenStream::new() }
}
pub(super) fn push(&mut self, stream: TokenStream<S>) {
self.acc.extend(stream)
self.acc.token_trees.extend(stream.token_trees)
}
pub(super) fn build(self) -> TokenStream<S> {

@@ -7,10 +7,12 @@ use tt::TextRange;
use crate::{dylib, proc_macro_test_dylib_path, EnvSnapshot, ProcMacroSrv};
fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream<TokenId> {
crate::server_impl::TokenStream::with_subtree(
crate::server_impl::TokenStream::with_subtree(crate::server_impl::TopSubtree(
syntax_bridge::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src)
.unwrap(),
)
.unwrap()
.0
.into_vec(),
))
}
fn parse_string_spanned(
@@ -18,9 +20,12 @@ fn parse_string_spanned(
call_site: SyntaxContextId,
src: &str,
) -> crate::server_impl::TokenStream<Span> {
crate::server_impl::TokenStream::with_subtree(
syntax_bridge::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(),
)
crate::server_impl::TokenStream::with_subtree(crate::server_impl::TopSubtree(
syntax_bridge::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src)
.unwrap()
.0
.into_vec(),
))
}
pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect, expect_s: Expect) {