Re-enable proc-macros

This commit is contained in:
Lukas Wirth 2023-11-28 16:28:51 +01:00
parent 98cfdde8ba
commit b98597f06d
24 changed files with 787 additions and 493 deletions

View file

@ -11,6 +11,8 @@ pub mod msg;
mod process;
mod version;
use base_db::span::SpanData;
use indexmap::IndexSet;
use paths::AbsPathBuf;
use std::{fmt, io, sync::Mutex};
use triomphe::Arc;
@ -18,7 +20,7 @@ use triomphe::Arc;
use serde::{Deserialize, Serialize};
use crate::{
msg::{flat::SerializableSpan, ExpandMacro, FlatTree, PanicMessage},
msg::{ExpandMacro, ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS},
process::ProcMacroProcessSrv,
};
@ -132,32 +134,49 @@ impl ProcMacro {
self.kind
}
pub fn expand<const L: usize, S: SerializableSpan<L>>(
pub fn expand(
&self,
subtree: &tt::Subtree<S>,
attr: Option<&tt::Subtree<S>>,
subtree: &tt::Subtree<SpanData>,
attr: Option<&tt::Subtree<SpanData>>,
env: Vec<(String, String)>,
) -> Result<Result<tt::Subtree<S>, PanicMessage>, ServerError> {
def_site: SpanData,
call_site: SpanData,
mixed_site: SpanData,
) -> Result<Result<tt::Subtree<SpanData>, PanicMessage>, ServerError> {
let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version();
let current_dir = env
.iter()
.find(|(name, _)| name == "CARGO_MANIFEST_DIR")
.map(|(_, value)| value.clone());
let mut span_data_table = IndexSet::default();
let def_site = span_data_table.insert_full(def_site).0;
let call_site = span_data_table.insert_full(call_site).0;
let mixed_site = span_data_table.insert_full(mixed_site).0;
let task = ExpandMacro {
macro_body: FlatTree::new(subtree, version),
macro_body: FlatTree::new(subtree, version, &mut span_data_table),
macro_name: self.name.to_string(),
attributes: attr.map(|subtree| FlatTree::new(subtree, version)),
attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)),
lib: self.dylib_path.to_path_buf().into(),
env,
current_dir,
has_global_spans: ExpnGlobals {
serialize: version >= HAS_GLOBAL_SPANS,
def_site,
call_site,
mixed_site,
},
};
let request = msg::Request::ExpandMacro(task);
let response = self.process.lock().unwrap_or_else(|e| e.into_inner()).send_task(request)?;
let response = self
.process
.lock()
.unwrap_or_else(|e| e.into_inner())
.send_task(msg::Request::ExpandMacro(task))?;
match response {
msg::Response::ExpandMacro(it) => {
Ok(it.map(|tree| FlatTree::to_subtree(tree, version)))
Ok(it.map(|tree| FlatTree::to_subtree_resolved(tree, version, &span_data_table)))
}
msg::Response::ListMacros(..) | msg::Response::ApiVersionCheck(..) => {
Err(ServerError { message: "unexpected response".to_string(), io: None })

View file

@ -10,21 +10,15 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize};
use crate::ProcMacroKind;
pub use crate::msg::flat::FlatTree;
pub use crate::msg::flat::{FlatTree, TokenId};
// The versions of the server protocol
pub const NO_VERSION_CHECK_VERSION: u32 = 0;
pub const VERSION_CHECK_VERSION: u32 = 1;
pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2;
/// This version changes how spans are encoded, kind of. Prior to this version,
/// spans were represented as a single u32 which effectively forced spans to be
/// token ids. Starting with this version, the span fields are still u32,
/// but if the size of the span is greater than 1 then the span data is encoded in
/// an additional vector where the span represents the offset into that vector.
/// This allows encoding bigger spans while supporting the previous versions.
pub const VARIABLE_SIZED_SPANS: u32 = 2;
pub const HAS_GLOBAL_SPANS: u32 = 3;
pub const CURRENT_API_VERSION: u32 = VARIABLE_SIZED_SPANS;
pub const CURRENT_API_VERSION: u32 = HAS_GLOBAL_SPANS;
#[derive(Debug, Serialize, Deserialize)]
pub enum Request {
@ -66,6 +60,24 @@ pub struct ExpandMacro {
pub env: Vec<(String, String)>,
pub current_dir: Option<String>,
/// Marker controlling whether `has_global_spans` is serialized at all:
/// it is skipped (via `ExpnGlobals::skip_serializing_if`) when the server
/// is older than `HAS_GLOBAL_SPANS`, keeping the wire format backward compatible.
#[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")]
pub has_global_spans: ExpnGlobals,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct ExpnGlobals {
#[serde(skip_serializing)]
pub serialize: bool,
pub def_site: usize,
pub call_site: usize,
pub mixed_site: usize,
}
impl ExpnGlobals {
fn skip_serializing_if(&self) -> bool {
!self.serialize
}
}
pub trait Message: Serialize + DeserializeOwned {
@ -120,38 +132,89 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
Ok(())
}
/*TODO
#[cfg(test)]
mod tests {
use tt::{
Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, SpanAnchor, Subtree,
TokenId, TokenTree,
use base_db::{
span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId},
FileId,
};
use la_arena::RawIdx;
use text_size::{TextRange, TextSize};
use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Subtree, TokenTree};
use super::*;
fn fixture_token_tree() -> Subtree<TokenId> {
let mut subtree = Subtree { delimiter: Delimiter::unspecified(), token_trees: Vec::new() };
subtree
.token_trees
.push(TokenTree::Leaf(Ident { text: "struct".into(), span: TokenId(0) }.into()));
subtree
.token_trees
.push(TokenTree::Leaf(Ident { text: "Foo".into(), span: TokenId(1) }.into()));
fn fixture_token_tree() -> Subtree<SpanData> {
let anchor =
SpanAnchor { file_id: FileId(0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)) };
let mut subtree = Subtree {
delimiter: Delimiter {
open: SpanData {
range: TextRange::empty(TextSize::new(0)),
anchor,
ctx: SyntaxContextId::ROOT,
},
close: SpanData {
range: TextRange::empty(TextSize::new(13)),
anchor,
ctx: SyntaxContextId::ROOT,
},
kind: DelimiterKind::Invisible,
},
token_trees: Vec::new(),
};
subtree.token_trees.push(TokenTree::Leaf(
Ident {
text: "struct".into(),
span: SpanData {
range: TextRange::at(TextSize::new(0), TextSize::of("struct")),
anchor,
ctx: SyntaxContextId::ROOT,
},
}
.into(),
));
subtree.token_trees.push(TokenTree::Leaf(
Ident {
text: "Foo".into(),
span: SpanData {
range: TextRange::at(TextSize::new(5), TextSize::of("Foo")),
anchor,
ctx: SyntaxContextId::ROOT,
},
}
.into(),
));
subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal {
text: "Foo".into(),
span: TokenId::DUMMY,
span: SpanData {
range: TextRange::at(TextSize::new(8), TextSize::of("Foo")),
anchor,
ctx: SyntaxContextId::ROOT,
},
})));
subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct {
char: '@',
span: TokenId::DUMMY,
span: SpanData {
range: TextRange::at(TextSize::new(11), TextSize::of('@')),
anchor,
ctx: SyntaxContextId::ROOT,
},
spacing: Spacing::Joint,
})));
subtree.token_trees.push(TokenTree::Subtree(Subtree {
delimiter: Delimiter {
open: TokenId(2),
close: TokenId::DUMMY,
open: SpanData {
range: TextRange::at(TextSize::new(12), TextSize::of('{')),
anchor,
ctx: SyntaxContextId::ROOT,
},
close: SpanData {
range: TextRange::at(TextSize::new(13), TextSize::of('}')),
anchor,
ctx: SyntaxContextId::ROOT,
},
kind: DelimiterKind::Brace,
},
token_trees: vec![],
@ -162,20 +225,26 @@ mod tests {
#[test]
fn test_proc_macro_rpc_works() {
let tt = fixture_token_tree();
let mut span_data_table = Default::default();
let task = ExpandMacro {
macro_body: FlatTree::new(&tt, CURRENT_API_VERSION),
macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table),
macro_name: Default::default(),
attributes: None,
lib: std::env::current_dir().unwrap(),
env: Default::default(),
current_dir: Default::default(),
has_global_spans: ExpnGlobals {
serialize: true,
def_site: 0,
call_site: 0,
mixed_site: 0,
},
};
let json = serde_json::to_string(&task).unwrap();
// println!("{}", json);
let back: ExpandMacro = serde_json::from_str(&json).unwrap();
assert_eq!(tt, back.macro_body.to_subtree(CURRENT_API_VERSION));
assert_eq!(tt, back.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table));
}
}
*/

View file

@ -37,40 +37,40 @@
use std::collections::{HashMap, VecDeque};
use base_db::span::SpanData;
use indexmap::IndexSet;
use serde::{Deserialize, Serialize};
use text_size::TextRange;
use tt::{Span, SyntaxContext};
use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, VARIABLE_SIZED_SPANS};
use crate::msg::ENCODE_CLOSE_SPAN_VERSION;
pub trait SerializableSpan<const L: usize>: Span {
fn into_u32(self) -> [u32; L];
fn from_u32(input: [u32; L]) -> Self;
}
// impl SerializableSpan<1> for tt::TokenId {
// fn into_u32(self) -> [u32; 1] {
// [self.0]
// }
// fn from_u32([input]: [u32; 1]) -> Self {
// tt::TokenId(input)
// }
// }
pub type SpanDataIndexMap = IndexSet<SpanData>;
impl<Anchor, Ctx> SerializableSpan<3> for tt::SpanData<Anchor, Ctx>
where
Anchor: From<u32> + Into<u32>,
Self: Span,
Ctx: SyntaxContext,
{
fn into_u32(self) -> [u32; 3] {
[self.anchor.into(), self.range.start().into(), self.range.end().into()]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct TokenId(pub u32);
impl std::fmt::Debug for TokenId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
fn from_u32([file_id, start, end]: [u32; 3]) -> Self {
tt::SpanData {
anchor: file_id.into(),
range: TextRange::new(start.into(), end.into()),
ctx: Ctx::DUMMY,
}
}
impl TokenId {
pub const DEF_SITE: Self = TokenId(0);
pub const CALL_SITE: Self = TokenId(0);
pub const MIXED_SITE: Self = TokenId(0);
}
impl tt::Span for TokenId {
const DUMMY: Self = TokenId(!0);
type Anchor = ();
fn anchor(self) -> Self::Anchor {
()
}
fn mk(_: Self::Anchor, _: text_size::TextRange) -> Self {
Self::DUMMY
}
}
@ -82,82 +82,41 @@ pub struct FlatTree {
ident: Vec<u32>,
token_tree: Vec<u32>,
text: Vec<String>,
#[serde(skip_serializing_if = "SpanMap::do_serialize")]
#[serde(default)]
span_map: SpanMap,
}
#[derive(Serialize, Deserialize, Debug)]
struct SpanMap {
#[serde(skip_serializing)]
serialize: bool,
span_size: u32,
spans: Vec<u32>,
}
impl Default for SpanMap {
fn default() -> Self {
Self { serialize: false, span_size: 1, spans: Default::default() }
}
}
impl SpanMap {
fn serialize_span<const L: usize, S: SerializableSpan<L>>(&mut self, span: S) -> u32 {
let u32s = span.into_u32();
if L == 1 {
u32s[0]
} else {
let offset = self.spans.len() as u32;
self.spans.extend(u32s);
offset
}
}
fn deserialize_span<const L: usize, S: SerializableSpan<L>>(&self, offset: u32) -> S {
S::from_u32(if L == 1 {
[offset].as_ref().try_into().unwrap()
} else {
self.spans[offset as usize..][..L].try_into().unwrap()
})
}
}
impl SpanMap {
fn do_serialize(&self) -> bool {
self.serialize
}
}
struct SubtreeRepr<const L: usize, S> {
open: S,
close: S,
struct SubtreeRepr {
open: TokenId,
close: TokenId,
kind: tt::DelimiterKind,
tt: [u32; 2],
}
struct LiteralRepr<const L: usize, S> {
id: S,
struct LiteralRepr {
id: TokenId,
text: u32,
}
struct PunctRepr<const L: usize, S> {
id: S,
struct PunctRepr {
id: TokenId,
char: char,
spacing: tt::Spacing,
}
struct IdentRepr<const L: usize, S> {
id: S,
struct IdentRepr {
id: TokenId,
text: u32,
}
impl FlatTree {
pub fn new<const L: usize, S: SerializableSpan<L>>(
subtree: &tt::Subtree<S>,
pub fn new(
subtree: &tt::Subtree<SpanData>,
version: u32,
span_data_table: &mut SpanDataIndexMap,
) -> FlatTree {
let mut w = Writer {
string_table: HashMap::new(),
work: VecDeque::new(),
span_data_table,
subtree: Vec::new(),
literal: Vec::new(),
@ -167,78 +126,111 @@ impl FlatTree {
text: Vec::new(),
};
w.write(subtree);
assert!(L == 1 || version >= VARIABLE_SIZED_SPANS);
let mut span_map = SpanMap {
serialize: version >= VARIABLE_SIZED_SPANS && L != 1,
span_size: L as u32,
spans: Vec::new(),
};
return FlatTree {
FlatTree {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
write_vec(&mut span_map, w.subtree, SubtreeRepr::write_with_close_span)
write_vec(w.subtree, SubtreeRepr::write_with_close_span)
} else {
write_vec(&mut span_map, w.subtree, SubtreeRepr::write)
write_vec(w.subtree, SubtreeRepr::write)
},
literal: write_vec(&mut span_map, w.literal, LiteralRepr::write),
punct: write_vec(&mut span_map, w.punct, PunctRepr::write),
ident: write_vec(&mut span_map, w.ident, IdentRepr::write),
literal: write_vec(w.literal, LiteralRepr::write),
punct: write_vec(w.punct, PunctRepr::write),
ident: write_vec(w.ident, IdentRepr::write),
token_tree: w.token_tree,
text: w.text,
span_map,
};
fn write_vec<T, F: Fn(T, &mut SpanMap) -> [u32; N], const N: usize>(
map: &mut SpanMap,
xs: Vec<T>,
f: F,
) -> Vec<u32> {
xs.into_iter().flat_map(|it| f(it, map)).collect()
}
}
pub fn to_subtree<const L: usize, S: SerializableSpan<L>>(
pub fn new_raw(subtree: &tt::Subtree<TokenId>, version: u32) -> FlatTree {
let mut w = Writer {
string_table: HashMap::new(),
work: VecDeque::new(),
span_data_table: &mut (),
subtree: Vec::new(),
literal: Vec::new(),
punct: Vec::new(),
ident: Vec::new(),
token_tree: Vec::new(),
text: Vec::new(),
};
w.write(subtree);
FlatTree {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
write_vec(w.subtree, SubtreeRepr::write_with_close_span)
} else {
write_vec(w.subtree, SubtreeRepr::write)
},
literal: write_vec(w.literal, LiteralRepr::write),
punct: write_vec(w.punct, PunctRepr::write),
ident: write_vec(w.ident, IdentRepr::write),
token_tree: w.token_tree,
text: w.text,
}
}
pub fn to_subtree_resolved(
self,
version: u32,
) -> tt::Subtree<S> {
assert!((version >= VARIABLE_SIZED_SPANS || L == 1) && L as u32 == self.span_map.span_size);
return Reader {
span_data_table: &SpanDataIndexMap,
) -> tt::Subtree<SpanData> {
Reader {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
read_vec(&self.span_map, self.subtree, SubtreeRepr::read_with_close_span)
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
} else {
read_vec(&self.span_map, self.subtree, SubtreeRepr::read)
read_vec(self.subtree, SubtreeRepr::read)
},
literal: read_vec(&self.span_map, self.literal, LiteralRepr::read),
punct: read_vec(&self.span_map, self.punct, PunctRepr::read),
ident: read_vec(&self.span_map, self.ident, IdentRepr::read),
literal: read_vec(self.literal, LiteralRepr::read),
punct: read_vec(self.punct, PunctRepr::read),
ident: read_vec(self.ident, IdentRepr::read),
token_tree: self.token_tree,
text: self.text,
span_data_table,
}
.read();
.read()
}
fn read_vec<T, F: Fn([u32; N], &SpanMap) -> T, const N: usize>(
map: &SpanMap,
xs: Vec<u32>,
f: F,
) -> Vec<T> {
let mut chunks = xs.chunks_exact(N);
let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap(), map)).collect();
assert!(chunks.remainder().is_empty());
res
pub fn to_subtree_unresolved(self, version: u32) -> tt::Subtree<TokenId> {
Reader {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
} else {
read_vec(self.subtree, SubtreeRepr::read)
},
literal: read_vec(self.literal, LiteralRepr::read),
punct: read_vec(self.punct, PunctRepr::read),
ident: read_vec(self.ident, IdentRepr::read),
token_tree: self.token_tree,
text: self.text,
span_data_table: &(),
}
.read()
}
}
impl<const L: usize, S: SerializableSpan<L>> SubtreeRepr<L, S> {
fn write(self, map: &mut SpanMap) -> [u32; 4] {
fn read_vec<T, F: Fn([u32; N]) -> T, const N: usize>(xs: Vec<u32>, f: F) -> Vec<T> {
let mut chunks = xs.chunks_exact(N);
let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect();
assert!(chunks.remainder().is_empty());
res
}
fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u32> {
xs.into_iter().flat_map(f).collect()
}
impl SubtreeRepr {
fn write(self) -> [u32; 4] {
let kind = match self.kind {
tt::DelimiterKind::Invisible => 0,
tt::DelimiterKind::Parenthesis => 1,
tt::DelimiterKind::Brace => 2,
tt::DelimiterKind::Bracket => 3,
};
[map.serialize_span(self.open), kind, self.tt[0], self.tt[1]]
[self.open.0, kind, self.tt[0], self.tt[1]]
}
fn read([open, kind, lo, len]: [u32; 4], map: &SpanMap) -> Self {
fn read([open, kind, lo, len]: [u32; 4]) -> SubtreeRepr {
let kind = match kind {
0 => tt::DelimiterKind::Invisible,
1 => tt::DelimiterKind::Parenthesis,
@ -246,24 +238,18 @@ impl<const L: usize, S: SerializableSpan<L>> SubtreeRepr<L, S> {
3 => tt::DelimiterKind::Bracket,
other => panic!("bad kind {other}"),
};
SubtreeRepr { open: map.deserialize_span(open), close: S::DUMMY, kind, tt: [lo, len] }
SubtreeRepr { open: TokenId(open), close: TokenId(!0), kind, tt: [lo, len] }
}
fn write_with_close_span(self, map: &mut SpanMap) -> [u32; 5] {
fn write_with_close_span(self) -> [u32; 5] {
let kind = match self.kind {
tt::DelimiterKind::Invisible => 0,
tt::DelimiterKind::Parenthesis => 1,
tt::DelimiterKind::Brace => 2,
tt::DelimiterKind::Bracket => 3,
};
[
map.serialize_span(self.open),
map.serialize_span(self.close),
kind,
self.tt[0],
self.tt[1],
]
[self.open.0, self.close.0, kind, self.tt[0], self.tt[1]]
}
fn read_with_close_span([open, close, kind, lo, len]: [u32; 5], map: &SpanMap) -> Self {
fn read_with_close_span([open, close, kind, lo, len]: [u32; 5]) -> SubtreeRepr {
let kind = match kind {
0 => tt::DelimiterKind::Invisible,
1 => tt::DelimiterKind::Parenthesis,
@ -271,64 +257,86 @@ impl<const L: usize, S: SerializableSpan<L>> SubtreeRepr<L, S> {
3 => tt::DelimiterKind::Bracket,
other => panic!("bad kind {other}"),
};
SubtreeRepr {
open: map.deserialize_span(open),
close: map.deserialize_span(close),
kind,
tt: [lo, len],
}
SubtreeRepr { open: TokenId(open), close: TokenId(close), kind, tt: [lo, len] }
}
}
impl<const L: usize, S: SerializableSpan<L>> LiteralRepr<L, S> {
fn write(self, map: &mut SpanMap) -> [u32; 2] {
[map.serialize_span(self.id), self.text]
impl LiteralRepr {
fn write(self) -> [u32; 2] {
[self.id.0, self.text]
}
fn read([id, text]: [u32; 2], map: &SpanMap) -> Self {
LiteralRepr { id: map.deserialize_span(id), text }
fn read([id, text]: [u32; 2]) -> LiteralRepr {
LiteralRepr { id: TokenId(id), text }
}
}
impl<const L: usize, S: SerializableSpan<L>> PunctRepr<L, S> {
fn write(self, map: &mut SpanMap) -> [u32; 3] {
impl PunctRepr {
fn write(self) -> [u32; 3] {
let spacing = match self.spacing {
tt::Spacing::Alone => 0,
tt::Spacing::Joint => 1,
};
[map.serialize_span(self.id), self.char as u32, spacing]
[self.id.0, self.char as u32, spacing]
}
fn read([id, char, spacing]: [u32; 3], map: &SpanMap) -> Self {
fn read([id, char, spacing]: [u32; 3]) -> PunctRepr {
let spacing = match spacing {
0 => tt::Spacing::Alone,
1 => tt::Spacing::Joint,
other => panic!("bad spacing {other}"),
};
PunctRepr { id: map.deserialize_span(id), char: char.try_into().unwrap(), spacing }
PunctRepr { id: TokenId(id), char: char.try_into().unwrap(), spacing }
}
}
impl<const L: usize, S: SerializableSpan<L>> IdentRepr<L, S> {
fn write(self, map: &mut SpanMap) -> [u32; 2] {
[map.serialize_span(self.id), self.text]
impl IdentRepr {
fn write(self) -> [u32; 2] {
[self.id.0, self.text]
}
fn read(data: [u32; 2], map: &SpanMap) -> Self {
IdentRepr { id: map.deserialize_span(data[0]), text: data[1] }
fn read(data: [u32; 2]) -> IdentRepr {
IdentRepr { id: TokenId(data[0]), text: data[1] }
}
}
struct Writer<'a, const L: usize, S> {
trait Span: Copy {
type Table;
fn token_id_of(table: &mut Self::Table, s: Self) -> TokenId;
fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self;
}
impl Span for TokenId {
type Table = ();
fn token_id_of((): &mut Self::Table, token_id: Self) -> TokenId {
token_id
}
fn span_for_token_id((): &Self::Table, id: TokenId) -> Self {
id
}
}
impl Span for SpanData {
type Table = IndexSet<SpanData>;
fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId {
TokenId(table.insert_full(span).0 as u32)
}
fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self {
*table.get_index(id.0 as usize).unwrap_or_else(|| &table[0])
}
}
struct Writer<'a, 'span, S: Span> {
work: VecDeque<(usize, &'a tt::Subtree<S>)>,
string_table: HashMap<&'a str, u32>,
span_data_table: &'span mut S::Table,
subtree: Vec<SubtreeRepr<L, S>>,
literal: Vec<LiteralRepr<L, S>>,
punct: Vec<PunctRepr<L, S>>,
ident: Vec<IdentRepr<L, S>>,
subtree: Vec<SubtreeRepr>,
literal: Vec<LiteralRepr>,
punct: Vec<PunctRepr>,
ident: Vec<IdentRepr>,
token_tree: Vec<u32>,
text: Vec<String>,
}
impl<'a, const L: usize, S: Copy> Writer<'a, L, S> {
impl<'a, 'span, S: Span> Writer<'a, 'span, S> {
fn write(&mut self, root: &'a tt::Subtree<S>) {
self.enqueue(root);
while let Some((idx, subtree)) = self.work.pop_front() {
@ -336,6 +344,10 @@ impl<'a, const L: usize, S: Copy> Writer<'a, L, S> {
}
}
fn token_id_of(&mut self, span: S) -> TokenId {
S::token_id_of(self.span_data_table, span)
}
fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree<S>) {
let mut first_tt = self.token_tree.len();
let n_tt = subtree.token_trees.len();
@ -353,22 +365,21 @@ impl<'a, const L: usize, S: Copy> Writer<'a, L, S> {
tt::Leaf::Literal(lit) => {
let idx = self.literal.len() as u32;
let text = self.intern(&lit.text);
self.literal.push(LiteralRepr { id: lit.span, text });
let id = self.token_id_of(lit.span);
self.literal.push(LiteralRepr { id, text });
idx << 2 | 0b01
}
tt::Leaf::Punct(punct) => {
let idx = self.punct.len() as u32;
self.punct.push(PunctRepr {
char: punct.char,
spacing: punct.spacing,
id: punct.span,
});
let id = self.token_id_of(punct.span);
self.punct.push(PunctRepr { char: punct.char, spacing: punct.spacing, id });
idx << 2 | 0b10
}
tt::Leaf::Ident(ident) => {
let idx = self.ident.len() as u32;
let text = self.intern(&ident.text);
self.ident.push(IdentRepr { id: ident.span, text });
let id = self.token_id_of(ident.span);
self.ident.push(IdentRepr { id, text });
idx << 2 | 0b11
}
},
@ -380,8 +391,8 @@ impl<'a, const L: usize, S: Copy> Writer<'a, L, S> {
fn enqueue(&mut self, subtree: &'a tt::Subtree<S>) -> u32 {
let idx = self.subtree.len();
let open = subtree.delimiter.open;
let close = subtree.delimiter.close;
let open = self.token_id_of(subtree.delimiter.open);
let close = self.token_id_of(subtree.delimiter.close);
let delimiter_kind = subtree.delimiter.kind;
self.subtree.push(SubtreeRepr { open, close, kind: delimiter_kind, tt: [!0, !0] });
self.work.push_back((idx, subtree));
@ -398,23 +409,29 @@ impl<'a, const L: usize, S: Copy> Writer<'a, L, S> {
}
}
struct Reader<const L: usize, S> {
subtree: Vec<SubtreeRepr<L, S>>,
literal: Vec<LiteralRepr<L, S>>,
punct: Vec<PunctRepr<L, S>>,
ident: Vec<IdentRepr<L, S>>,
struct Reader<'span, S: Span> {
subtree: Vec<SubtreeRepr>,
literal: Vec<LiteralRepr>,
punct: Vec<PunctRepr>,
ident: Vec<IdentRepr>,
token_tree: Vec<u32>,
text: Vec<String>,
span_data_table: &'span S::Table,
}
impl<const L: usize, S: SerializableSpan<L>> Reader<L, S> {
impl<'span, S: Span> Reader<'span, S> {
pub(crate) fn read(self) -> tt::Subtree<S> {
let mut res: Vec<Option<tt::Subtree<S>>> = vec![None; self.subtree.len()];
let read_span = |id| S::span_for_token_id(self.span_data_table, id);
for i in (0..self.subtree.len()).rev() {
let repr = &self.subtree[i];
let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize];
let s = tt::Subtree {
delimiter: tt::Delimiter { open: repr.open, close: repr.close, kind: repr.kind },
delimiter: tt::Delimiter {
open: read_span(repr.open),
close: read_span(repr.close),
kind: repr.kind,
},
token_trees: token_trees
.iter()
.copied()
@ -429,7 +446,7 @@ impl<const L: usize, S: SerializableSpan<L>> Reader<L, S> {
let repr = &self.literal[idx];
tt::Leaf::Literal(tt::Literal {
text: self.text[repr.text as usize].as_str().into(),
span: repr.id,
span: read_span(repr.id),
})
.into()
}
@ -438,7 +455,7 @@ impl<const L: usize, S: SerializableSpan<L>> Reader<L, S> {
tt::Leaf::Punct(tt::Punct {
char: repr.char,
spacing: repr.spacing,
span: repr.id,
span: read_span(repr.id),
})
.into()
}
@ -446,7 +463,7 @@ impl<const L: usize, S: SerializableSpan<L>> Reader<L, S> {
let repr = &self.ident[idx];
tt::Leaf::Ident(tt::Ident {
text: self.text[repr.text as usize].as_str().into(),
span: repr.id,
span: read_span(repr.id),
})
.into()
}