Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-27 20:42:04 +00:00

Make tt generic over the span data

commit 41a46a78f2 (parent d805c74c51)
48 changed files with 806 additions and 569 deletions
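
The hunks below capture only part of the 48-file change, but the shape of the new API is visible throughout them. As orientation, here is a minimal sketch of the span-generic `tt` types implied by the diff; the field and type names are taken from the hunks, not copied from the crate, so the exact upstream definitions may differ.

// Sketch only: the span-generic token-tree types this commit introduces,
// reconstructed from the hunks below. `S` is the span data each node carries;
// the proc-macro server keeps using the old `TokenId` for it
// (see `use ::tt::token_id as tt;` further down).

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TokenId(pub u32);

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TokenTree<S> {
    Leaf(Leaf<S>),
    Subtree(Subtree<S>),
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Leaf<S> {
    Literal(Literal<S>),
    Punct(Punct<S>),
    Ident(Ident<S>),
}

// A subtree always has a `Delimiter` now; "no delimiter" became
// `DelimiterKind::Invisible` instead of `Option<Delimiter>`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Subtree<S> {
    pub delimiter: Delimiter<S>,
    pub token_trees: Vec<TokenTree<S>>,
}

// The single `id` field is split into one span per delimiter side.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Delimiter<S> {
    pub open: S,
    pub close: S,
    pub kind: DelimiterKind,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DelimiterKind {
    Parenthesis,
    Brace,
    Bracket,
    Invisible,
}

// Leaves rename `id` to `span` and become generic as well.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Literal<S> {
    pub text: String, // upstream uses SmolStr
    pub span: S,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Punct<S> {
    pub char: char,
    pub spacing: Spacing,
    pub span: S,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Spacing {
    Alone,
    Joint,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Ident<S> {
    pub text: String, // upstream uses SmolStr
    pub span: S,
}
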
@@ -11,6 +11,7 @@ mod ra_server;
 use libloading::Library;
 use proc_macro_api::ProcMacroKind;
 
+use super::tt;
 use super::PanicMessage;
 
 pub use ra_server::TokenStream;

@@ -15,6 +15,8 @@ use std::hash::Hash;
 use std::ops::Bound;
 use std::{ascii, vec::IntoIter};
 
+use crate::tt;
+
 type Group = tt::Subtree;
 type TokenTree = tt::TokenTree;
 type Punct = tt::Punct;
@@ -33,7 +35,7 @@ impl TokenStream {
     }
 
     pub fn with_subtree(subtree: tt::Subtree) -> Self {
-        if subtree.delimiter.is_some() {
+        if subtree.delimiter.kind != tt::DelimiterKind::Invisible {
             TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
         } else {
             TokenStream { token_trees: subtree.token_trees }
@@ -41,7 +43,7 @@ impl TokenStream {
     }
 
     pub fn into_subtree(self) -> tt::Subtree {
-        tt::Subtree { delimiter: None, token_trees: self.token_trees }
+        tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: self.token_trees }
     }
 
     pub fn is_empty(&self) -> bool {
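
The hunks above and below lean on `tt::TokenId::unspecified()` / `tt::TokenId::UNSPECIFIED` and on the new `tt::Delimiter::UNSPECIFIED` placeholder. A hedged sketch of what those placeholders amount to; the exact constants live in the `tt` crate, but the value `u32::MAX` matches the `4294967295` that shows up in the updated test expectations near the end of this diff.

// Sketch, not the literal upstream definitions: the "no real span" placeholders
// referenced throughout the diff, instantiated for the TokenId-based trees.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct TokenId(pub u32);

impl TokenId {
    // 4294967295 == u32::MAX, the value printed in the updated test expectations.
    pub const UNSPECIFIED: TokenId = TokenId(u32::MAX);
    pub const fn unspecified() -> TokenId {
        TokenId::UNSPECIFIED
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DelimiterKind {
    Parenthesis,
    Brace,
    Bracket,
    Invisible,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Delimiter {
    pub open: TokenId,
    pub close: TokenId,
    pub kind: DelimiterKind,
}

impl Delimiter {
    // What `into_subtree` now uses instead of `delimiter: None`.
    pub const UNSPECIFIED: Delimiter = Delimiter {
        open: TokenId::UNSPECIFIED,
        close: TokenId::UNSPECIFIED,
        kind: DelimiterKind::Invisible,
    };
}
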
@@ -84,7 +86,9 @@ impl Extend<TokenStream> for TokenStream {
         for item in streams {
             for tkn in item {
                 match tkn {
-                    tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+                    tt::TokenTree::Subtree(subtree)
+                        if subtree.delimiter.kind == tt::DelimiterKind::Invisible =>
+                    {
                         self.token_trees.extend(subtree.token_trees);
                     }
                     _ => {
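
The guard change above is the recurring migration pattern in this commit: checks that used to go through `Option<Delimiter>` are rewritten against `DelimiterKind::Invisible`. A small self-contained sketch of the before/after, with the types reduced to the minimum needed to compile (not the crate's real definitions):

#[derive(Clone, Copy, PartialEq, Eq)]
enum DelimiterKind {
    Parenthesis,
    Brace,
    Bracket,
    Invisible,
}

#[derive(Clone, Copy)]
struct Delimiter {
    kind: DelimiterKind,
}

// Old shape: "no delimiter" was `None`.
fn is_transparent_old(delimiter: Option<Delimiter>) -> bool {
    delimiter.is_none()
}

// New shape: "no delimiter" is an always-present delimiter of kind `Invisible`.
fn is_transparent_new(delimiter: Delimiter) -> bool {
    delimiter.kind == DelimiterKind::Invisible
}

fn main() {
    assert!(is_transparent_old(None));
    assert!(is_transparent_new(Delimiter { kind: DelimiterKind::Invisible }));
    assert!(!is_transparent_new(Delimiter { kind: DelimiterKind::Brace }));
}
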
@@ -165,7 +169,7 @@ pub struct TokenStreamBuilder {
 pub mod token_stream {
     use std::str::FromStr;
 
-    use super::{TokenStream, TokenTree};
+    use super::{tt, TokenStream, TokenTree};
 
     /// An iterator over `TokenStream`'s `TokenTree`s.
     /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
@@ -202,15 +206,17 @@ pub mod token_stream {
 
     impl ToString for TokenStream {
         fn to_string(&self) -> String {
-            tt::pretty(&self.token_trees)
+            ::tt::pretty(&self.token_trees)
         }
     }
 
     fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
         tt::Subtree {
-            delimiter: subtree
-                .delimiter
-                .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+            delimiter: tt::Delimiter {
+                open: tt::TokenId::UNSPECIFIED,
+                close: tt::TokenId::UNSPECIFIED,
+                ..subtree.delimiter
+            },
             token_trees: subtree
                 .token_trees
                 .into_iter()
@@ -233,13 +239,13 @@ pub mod token_stream {
     fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
         match leaf {
             tt::Leaf::Literal(lit) => {
-                tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+                tt::Leaf::Literal(tt::Literal { span: tt::TokenId::unspecified(), ..lit })
             }
             tt::Leaf::Punct(punct) => {
-                tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+                tt::Leaf::Punct(tt::Punct { span: tt::TokenId::unspecified(), ..punct })
            }
             tt::Leaf::Ident(ident) => {
-                tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+                tt::Leaf::Ident(tt::Ident { span: tt::TokenId::unspecified(), ..ident })
             }
         }
     }
@@ -389,22 +395,22 @@ impl server::TokenStream for RustAnalyzer {
     }
 }
 
-fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
+fn delim_to_internal(d: bridge::Delimiter) -> tt::Delimiter {
     let kind = match d {
         bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
         bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
         bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
-        bridge::Delimiter::None => return None,
+        bridge::Delimiter::None => tt::DelimiterKind::Invisible,
     };
-    Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+    tt::Delimiter { open: tt::TokenId::unspecified(), close: tt::TokenId::unspecified(), kind }
 }
 
-fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
-    match d.map(|it| it.kind) {
-        Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
-        Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
-        Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
-        None => bridge::Delimiter::None,
+fn delim_to_external(d: tt::Delimiter) -> bridge::Delimiter {
+    match d.kind {
+        tt::DelimiterKind::Parenthesis => bridge::Delimiter::Parenthesis,
+        tt::DelimiterKind::Brace => bridge::Delimiter::Brace,
+        tt::DelimiterKind::Bracket => bridge::Delimiter::Bracket,
+        tt::DelimiterKind::Invisible => bridge::Delimiter::None,
     }
 }
 
@@ -443,23 +449,19 @@ impl server::Group for RustAnalyzer {
     }
 
     fn span(&mut self, group: &Self::Group) -> Self::Span {
-        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+        group.delimiter.open
     }
 
     fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
-        if let Some(delim) = &mut group.delimiter {
-            delim.id = span;
-        }
+        group.delimiter.open = span;
     }
 
     fn span_open(&mut self, group: &Self::Group) -> Self::Span {
-        // FIXME we only store one `TokenId` for the delimiters
-        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+        group.delimiter.open
     }
 
     fn span_close(&mut self, group: &Self::Group) -> Self::Span {
-        // FIXME we only store one `TokenId` for the delimiters
-        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+        group.delimiter.close
     }
 }
 
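
With the delimiter carrying separate `open` and `close` spans, the `Group` server methods above no longer funnel everything through a single `TokenId` (the old `// FIXME we only store one TokenId for the delimiters`). A small sketch of the resulting behaviour, using stand-in types rather than the real `server::Group` trait:

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TokenId(u32);

#[derive(Debug, Clone, Copy)]
struct Delimiter {
    open: TokenId,
    close: TokenId,
}

#[derive(Debug, Clone, Copy)]
struct Group {
    delimiter: Delimiter,
}

// Mirrors the new accessors: `span_open` and `span_close` can now disagree.
fn span_open(group: &Group) -> TokenId {
    group.delimiter.open
}

fn span_close(group: &Group) -> TokenId {
    group.delimiter.close
}

fn main() {
    let group = Group { delimiter: Delimiter { open: TokenId(1), close: TokenId(7) } };
    assert_eq!(span_open(&group), TokenId(1));
    assert_eq!(span_close(&group), TokenId(7));
}
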
@@ -468,7 +470,7 @@ impl server::Punct for RustAnalyzer {
         tt::Punct {
             char: ch,
             spacing: spacing_to_internal(spacing),
-            id: tt::TokenId::unspecified(),
+            span: tt::TokenId::unspecified(),
         }
     }
     fn as_char(&mut self, punct: Self::Punct) -> char {
@@ -478,28 +480,27 @@ impl server::Punct for RustAnalyzer {
         spacing_to_external(punct.spacing)
     }
     fn span(&mut self, punct: Self::Punct) -> Self::Span {
-        punct.id
+        punct.span
     }
     fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
-        tt::Punct { id: span, ..punct }
+        tt::Punct { span: span, ..punct }
     }
 }
 
 impl server::Ident for RustAnalyzer {
     fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident {
-        IdentId(self.ident_interner.intern(&IdentData(tt::Ident::new_with_is_raw(
-            string.into(),
+        IdentId(self.ident_interner.intern(&IdentData(tt::Ident {
+            text: if is_raw { ::tt::SmolStr::from_iter(["r#", string]) } else { string.into() },
             span,
-            is_raw,
-        ))))
+        })))
     }
 
     fn span(&mut self, ident: Self::Ident) -> Self::Span {
-        self.ident_interner.get(ident.0).0.id
+        self.ident_interner.get(ident.0).0.span
     }
     fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
         let data = self.ident_interner.get(ident.0);
-        let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
+        let new = IdentData(tt::Ident { span: span, ..data.0.clone() });
         IdentId(self.ident_interner.intern(&new))
     }
 }
@@ -511,7 +512,7 @@ impl server::Literal for RustAnalyzer {
         "".to_owned()
     }
     fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
-        Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
+        Ok(Literal { text: s.into(), span: tt::TokenId::unspecified() })
     }
     fn symbol(&mut self, literal: &Self::Literal) -> String {
         literal.text.to_string()
@@ -529,7 +530,7 @@ impl server::Literal for RustAnalyzer {
             Ok(n) => n.to_string(),
             Err(_) => n.parse::<u128>().unwrap().to_string(),
         };
-        Literal { text: n.into(), id: tt::TokenId::unspecified() }
+        Literal { text: n.into(), span: tt::TokenId::unspecified() }
     }
 
     fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
@@ -549,7 +550,7 @@ impl server::Literal for RustAnalyzer {
 
         let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
 
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+        Literal { text: text.into(), span: tt::TokenId::unspecified() }
     }
 
     fn float(&mut self, n: &str) -> Self::Literal {
@@ -558,19 +559,19 @@ impl server::Literal for RustAnalyzer {
         if !text.contains('.') {
             text += ".0"
         }
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+        Literal { text: text.into(), span: tt::TokenId::unspecified() }
     }
 
     fn f32(&mut self, n: &str) -> Self::Literal {
         let n: f32 = n.parse().unwrap();
         let text = format!("{n}f32");
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+        Literal { text: text.into(), span: tt::TokenId::unspecified() }
     }
 
     fn f64(&mut self, n: &str) -> Self::Literal {
         let n: f64 = n.parse().unwrap();
         let text = format!("{n}f64");
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+        Literal { text: text.into(), span: tt::TokenId::unspecified() }
     }
 
     fn string(&mut self, string: &str) -> Self::Literal {
@@ -578,11 +579,11 @@ impl server::Literal for RustAnalyzer {
         for ch in string.chars() {
             escaped.extend(ch.escape_debug());
         }
-        Literal { text: format!("\"{escaped}\"").into(), id: tt::TokenId::unspecified() }
+        Literal { text: format!("\"{escaped}\"").into(), span: tt::TokenId::unspecified() }
     }
 
     fn character(&mut self, ch: char) -> Self::Literal {
-        Literal { text: format!("'{ch}'").into(), id: tt::TokenId::unspecified() }
+        Literal { text: format!("'{ch}'").into(), span: tt::TokenId::unspecified() }
     }
 
     fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
@@ -593,15 +594,15 @@ impl server::Literal for RustAnalyzer {
             .map(Into::<char>::into)
             .collect::<String>();
 
-        Literal { text: format!("b\"{string}\"").into(), id: tt::TokenId::unspecified() }
+        Literal { text: format!("b\"{string}\"").into(), span: tt::TokenId::unspecified() }
     }
 
     fn span(&mut self, literal: &Self::Literal) -> Self::Span {
-        literal.id
+        literal.span
     }
 
     fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
-        literal.id = span;
+        literal.span = span;
     }
 
     fn subspan(
@@ -784,17 +785,18 @@ mod tests {
            token_trees: vec![
                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                    text: "struct".into(),
-                    id: tt::TokenId::unspecified(),
+                    span: tt::TokenId::unspecified(),
                })),
                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                    text: "T".into(),
-                    id: tt::TokenId::unspecified(),
+                    span: tt::TokenId::unspecified(),
                })),
                tt::TokenTree::Subtree(tt::Subtree {
-                    delimiter: Some(tt::Delimiter {
-                        id: tt::TokenId::unspecified(),
+                    delimiter: tt::Delimiter {
+                        open: tt::TokenId::unspecified(),
+                        close: tt::TokenId::unspecified(),
                        kind: tt::DelimiterKind::Brace,
-                    }),
+                    },
                    token_trees: vec![],
                }),
            ],
@@ -807,13 +809,14 @@ mod tests {
     fn test_ra_server_from_str() {
         use std::str::FromStr;
         let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
-            delimiter: Some(tt::Delimiter {
-                id: tt::TokenId::unspecified(),
+            delimiter: tt::Delimiter {
+                open: tt::TokenId::unspecified(),
+                close: tt::TokenId::unspecified(),
                kind: tt::DelimiterKind::Parenthesis,
-            }),
+            },
            token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                text: "a".into(),
-                id: tt::TokenId::unspecified(),
+                span: tt::TokenId::unspecified(),
            }))],
        });
 
@@ -830,7 +833,7 @@ mod tests {
            underscore.token_trees[0],
            tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                text: "_".into(),
-                id: tt::TokenId::unspecified(),
+                span: tt::TokenId::unspecified(),
            }))
        );
    }

@@ -9,7 +9,7 @@ mod ra_server;
 use libloading::Library;
 use proc_macro_api::ProcMacroKind;
 
-use super::PanicMessage;
+use super::{tt, PanicMessage};
 
 pub use ra_server::TokenStream;
 

@@ -22,6 +22,8 @@ pub use symbol::*;
 
 use std::ops::Bound;
 
+use crate::tt;
+
 type Group = tt::Subtree;
 type TokenTree = tt::TokenTree;
 type Punct = tt::Punct;
@@ -108,8 +110,9 @@ impl server::TokenStream for RustAnalyzer {
 
             bridge::TokenTree::Ident(ident) => {
                 let text = ident.sym.text();
-                let text = if ident.is_raw { tt::SmolStr::from_iter(["r#", &text]) } else { text };
-                let ident: tt::Ident = tt::Ident { text, id: ident.span };
+                let text =
+                    if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text };
+                let ident: tt::Ident = tt::Ident { text, span: ident.span };
                 let leaf = tt::Leaf::from(ident);
                 let tree = TokenTree::from(leaf);
                 Self::TokenStream::from_iter(vec![tree])
@@ -118,9 +121,9 @@ impl server::TokenStream for RustAnalyzer {
             bridge::TokenTree::Literal(literal) => {
                 let literal = LiteralFormatter(literal);
                 let text = literal
-                    .with_stringify_parts(|parts| tt::SmolStr::from_iter(parts.iter().copied()));
+                    .with_stringify_parts(|parts| ::tt::SmolStr::from_iter(parts.iter().copied()));
 
-                let literal = tt::Literal { text, id: literal.0.span };
+                let literal = tt::Literal { text, span: literal.0.span };
                 let leaf = tt::Leaf::from(literal);
                 let tree = TokenTree::from(leaf);
                 Self::TokenStream::from_iter(vec![tree])
@@ -130,7 +133,7 @@ impl server::TokenStream for RustAnalyzer {
                 let punct = tt::Punct {
                     char: p.ch as char,
                     spacing: if p.joint { Spacing::Joint } else { Spacing::Alone },
-                    id: p.span,
+                    span: p.span,
                 };
                 let leaf = tt::Leaf::from(punct);
                 let tree = TokenTree::from(leaf);
@@ -184,7 +187,7 @@ impl server::TokenStream for RustAnalyzer {
                 bridge::TokenTree::Ident(bridge::Ident {
                     sym: Symbol::intern(ident.text.trim_start_matches("r#")),
                     is_raw: ident.text.starts_with("r#"),
-                    span: ident.id,
+                    span: ident.span,
                 })
             }
             tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
@@ -194,14 +197,14 @@ impl server::TokenStream for RustAnalyzer {
                     symbol: Symbol::intern(&lit.text),
                     // FIXME: handle suffixes
                     suffix: None,
-                    span: lit.id,
+                    span: lit.span,
                 })
             }
             tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
                 bridge::TokenTree::Punct(bridge::Punct {
                     ch: punct.char as u8,
                     joint: punct.spacing == Spacing::Joint,
-                    span: punct.id,
+                    span: punct.span,
                 })
             }
             tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
@@ -211,31 +214,29 @@ impl server::TokenStream for RustAnalyzer {
                 } else {
                     Some(subtree.token_trees.into_iter().collect())
                 },
-                span: bridge::DelimSpan::from_single(
-                    subtree.delimiter.map_or(Span::unspecified(), |del| del.id),
-                ),
+                span: bridge::DelimSpan::from_single(subtree.delimiter.open),
             }),
         })
        .collect()
    }
 }
 
-fn delim_to_internal(d: proc_macro::Delimiter) -> Option<tt::Delimiter> {
+fn delim_to_internal(d: proc_macro::Delimiter) -> tt::Delimiter {
     let kind = match d {
         proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
         proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace,
         proc_macro::Delimiter::Bracket => tt::DelimiterKind::Bracket,
-        proc_macro::Delimiter::None => return None,
+        proc_macro::Delimiter::None => tt::DelimiterKind::Invisible,
     };
-    Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+    tt::Delimiter { open: tt::TokenId::unspecified(), close: tt::TokenId::unspecified(), kind }
 }
 
-fn delim_to_external(d: Option<tt::Delimiter>) -> proc_macro::Delimiter {
-    match d.map(|it| it.kind) {
-        Some(tt::DelimiterKind::Parenthesis) => proc_macro::Delimiter::Parenthesis,
-        Some(tt::DelimiterKind::Brace) => proc_macro::Delimiter::Brace,
-        Some(tt::DelimiterKind::Bracket) => proc_macro::Delimiter::Bracket,
-        None => proc_macro::Delimiter::None,
+fn delim_to_external(d: tt::Delimiter) -> proc_macro::Delimiter {
+    match d.kind {
+        tt::DelimiterKind::Parenthesis => proc_macro::Delimiter::Parenthesis,
+        tt::DelimiterKind::Brace => proc_macro::Delimiter::Brace,
+        tt::DelimiterKind::Bracket => proc_macro::Delimiter::Bracket,
+        tt::DelimiterKind::Invisible => proc_macro::Delimiter::None,
     }
 }
 
@@ -349,7 +350,7 @@ impl server::Server for RustAnalyzer {
     }
 
     fn intern_symbol(ident: &str) -> Self::Symbol {
-        Symbol::intern(&tt::SmolStr::from(ident))
+        Symbol::intern(&::tt::SmolStr::from(ident))
     }
 
     fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
@@ -413,17 +414,18 @@ mod tests {
            token_trees: vec![
                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                    text: "struct".into(),
-                    id: tt::TokenId::unspecified(),
+                    span: tt::TokenId::unspecified(),
                })),
                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                    text: "T".into(),
-                    id: tt::TokenId::unspecified(),
+                    span: tt::TokenId::unspecified(),
                })),
                tt::TokenTree::Subtree(tt::Subtree {
-                    delimiter: Some(tt::Delimiter {
-                        id: tt::TokenId::unspecified(),
+                    delimiter: tt::Delimiter {
+                        open: tt::TokenId::unspecified(),
+                        close: tt::TokenId::unspecified(),
                        kind: tt::DelimiterKind::Brace,
-                    }),
+                    },
                    token_trees: vec![],
                }),
            ],
@@ -436,13 +438,14 @@ mod tests {
     fn test_ra_server_from_str() {
         use std::str::FromStr;
         let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
-            delimiter: Some(tt::Delimiter {
-                id: tt::TokenId::unspecified(),
+            delimiter: tt::Delimiter {
+                open: tt::TokenId::unspecified(),
+                close: tt::TokenId::unspecified(),
                kind: tt::DelimiterKind::Parenthesis,
-            }),
+            },
            token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                text: "a".into(),
-                id: tt::TokenId::unspecified(),
+                span: tt::TokenId::unspecified(),
            }))],
        });
 
@@ -459,7 +462,7 @@ mod tests {
            underscore.token_trees[0],
            tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                text: "_".into(),
-                id: tt::TokenId::unspecified(),
+                span: tt::TokenId::unspecified(),
            }))
        );
    }

@@ -1,6 +1,6 @@
 //! TokenStream implementation used by sysroot ABI
 
-use tt::TokenTree;
+use crate::tt::{self, TokenTree};
 
 #[derive(Debug, Default, Clone)]
 pub struct TokenStream {
@@ -13,7 +13,7 @@ impl TokenStream {
     }
 
     pub fn with_subtree(subtree: tt::Subtree) -> Self {
-        if subtree.delimiter.is_some() {
+        if subtree.delimiter.kind != tt::DelimiterKind::Invisible {
             TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
         } else {
             TokenStream { token_trees: subtree.token_trees }
@@ -21,7 +21,7 @@ impl TokenStream {
     }
 
     pub fn into_subtree(self) -> tt::Subtree {
-        tt::Subtree { delimiter: None, token_trees: self.token_trees }
+        tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: self.token_trees }
     }
 
     pub fn is_empty(&self) -> bool {
@@ -64,7 +64,9 @@ impl Extend<TokenStream> for TokenStream {
         for item in streams {
             for tkn in item {
                 match tkn {
-                    tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+                    tt::TokenTree::Subtree(subtree)
+                        if subtree.delimiter.kind != tt::DelimiterKind::Invisible =>
+                    {
                         self.token_trees.extend(subtree.token_trees);
                     }
                     _ => {
@@ -84,7 +86,7 @@ pub struct TokenStreamBuilder {
 pub mod token_stream {
     use std::str::FromStr;
 
-    use super::{TokenStream, TokenTree};
+    use super::{tt, TokenStream, TokenTree};
 
     /// An iterator over `TokenStream`'s `TokenTree`s.
     /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
@@ -121,15 +123,17 @@ pub mod token_stream {
 
     impl ToString for TokenStream {
         fn to_string(&self) -> String {
-            tt::pretty(&self.token_trees)
+            ::tt::pretty(&self.token_trees)
         }
     }
 
     fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
         tt::Subtree {
-            delimiter: subtree
-                .delimiter
-                .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+            delimiter: tt::Delimiter {
+                open: tt::TokenId::UNSPECIFIED,
+                close: tt::TokenId::UNSPECIFIED,
+                ..subtree.delimiter
+            },
             token_trees: subtree
                 .token_trees
                 .into_iter()
@@ -152,13 +156,13 @@ pub mod token_stream {
     fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
         match leaf {
             tt::Leaf::Literal(lit) => {
-                tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+                tt::Leaf::Literal(tt::Literal { span: tt::TokenId::unspecified(), ..lit })
             }
             tt::Leaf::Punct(punct) => {
-                tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+                tt::Leaf::Punct(tt::Punct { span: tt::TokenId::unspecified(), ..punct })
             }
             tt::Leaf::Ident(ident) => {
-                tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+                tt::Leaf::Ident(tt::Ident { span: tt::TokenId::unspecified(), ..ident })
             }
         }
     }

@@ -41,6 +41,8 @@ pub(crate) use abi_sysroot::Abi as Abi_Sysroot;
 use libloading::Library;
 use proc_macro_api::{ProcMacroKind, RustCInfo};
 
+use crate::tt;
+
 pub struct PanicMessage {
     message: Option<String>,
 }

@@ -13,6 +13,8 @@ use object::Object;
 use paths::AbsPath;
 use proc_macro_api::{read_dylib_info, ProcMacroKind};
 
+use crate::tt;
+
 use super::abis::Abi;
 
 const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";

@@ -37,6 +37,8 @@ use proc_macro_api::{
     ProcMacroKind,
 };
 
+use ::tt::token_id as tt;
+
 #[derive(Default)]
 pub(crate) struct ProcMacroSrv {
     expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>,
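
The `use ::tt::token_id as tt;` import above is how `proc-macro-srv` keeps compiling against the old, non-generic names while the underlying crate becomes generic: a module of aliases that pins the span parameter to `TokenId`. A hedged sketch of what such an alias module looks like; the real module lives in the `tt` crate and the types here are stand-ins.

// Sketch only: how a `token_id`-style alias module can pin a generic span
// parameter so callers keep using the old non-generic names.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct TokenId(pub u32);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DelimiterKind { Parenthesis, Brace, Bracket, Invisible }

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Delimiter<S> {
    pub open: S,
    pub close: S,
    pub kind: DelimiterKind,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Subtree<S> {
    pub delimiter: Delimiter<S>,
    pub token_trees: Vec<TokenTree<S>>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TokenTree<S> {
    Subtree(Subtree<S>),
    // Leaf variants elided in this sketch.
}

// The alias module: the generic types re-exported with the span parameter
// pinned to `TokenId`, so `use ::tt::token_id as tt;` keeps old call sites
// (`tt::Subtree`, `tt::Delimiter`, ...) compiling unchanged.
pub mod token_id {
    pub type Span = super::TokenId;
    pub type Delimiter = super::Delimiter<Span>;
    pub type Subtree = super::Subtree<Span>;
    pub type TokenTree = super::TokenTree<Span>;
    pub use super::{DelimiterKind, TokenId};
}
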

@@ -8,7 +8,7 @@ use expect_test::expect;
 
 #[test]
 fn test_derive_empty() {
-    assert_expand("DeriveEmpty", r#"struct S;"#, expect![[r#"SUBTREE $"#]]);
+    assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 4294967295 4294967295"]);
 }
 
 #[test]
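
The expectation churn in this test file is purely about the dump format: an invisible (`$$`) subtree header now prints the delimiter's open and close span ids, where the old format printed a bare `SUBTREE $` (and a single id for visible delimiters). A hedged sketch of the assumed formatting, not the crate's actual printer:

// Sketch of the assumed debug-dump header for a subtree after this change:
// "<kind> <open span> <close span>", with 4294967295 (u32::MAX) standing in
// for an unspecified TokenId.
#[derive(Clone, Copy)]
enum DelimiterKind {
    Parenthesis,
    Brace,
    Bracket,
    Invisible,
}

fn subtree_header(kind: DelimiterKind, open: u32, close: u32) -> String {
    let delim = match kind {
        DelimiterKind::Parenthesis => "()",
        DelimiterKind::Brace => "{}",
        DelimiterKind::Bracket => "[]",
        DelimiterKind::Invisible => "$$",
    };
    format!("SUBTREE {delim} {open} {close}")
}

fn main() {
    assert_eq!(
        subtree_header(DelimiterKind::Invisible, u32::MAX, u32::MAX),
        "SUBTREE $$ 4294967295 4294967295"
    );
}
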
@@ -17,10 +17,10 @@ fn test_derive_error() {
         "DeriveError",
         r#"struct S;"#,
         expect![[r##"
-            SUBTREE $
+            SUBTREE $$ 4294967295 4294967295
             IDENT compile_error 4294967295
             PUNCH ! [alone] 4294967295
-            SUBTREE () 4294967295
+            SUBTREE () 4294967295 4294967295
             LITERAL "#[derive(DeriveError)] struct S ;" 4294967295
             PUNCH ; [alone] 4294967295"##]],
     );
@@ -32,14 +32,14 @@ fn test_fn_like_macro_noop() {
         "fn_like_noop",
         r#"ident, 0, 1, []"#,
         expect![[r#"
-            SUBTREE $
+            SUBTREE $$ 4294967295 4294967295
             IDENT ident 4294967295
             PUNCH , [alone] 4294967295
             LITERAL 0 4294967295
             PUNCH , [alone] 4294967295
             LITERAL 1 4294967295
             PUNCH , [alone] 4294967295
-            SUBTREE [] 4294967295"#]],
+            SUBTREE [] 4294967295 4294967295"#]],
     );
 }
 
@@ -49,10 +49,10 @@ fn test_fn_like_macro_clone_ident_subtree() {
         "fn_like_clone_tokens",
         r#"ident, []"#,
         expect![[r#"
-            SUBTREE $
+            SUBTREE $$ 4294967295 4294967295
             IDENT ident 4294967295
             PUNCH , [alone] 4294967295
-            SUBTREE [] 4294967295"#]],
+            SUBTREE [] 4294967295 4294967295"#]],
     );
 }
 
@@ -62,7 +62,7 @@ fn test_fn_like_macro_clone_raw_ident() {
         "fn_like_clone_tokens",
         "r#async",
         expect![[r#"
-            SUBTREE $
+            SUBTREE $$ 4294967295 4294967295
             IDENT r#async 4294967295"#]],
     );
 }
@@ -73,7 +73,7 @@ fn test_fn_like_mk_literals() {
         "fn_like_mk_literals",
         r#""#,
         expect![[r#"
-            SUBTREE $
+            SUBTREE $$ 4294967295 4294967295
             LITERAL b"byte_string" 4294967295
             LITERAL 'c' 4294967295
             LITERAL "string" 4294967295
@@ -90,7 +90,7 @@ fn test_fn_like_mk_idents() {
         "fn_like_mk_idents",
         r#""#,
         expect![[r#"
-            SUBTREE $
+            SUBTREE $$ 4294967295 4294967295
             IDENT standard 4294967295
             IDENT r#raw 4294967295"#]],
     );
@@ -102,7 +102,7 @@ fn test_fn_like_macro_clone_literals() {
         "fn_like_clone_tokens",
         r#"1u16, 2_u32, -4i64, 3.14f32, "hello bridge""#,
         expect![[r#"
-            SUBTREE $
+            SUBTREE $$ 4294967295 4294967295
             LITERAL 1u16 4294967295
             PUNCH , [alone] 4294967295
             LITERAL 2_u32 4294967295
@@ -126,10 +126,10 @@ fn test_attr_macro() {
         r#"mod m {}"#,
         r#"some arguments"#,
         expect![[r##"
-            SUBTREE $
+            SUBTREE $$ 4294967295 4294967295
             IDENT compile_error 4294967295
             PUNCH ! [alone] 4294967295
-            SUBTREE () 4294967295
+            SUBTREE () 4294967295 4294967295
             LITERAL "#[attr_error(some arguments)] mod m {}" 4294967295
             PUNCH ; [alone] 4294967295"##]],
     );