fix: Fix proc-macro server not accounting for string delimiters correctly

Lukas Wirth 2024-02-22 22:25:55 +01:00
parent 543d7e98db
commit cdfb73ab9c
6 changed files with 85 additions and 55 deletions
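What changed: `literal_from_str` used to intern the literal text essentially as-is, so quoted literals kept their quotes, their `b`/`c`/`r` prefixes, and any raw-string hashes in the returned symbol. The new match arms below pair each `LiteralKind` with the number of bytes to strip from the front and back of the text (after the suffix is split off), so only the literal's contents are interned. A minimal illustration of that stripping, not part of the commit, with `strip_delimiters` as a hypothetical helper:

    // Hypothetical helper mirroring `&lit[start_offset..lit.len() - end_offset]`.
    fn strip_delimiters(lit: &str, start_offset: usize, end_offset: usize) -> &str {
        &lit[start_offset..lit.len() - end_offset]
    }

    // Offsets as chosen by the new match arms (end offsets assume a terminated literal):
    //   "abc"      -> (1, 1)                     -> abc
    //   b"abc"     -> (2, 1)                     -> abc
    //   c"abc"     -> (2, 1)                     -> abc
    //   r#"abc"#   -> (2 + 1 hash, 1 + 1 hash)   -> abc
    //   br#"abc"#  -> (3 + 1 hash, 1 + 1 hash)   -> abc
    //   'a'        -> (1, 1)                     -> a
    //   1u32       -> (0, 0), suffix split first -> symbol 1, suffix u32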


@@ -97,22 +97,33 @@ impl server::FreeFunctions for RaSpanServer {
         }
         let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
-        let kind = match kind {
-            LiteralKind::Int { .. } => LitKind::Integer,
-            LiteralKind::Float { .. } => LitKind::Float,
-            LiteralKind::Char { .. } => LitKind::Char,
-            LiteralKind::Byte { .. } => LitKind::Byte,
-            LiteralKind::Str { .. } => LitKind::Str,
-            LiteralKind::ByteStr { .. } => LitKind::ByteStr,
-            LiteralKind::CStr { .. } => LitKind::CStr,
-            LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()),
-            LiteralKind::RawByteStr { n_hashes } => {
-                LitKind::ByteStrRaw(n_hashes.unwrap_or_default())
-            }
-            LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+        let (kind, start_offset, end_offset) = match kind {
+            LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
+            LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
+            LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
+            LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
+            LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
+            LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
+            LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
+            LiteralKind::RawStr { n_hashes } => (
+                LitKind::StrRaw(n_hashes.unwrap_or_default()),
+                2 + n_hashes.unwrap_or_default() as usize,
+                1 + n_hashes.unwrap_or_default() as usize,
+            ),
+            LiteralKind::RawByteStr { n_hashes } => (
+                LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
+                3 + n_hashes.unwrap_or_default() as usize,
+                1 + n_hashes.unwrap_or_default() as usize,
+            ),
+            LiteralKind::RawCStr { n_hashes } => (
+                LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+                3 + n_hashes.unwrap_or_default() as usize,
+                1 + n_hashes.unwrap_or_default() as usize,
+            ),
         };
         let (lit, suffix) = s.split_at(suffix_start as usize);
+        let lit = &lit[start_offset..lit.len() - end_offset];
         let suffix = match suffix {
             "" | "_" => None,
             suffix => Some(Symbol::intern(self.interner, suffix)),
@@ -248,12 +259,8 @@ impl server::TokenStream for RaSpanServer {
                 }
                 tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
                     bridge::TokenTree::Literal(bridge::Literal {
-                        // FIXME: handle literal kinds
-                        kind: bridge::LitKind::Integer, // dummy
-                        symbol: Symbol::intern(self.interner, &lit.text),
-                        // FIXME: handle suffixes
-                        suffix: None,
                         span: lit.span,
+                        ..server::FreeFunctions::literal_from_str(self, &lit.text).unwrap()
                     })
                 }
                 tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {


@@ -89,22 +89,34 @@ impl server::FreeFunctions for TokenIdServer {
         }
         let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
-        let kind = match kind {
-            LiteralKind::Int { .. } => LitKind::Integer,
-            LiteralKind::Float { .. } => LitKind::Float,
-            LiteralKind::Char { .. } => LitKind::Char,
-            LiteralKind::Byte { .. } => LitKind::Byte,
-            LiteralKind::Str { .. } => LitKind::Str,
-            LiteralKind::ByteStr { .. } => LitKind::ByteStr,
-            LiteralKind::CStr { .. } => LitKind::CStr,
-            LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()),
-            LiteralKind::RawByteStr { n_hashes } => {
-                LitKind::ByteStrRaw(n_hashes.unwrap_or_default())
-            }
-            LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+        let (kind, start_offset, end_offset) = match kind {
+            LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
+            LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
+            LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
+            LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
+            LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
+            LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
+            LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
+            LiteralKind::RawStr { n_hashes } => (
+                LitKind::StrRaw(n_hashes.unwrap_or_default()),
+                2 + n_hashes.unwrap_or_default() as usize,
+                1 + n_hashes.unwrap_or_default() as usize,
+            ),
+            LiteralKind::RawByteStr { n_hashes } => (
+                LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
+                3 + n_hashes.unwrap_or_default() as usize,
+                1 + n_hashes.unwrap_or_default() as usize,
+            ),
+            LiteralKind::RawCStr { n_hashes } => (
+                LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+                3 + n_hashes.unwrap_or_default() as usize,
+                1 + n_hashes.unwrap_or_default() as usize,
+            ),
         };
         let (lit, suffix) = s.split_at(suffix_start as usize);
+        let lit = &lit[start_offset..lit.len() - end_offset];
         let suffix = match suffix {
             "" | "_" => None,
             suffix => Some(Symbol::intern(self.interner, suffix)),
@@ -233,12 +245,9 @@ impl server::TokenStream for TokenIdServer {
                 }
                 tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
                     bridge::TokenTree::Literal(bridge::Literal {
-                        // FIXME: handle literal kinds
-                        kind: bridge::LitKind::Integer, // dummy
-                        symbol: Symbol::intern(self.interner, &lit.text),
-                        // FIXME: handle suffixes
-                        suffix: None,
                         span: lit.span,
+                        ..server::FreeFunctions::literal_from_str(self, &lit.text)
+                            .unwrap_or_else(|_| panic!("`{}`", lit.text))
                     })
                 }
                 tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
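In the two `TokenStream` hunks, converting a `tt::Leaf::Literal` back into a `bridge::Literal` previously filled in a dummy `Integer` kind, the raw text as the symbol, and no suffix. Re-lexing the text through `literal_from_str` now yields the real kind, the delimiter-stripped symbol, and the suffix, with only the span still taken from the leaf. A hedged before/after sketch, with values inferred from the diff rather than taken from the commit:

    // lit.text = "1u32"
    //   before: kind: Integer (dummy), symbol: "1u32",      suffix: None
    //   after:  kind: Integer,         symbol: "1",         suffix: Some("u32")
    //
    // lit.text = "\"hello\"" (i.e. the source text `"hello"`)
    //   before: kind: Integer (dummy), symbol: "\"hello\"", suffix: None
    //   after:  kind: Str,             symbol: "hello",     suffix: None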


@@ -115,8 +115,6 @@ pub(super) mod token_stream {
         }
     }
-    type LexError = String;
     /// Attempts to break the string into tokens and parse those tokens into a token stream.
     /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
     /// or characters not existing in the language.
@@ -124,13 +122,10 @@ pub(super) mod token_stream {
     ///
     /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
     /// change these errors into `LexError`s later.
-    #[rustfmt::skip]
-    impl<S: tt::Span> /*FromStr for*/ TokenStream<S> {
-        // type Err = LexError;
-        pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, LexError> {
+    impl<S: tt::Span> TokenStream<S> {
+        pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, String> {
             let subtree =
-                mbe::parse_to_token_tree_static_span(call_site, src).ok_or("Failed to parse from mbe")?;
+                mbe::parse_to_token_tree_static_span(call_site, src).ok_or("lexing error")?;
             Ok(TokenStream::with_subtree(subtree))
         }
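Regarding the last hunk's doc comment: the only signature change to `TokenStream::from_str` is the error type, now a plain `String` in place of the removed `LexError` alias, and the commented-out `FromStr` scaffolding is gone. A hedged sketch of a crate-internal call site (the `parse_input` wrapper and its span parameter are illustrative, not from the commit):

    // Hypothetical caller; any `S: tt::Span` value works as the call-site span.
    fn parse_input<S: tt::Span>(src: &str, call_site: S) -> Result<TokenStream<S>, String> {
        // Returns Err containing the "lexing error" message when mbe cannot lex `src`.
        let stream = TokenStream::from_str(src, call_site)?;
        Ok(stream)
    }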