Merge commit 'ac998a74b3' into sync-from-ra

Laurențiu Nicola 2024-02-18 09:41:20 +02:00
parent d33d8675d0
commit 6b17dba68c
178 changed files with 7101 additions and 1965 deletions

@@ -20,6 +20,11 @@ extern crate proc_macro;
 #[cfg(feature = "in-rust-tree")]
 extern crate rustc_driver as _;
+#[cfg(not(feature = "in-rust-tree"))]
+extern crate ra_ap_rustc_lexer as rustc_lexer;
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_lexer;
 mod dylib;
 mod proc_macros;
 mod server;

@@ -70,11 +70,58 @@ impl server::FreeFunctions for RaSpanServer {
         &mut self,
         s: &str,
     ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
-        // FIXME: keep track of LitKind and Suffix
+        use proc_macro::bridge::LitKind;
+        use rustc_lexer::{LiteralKind, Token, TokenKind};
+        let mut tokens = rustc_lexer::tokenize(s);
+        let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 });
+        let lit = if minus_or_lit.kind == TokenKind::Minus {
+            let lit = tokens.next().ok_or(())?;
+            if !matches!(
+                lit.kind,
+                TokenKind::Literal {
+                    kind: LiteralKind::Int { .. } | LiteralKind::Float { .. },
+                    ..
+                }
+            ) {
+                return Err(());
+            }
+            lit
+        } else {
+            minus_or_lit
+        };
+        if tokens.next().is_some() {
+            return Err(());
+        }
+        let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
+        let kind = match kind {
+            LiteralKind::Int { .. } => LitKind::Integer,
+            LiteralKind::Float { .. } => LitKind::Float,
+            LiteralKind::Char { .. } => LitKind::Char,
+            LiteralKind::Byte { .. } => LitKind::Byte,
+            LiteralKind::Str { .. } => LitKind::Str,
+            LiteralKind::ByteStr { .. } => LitKind::ByteStr,
+            LiteralKind::CStr { .. } => LitKind::CStr,
+            LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()),
+            LiteralKind::RawByteStr { n_hashes } => {
+                LitKind::ByteStrRaw(n_hashes.unwrap_or_default())
+            }
+            LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+        };
+        let (lit, suffix) = s.split_at(suffix_start as usize);
+        let suffix = match suffix {
+            "" | "_" => None,
+            suffix => Some(Symbol::intern(self.interner, suffix)),
+        };
         Ok(bridge::Literal {
-            kind: bridge::LitKind::Integer, // dummy
-            symbol: Symbol::intern(self.interner, s),
-            suffix: None,
+            kind,
+            symbol: Symbol::intern(self.interner, lit),
+            suffix,
             span: self.call_site,
         })
     }
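
Note: both servers now lex the incoming string with rustc_lexer instead of returning a dummy Integer literal. Below is a rough standalone sketch of the core mechanism, assuming the published ra_ap_rustc_lexer crate (the same lexer re-exported above as rustc_lexer); split_literal is a made-up helper, not an API from this commit:

    // Sketch only: lex `s` as a single literal token and split off its suffix.
    use ra_ap_rustc_lexer::{tokenize, TokenKind};

    fn split_literal(s: &str) -> Option<(&str, &str)> {
        let mut tokens = tokenize(s);
        let tok = tokens.next()?;
        // Anything other than exactly one token is not a plain literal.
        if tokens.next().is_some() {
            return None;
        }
        let TokenKind::Literal { suffix_start, .. } = tok.kind else { return None };
        // `suffix_start` is the byte offset where a suffix such as `u16` begins;
        // it equals the token length when there is no suffix.
        Some(s.split_at(suffix_start as usize))
    }

    fn main() {
        assert_eq!(split_literal("1u16"), Some(("1", "u16")));
        assert_eq!(split_literal(r#""hello bridge""#), Some((r#""hello bridge""#, "")));
    }

The change above additionally maps the lexer's LiteralKind to bridge::LitKind and drops an empty or "_" suffix before interning the two halves.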

@@ -62,11 +62,58 @@ impl server::FreeFunctions for TokenIdServer {
         &mut self,
         s: &str,
     ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
-        // FIXME: keep track of LitKind and Suffix
+        use proc_macro::bridge::LitKind;
+        use rustc_lexer::{LiteralKind, Token, TokenKind};
+        let mut tokens = rustc_lexer::tokenize(s);
+        let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 });
+        let lit = if minus_or_lit.kind == TokenKind::Minus {
+            let lit = tokens.next().ok_or(())?;
+            if !matches!(
+                lit.kind,
+                TokenKind::Literal {
+                    kind: LiteralKind::Int { .. } | LiteralKind::Float { .. },
+                    ..
+                }
+            ) {
+                return Err(());
+            }
+            lit
+        } else {
+            minus_or_lit
+        };
+        if tokens.next().is_some() {
+            return Err(());
+        }
+        let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
+        let kind = match kind {
+            LiteralKind::Int { .. } => LitKind::Integer,
+            LiteralKind::Float { .. } => LitKind::Float,
+            LiteralKind::Char { .. } => LitKind::Char,
+            LiteralKind::Byte { .. } => LitKind::Byte,
+            LiteralKind::Str { .. } => LitKind::Str,
+            LiteralKind::ByteStr { .. } => LitKind::ByteStr,
+            LiteralKind::CStr { .. } => LitKind::CStr,
+            LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()),
+            LiteralKind::RawByteStr { n_hashes } => {
+                LitKind::ByteStrRaw(n_hashes.unwrap_or_default())
+            }
+            LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+        };
+        let (lit, suffix) = s.split_at(suffix_start as usize);
+        let suffix = match suffix {
+            "" | "_" => None,
+            suffix => Some(Symbol::intern(self.interner, suffix)),
+        };
         Ok(bridge::Literal {
-            kind: bridge::LitKind::Integer, // dummy
-            symbol: Symbol::intern(self.interner, s),
-            suffix: None,
+            kind,
+            symbol: Symbol::intern(self.interner, lit),
+            suffix,
             span: self.call_site,
         })
     }
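
Note: the leading-minus branch mirrors rustc, which accepts `-` only in front of integer and float literals here. A hedged sketch of that check in isolation, again assuming ra_ap_rustc_lexer; is_negative_numeric_literal is a hypothetical name, matching the `-4i64` input exercised by the test below:

    use ra_ap_rustc_lexer::{tokenize, LiteralKind, TokenKind};

    // True only for a `-` immediately followed by an integer or float literal
    // and nothing else, the sole negative form literal_from_str accepts.
    fn is_negative_numeric_literal(s: &str) -> bool {
        let mut tokens = tokenize(s);
        let Some(first) = tokens.next() else { return false };
        if first.kind != TokenKind::Minus {
            return false;
        }
        let Some(second) = tokens.next() else { return false };
        matches!(
            second.kind,
            TokenKind::Literal { kind: LiteralKind::Int { .. } | LiteralKind::Float { .. }, .. }
        ) && tokens.next().is_none()
    }

    fn main() {
        assert!(is_negative_numeric_literal("-4i64"));
        assert!(!is_negative_numeric_literal(r#"-"negated strings are rejected""#));
    }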

@@ -169,8 +169,8 @@ fn test_fn_like_mk_idents() {
 fn test_fn_like_macro_clone_literals() {
     assert_expand(
         "fn_like_clone_tokens",
-        r#"1u16, 2_u32, -4i64, 3.14f32, "hello bridge""#,
-        expect![[r#"
+        r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##"###,
+        expect![[r###"
             SUBTREE $$ 1 1
               LITERAL 1u16 1
               PUNCH , [alone] 1
@@ -181,8 +181,12 @@ fn test_fn_like_macro_clone_literals() {
               PUNCH , [alone] 1
               LITERAL 3.14f32 1
               PUNCH , [alone] 1
-              LITERAL "hello bridge" 1"#]],
-        expect![[r#"
+              LITERAL ""hello bridge"" 1
+              PUNCH , [alone] 1
+              LITERAL ""suffixed""suffix 1
+              PUNCH , [alone] 1
+              LITERAL r##"r##"raw"##"## 1"###]],
+        expect![[r###"
             SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
               LITERAL 1u16 SpanData { range: 0..4, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
              PUNCH , [alone] SpanData { range: 4..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
@@ -193,7 +197,11 @@ fn test_fn_like_macro_clone_literals() {
              PUNCH , [alone] SpanData { range: 18..19, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
              LITERAL 3.14f32 SpanData { range: 20..27, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
              PUNCH , [alone] SpanData { range: 27..28, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL "hello bridge" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
+              LITERAL ""hello bridge"" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+              PUNCH , [alone] SpanData { range: 43..44, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+              LITERAL ""suffixed""suffix SpanData { range: 45..61, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+              PUNCH , [alone] SpanData { range: 61..62, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+              LITERAL r##"r##"raw"##"## SpanData { range: 63..73, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"###]],
     );
 }