Auto merge of #17799 - Veykril:syntax-bridge, r=Veykril

Split out syntax-bridge into a separate crate

This functionality is not really tied to macro-by-example (`mbe`) macros, so in my opinion it has no place in that crate.
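Judging from the updated imports in the hunks below, the split lands roughly as follows. This is a summary sketch assembled from the call sites touched in this commit, not the crates' actual `lib.rs`:

```rust
// Still imported from `mbe` after this change: the macro-by-example specifics.
use mbe::{expect_fragment, DelimiterKind, MatchedArmIndex, Punct};

// Now imported from the new `syntax_bridge` crate: the syntax-tree <-> token-tree plumbing.
use syntax_bridge::{
    desugar_doc_comment_text, parse_to_token_tree, syntax_node_to_token_tree,
    syntax_node_to_token_tree_modified, token_tree_to_syntax_node, DocCommentDesugarMode,
    SpanMapper, TopEntryPoint,
};
```

The effect is that the conversion layer sitting on top of `syntax` and `parser` moves out, which keeps `mbe` focused on the expander itself.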
bors 2024-08-05 12:31:45 +00:00
commit fbed308ebe
30 changed files with 268 additions and 140 deletions

@@ -6,14 +6,12 @@ use cfg::CfgExpr;
use either::Either;
use intern::{sym, Interned, Symbol};
use mbe::{
desugar_doc_comment_text, syntax_node_to_token_tree, DelimiterKind, DocCommentDesugarMode,
Punct,
};
use mbe::{DelimiterKind, Punct};
use smallvec::{smallvec, SmallVec};
use span::{Span, SyntaxContextId};
use syntax::unescape;
use syntax::{ast, match_ast, AstNode, AstToken, SyntaxNode};
use syntax_bridge::{desugar_doc_comment_text, syntax_node_to_token_tree, DocCommentDesugarMode};
use triomphe::ThinArc;
use crate::name::Name;

@@ -2,10 +2,10 @@
use intern::sym;
use itertools::izip;
use mbe::DocCommentDesugarMode;
use rustc_hash::FxHashSet;
use span::{MacroCallId, Span};
use stdx::never;
use syntax_bridge::DocCommentDesugarMode;
use tracing::debug;
use crate::{
@@ -209,9 +209,9 @@ struct BasicAdtInfo {
}
fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandError> {
let (parsed, tm) = &mbe::token_tree_to_syntax_node(
let (parsed, tm) = &syntax_bridge::token_tree_to_syntax_node(
tt,
mbe::TopEntryPoint::MacroItems,
syntax_bridge::TopEntryPoint::MacroItems,
parser::Edition::CURRENT_FIXME,
);
let macro_items = ast::MacroItems::cast(parsed.syntax_node())
@@ -268,7 +268,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
match this {
Some(it) => {
param_type_set.insert(it.as_name());
mbe::syntax_node_to_token_tree(
syntax_bridge::syntax_node_to_token_tree(
it.syntax(),
tm,
call_site,
@@ -282,7 +282,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
};
let bounds = match &param {
ast::TypeOrConstParam::Type(it) => it.type_bound_list().map(|it| {
mbe::syntax_node_to_token_tree(
syntax_bridge::syntax_node_to_token_tree(
it.syntax(),
tm,
call_site,
@@ -295,7 +295,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
let ty = param
.ty()
.map(|ty| {
mbe::syntax_node_to_token_tree(
syntax_bridge::syntax_node_to_token_tree(
ty.syntax(),
tm,
call_site,
@@ -316,7 +316,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
let where_clause = if let Some(w) = where_clause {
w.predicates()
.map(|it| {
mbe::syntax_node_to_token_tree(
syntax_bridge::syntax_node_to_token_tree(
it.syntax(),
tm,
call_site,
@@ -353,7 +353,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
param_type_set.contains(&name).then_some(p)
})
.map(|it| {
mbe::syntax_node_to_token_tree(
syntax_bridge::syntax_node_to_token_tree(
it.syntax(),
tm,
call_site,

@@ -4,13 +4,14 @@ use base_db::AnchoredPath;
use cfg::CfgExpr;
use either::Either;
use intern::{sym, Symbol};
use mbe::{parse_exprs_with_sep, parse_to_token_tree, DelimiterKind};
use mbe::{expect_fragment, DelimiterKind};
use span::{Edition, EditionedFileId, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use stdx::format_to;
use syntax::{
format_smolstr,
unescape::{unescape_byte, unescape_char, unescape_unicode, Mode},
};
use syntax_bridge::parse_to_token_tree;
use crate::{
builtin::quote::{dollar_crate, quote},
@@ -228,20 +229,22 @@ fn assert_expand(
span: Span,
) -> ExpandResult<tt::Subtree> {
let call_site_span = span_with_call_site_ctxt(db, span, id);
let args = parse_exprs_with_sep(tt, ',', call_site_span, Edition::CURRENT_FIXME);
let mut iter = ::tt::iter::TtIter::new(tt);
let cond = expect_fragment(
&mut iter,
parser::PrefixEntryPoint::Expr,
db.crate_graph()[id.lookup(db).krate].edition,
tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close },
);
_ = iter.expect_char(',');
let rest = iter.as_slice();
let dollar_crate = dollar_crate(span);
let expanded = match &*args {
[cond, panic_args @ ..] => {
let comma = tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(call_site_span),
token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char: ',',
spacing: tt::Spacing::Alone,
span: call_site_span,
}))]),
};
let cond = cond.clone();
let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
let expanded = match cond.value {
Some(cond) => {
let panic_args = rest.iter().cloned();
let mac = if use_panic_2021(db, span) {
quote! {call_site_span => #dollar_crate::panic::panic_2021!(##panic_args) }
} else {
@@ -253,10 +256,13 @@ fn assert_expand(
}
}}
}
[] => quote! {call_site_span =>{}},
None => quote! {call_site_span =>{}},
};
ExpandResult::ok(expanded)
match cond.err {
Some(err) => ExpandResult::new(expanded, err.into()),
None => ExpandResult::ok(expanded),
}
}
fn file_expand(

@@ -3,10 +3,11 @@
use base_db::{salsa, CrateId, SourceDatabase};
use either::Either;
use limit::Limit;
use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, MatchedArmIndex};
use mbe::MatchedArmIndex;
use rustc_hash::FxHashSet;
use span::{AstIdMap, EditionedFileId, Span, SyntaxContextData, SyntaxContextId};
use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode};
use triomphe::Arc;
use crate::{
@@ -165,7 +166,7 @@ pub fn expand_speculative(
// Build the subtree and token mapping for the speculative args
let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => (
mbe::syntax_node_to_token_tree(
syntax_bridge::syntax_node_to_token_tree(
speculative_args,
span_map,
span,
@@ -178,7 +179,7 @@ pub fn expand_speculative(
SyntaxFixupUndoInfo::NONE,
),
MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => (
mbe::syntax_node_to_token_tree(
syntax_bridge::syntax_node_to_token_tree(
speculative_args,
span_map,
span,
@@ -213,7 +214,7 @@ pub fn expand_speculative(
fixups.remove.extend(censor_cfg);
(
mbe::syntax_node_to_token_tree_modified(
syntax_bridge::syntax_node_to_token_tree_modified(
speculative_args,
span_map,
fixups.append,
@@ -459,7 +460,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
return dummy_tt(kind);
}
let mut tt = mbe::syntax_node_to_token_tree(
let mut tt = syntax_bridge::syntax_node_to_token_tree(
tt.syntax(),
map.as_ref(),
span,
@@ -515,7 +516,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
fixups.remove.extend(censor_cfg);
(
mbe::syntax_node_to_token_tree_modified(
syntax_bridge::syntax_node_to_token_tree_modified(
syntax,
map,
fixups.append,
@@ -720,13 +721,13 @@ fn token_tree_to_syntax_node(
edition: parser::Edition,
) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
let entry_point = match expand_to {
ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
ExpandTo::Pattern => mbe::TopEntryPoint::Pattern,
ExpandTo::Type => mbe::TopEntryPoint::Type,
ExpandTo::Expr => mbe::TopEntryPoint::Expr,
ExpandTo::Statements => syntax_bridge::TopEntryPoint::MacroStmts,
ExpandTo::Items => syntax_bridge::TopEntryPoint::MacroItems,
ExpandTo::Pattern => syntax_bridge::TopEntryPoint::Pattern,
ExpandTo::Type => syntax_bridge::TopEntryPoint::Type,
ExpandTo::Expr => syntax_bridge::TopEntryPoint::Expr,
};
mbe::token_tree_to_syntax_node(tt, entry_point, edition)
syntax_bridge::token_tree_to_syntax_node(tt, entry_point, edition)
}
fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {

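For reference, the `token_tree_to_syntax_node` wrapper above is essentially the round trip that now lives behind `syntax_bridge`. Below is a minimal sketch of the two entry points as this commit uses them; the parameter types are inferred from the call sites, so treat the exact signatures as assumptions:

```rust
use span::Span;
use syntax::SyntaxNode;
use syntax_bridge::{
    syntax_node_to_token_tree, token_tree_to_syntax_node, DocCommentDesugarMode, SpanMapper,
    TopEntryPoint,
};

// Convert a syntax node into a token tree and parse it back, mirroring the
// conversions used throughout this commit. `map` resolves text ranges to spans;
// `call_site` is the span attached to the produced tokens.
fn roundtrip(node: &SyntaxNode, map: impl SpanMapper<Span>, call_site: Span) {
    let tt = syntax_node_to_token_tree(node, map, call_site, DocCommentDesugarMode::Mbe);
    let (parse, _span_map) =
        token_tree_to_syntax_node(&tt, TopEntryPoint::MacroItems, parser::Edition::CURRENT);
    let _ = parse.syntax_node();
}
```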
@@ -2,10 +2,10 @@
use base_db::CrateId;
use intern::sym;
use mbe::DocCommentDesugarMode;
use span::{Edition, MacroCallId, Span, SyntaxContextId};
use stdx::TupleExt;
use syntax::{ast, AstNode};
use syntax_bridge::DocCommentDesugarMode;
use triomphe::Arc;
use crate::{
@@ -112,7 +112,7 @@ impl DeclarativeMacroExpander {
ast::Macro::MacroRules(macro_rules) => (
match macro_rules.token_tree() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(
let tt = syntax_bridge::syntax_node_to_token_tree(
arg.syntax(),
map.as_ref(),
map.span_for_range(
@@ -135,14 +135,14 @@ impl DeclarativeMacroExpander {
let span =
map.span_for_range(macro_def.macro_token().unwrap().text_range());
let args = macro_def.args().map(|args| {
mbe::syntax_node_to_token_tree(
syntax_bridge::syntax_node_to_token_tree(
args.syntax(),
map.as_ref(),
span,
DocCommentDesugarMode::Mbe,
)
});
let body = mbe::syntax_node_to_token_tree(
let body = syntax_bridge::syntax_node_to_token_tree(
body.syntax(),
map.as_ref(),
span,

@@ -19,9 +19,9 @@
//!
//! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
use base_db::CrateId;
use mbe::DocCommentDesugarMode;
use span::SyntaxContextId;
use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
use syntax_bridge::DocCommentDesugarMode;
use triomphe::Arc;
use crate::{
@@ -82,7 +82,7 @@ pub fn expand_eager_macro_input(
return ExpandResult { value: None, err };
};
let mut subtree = mbe::syntax_node_to_token_tree(
let mut subtree = syntax_bridge::syntax_node_to_token_tree(
&expanded_eager_input,
arg_map,
span,

@@ -2,7 +2,6 @@
//! fix up syntax errors in the code we're passing to them.
use intern::sym;
use mbe::DocCommentDesugarMode;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
use span::{
@@ -14,6 +13,7 @@ use syntax::{
ast::{self, AstNode, HasLoopBody},
match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
};
use syntax_bridge::DocCommentDesugarMode;
use triomphe::Arc;
use tt::Spacing;
@@ -76,7 +76,8 @@ pub(crate) fn fixup_syntax(
if can_handle_error(&node) && has_error_to_handle(&node) {
remove.insert(node.clone().into());
// the node contains an error node, we have to completely replace it by something valid
let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site, mode);
let original_tree =
syntax_bridge::syntax_node_to_token_tree(&node, span_map, call_site, mode);
let idx = original.len() as u32;
original.push(original_tree);
let span = span_map.span_for_range(node_range);
@@ -434,9 +435,9 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
use mbe::DocCommentDesugarMode;
use span::{Edition, EditionedFileId, FileId};
use syntax::TextRange;
use syntax_bridge::DocCommentDesugarMode;
use triomphe::Arc;
use crate::{
@@ -483,7 +484,7 @@ mod tests {
span_map.span_for_range(TextRange::empty(0.into())),
DocCommentDesugarMode::Mbe,
);
let mut tt = mbe::syntax_node_to_token_tree_modified(
let mut tt = syntax_bridge::syntax_node_to_token_tree_modified(
&parsed.syntax_node(),
span_map.as_ref(),
fixups.append,
@@ -498,9 +499,9 @@ mod tests {
expect.assert_eq(&actual);
// the fixed-up tree should be syntactically valid
let (parse, _) = mbe::token_tree_to_syntax_node(
let (parse, _) = syntax_bridge::token_tree_to_syntax_node(
&tt,
::mbe::TopEntryPoint::MacroItems,
syntax_bridge::TopEntryPoint::MacroItems,
parser::Edition::CURRENT,
);
assert!(
@@ -513,7 +514,7 @@ mod tests {
// the fixed-up + reversed version should be equivalent to the original input
// modulo token IDs and `Punct`s' spacing.
let original_as_tt = mbe::syntax_node_to_token_tree(
let original_as_tt = syntax_bridge::syntax_node_to_token_tree(
&parsed.syntax_node(),
span_map.as_ref(),
span_map.span_for_range(TextRange::empty(0.into())),

@@ -28,13 +28,13 @@ pub enum SpanMapRef<'a> {
RealSpanMap(&'a RealSpanMap),
}
impl mbe::SpanMapper<Span> for SpanMap {
impl syntax_bridge::SpanMapper<Span> for SpanMap {
fn span_for(&self, range: TextRange) -> Span {
self.span_for_range(range)
}
}
impl mbe::SpanMapper<Span> for SpanMapRef<'_> {
impl syntax_bridge::SpanMapper<Span> for SpanMapRef<'_> {
fn span_for(&self, range: TextRange) -> Span {
self.span_for_range(range)
}
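The hunks above only show the trait impls moving over; the `SpanMapper` trait itself is presumably along these lines in the new crate (a sketch inferred from the two impls, not copied from `syntax_bridge`):

```rust
use syntax::TextRange;

// Inferred shape: generic over the span type `S`; the impls above
// instantiate it with `span::Span`.
pub trait SpanMapper<S> {
    fn span_for(&self, range: TextRange) -> S;
}
```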