Re-implement syntax fixups

Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-03 07:04:49 +00:00)
Parent 7a8c4c001b, commit f48fa0c6cb
7 changed files with 406 additions and 262 deletions
@@ -34,8 +34,6 @@ impl SyntaxContextId {
     // we need a special value that behaves as the current context.
     pub const SELF_REF: Self =
         SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) });
-    // Used for syntax fixups
-    pub const FAKE: Self = SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 2) });
 
     pub fn is_root(self) -> bool {
         self == Self::ROOT
@@ -9,6 +9,7 @@ use base_db::{
 use either::Either;
 use limit::Limit;
 use mbe::{syntax_node_to_token_tree, ValueResult};
+use rustc_hash::FxHashSet;
 use syntax::{
     ast::{self, HasAttrs, HasDocComments},
     AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,

@@ -20,6 +21,7 @@ use crate::{
     attrs::RawAttrs,
     builtin_attr_macro::pseudo_derive_attr_expansion,
     builtin_fn_macro::EagerExpander,
+    fixup::{self, SyntaxFixupUndoInfo},
     hygiene::{self, SyntaxContextData, Transparency},
     span::{RealSpanMap, SpanMap, SpanMapRef},
     tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
@@ -135,7 +137,7 @@ pub trait ExpandDatabase: SourceDatabase {
     fn macro_arg(
         &self,
         id: MacroCallId,
-    ) -> ValueResult<Option<Arc<tt::Subtree>>, Arc<Box<[SyntaxError]>>>;
+    ) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>;
     /// Fetches the expander for this macro.
     #[salsa::transparent]
     fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
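The `macro_arg` query now returns the fixup undo information alongside the argument subtree, so every consumer can thread it through to `fixup::reverse_fixups` once expansion is done. A hedged sketch of the shape change (types copied from the diff; the fallback on `None` is whatever each caller already does):

    // before: ValueResult<Option<Arc<tt::Subtree>>, Arc<Box<[SyntaxError]>>>
    // after:  ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>
    //
    // Callers destructure the pair:
    //     let Some((macro_arg, undo_info)) = db.macro_arg(id).value else { /* fallback */ };
    // and later undo the fixups on the expansion output:
    //     fixup::reverse_fixups(&mut expansion_tt, &undo_info);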
@@ -189,15 +191,33 @@ pub fn expand_speculative(
 ) -> Option<(SyntaxNode, SyntaxToken)> {
     let loc = db.lookup_intern_macro_call(actual_macro_call);
 
-    // Build the subtree and token mapping for the speculative args
-    let _censor = censor_for_macro_input(&loc, speculative_args);
     let span_map = RealSpanMap::absolute(SpanAnchor::DUMMY.file_id);
     let span_map = SpanMapRef::RealSpanMap(&span_map);
-    let mut tt = mbe::syntax_node_to_token_tree(
-        speculative_args,
-        // we don't leak these spans into any query so its fine to make them absolute
-        span_map,
-    );
+
+    // Build the subtree and token mapping for the speculative args
+    let (mut tt, undo_info) = match loc.kind {
+        MacroCallKind::FnLike { .. } => {
+            (mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE)
+        }
+        MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
+            let censor = censor_for_macro_input(&loc, speculative_args);
+            let mut fixups = fixup::fixup_syntax(span_map, speculative_args);
+            fixups.append.retain(|it, _| match it {
+                syntax::NodeOrToken::Node(it) => !censor.contains(it),
+                syntax::NodeOrToken::Token(_) => true,
+            });
+            fixups.remove.extend(censor);
+            (
+                mbe::syntax_node_to_token_tree_modified(
+                    speculative_args,
+                    span_map,
+                    fixups.append,
+                    fixups.remove,
+                ),
+                fixups.undo_info,
+            )
+        }
+    };
 
     let attr_arg = match loc.kind {
         MacroCallKind::Attr { invoc_attr_index, .. } => {
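To make the derive/attr branch concrete: fixups append recovery tokens to broken nodes, censoring removes nodes the macro must not see, and an appended token whose anchor node got censored has to be dropped along with it. A sketch with invented source (only `__ra_fixup` and the censoring behaviour come from this commit):

    // What the user is typing (hypothetical):
    //     #[my_attr]
    //     fn f() { let x = y. }      // incomplete field access
    //
    // What the attribute macro receives after censoring + fixup (sketch):
    //     fn f () { let x = y . __ra_fixup }
    //
    // The #[my_attr] tokens land in `remove`; any `append` entry anchored on a
    // removed node is filtered out via `retain` above so it cannot resurrect
    // censored syntax.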
@@ -227,7 +247,7 @@ pub fn expand_speculative(
 
     // Do the actual expansion, we need to directly expand the proc macro due to the attribute args
     // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
-    let speculative_expansion = match loc.def.kind {
+    let mut speculative_expansion = match loc.def.kind {
         MacroDefKind::ProcMacro(expander, ..) => {
             tt.delimiter = tt::Delimiter::UNSPECIFIED;
             let call_site = loc.span(db);

@@ -261,6 +281,7 @@ pub fn expand_speculative(
     };
 
     let expand_to = macro_expand_to(db, actual_macro_call);
+    fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
     let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
 
     let syntax_node = node.syntax_node();
@@ -347,7 +368,9 @@ fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse<SyntaxN
 fn macro_arg(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-) -> ValueResult<Option<Arc<tt::Subtree>>, Arc<Box<[SyntaxError]>>> {
+    // FIXME: consider the following by putting fixup info into eager call info args
+    // ) -> ValueResult<Option<Arc<(tt::Subtree, SyntaxFixupUndoInfo)>>, Arc<Box<[SyntaxError]>>> {
+) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>> {
     let mismatched_delimiters = |arg: &SyntaxNode| {
         let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
         let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());

@@ -375,7 +398,7 @@ fn macro_arg(
         .then(|| loc.eager.as_deref())
         .flatten()
     {
-        ValueResult::ok(Some(arg.clone()))
+        ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE)))
     } else {
         let (parse, map) = parse_with_map(db, loc.kind.file_id());
         let root = parse.syntax_node();
@@ -404,22 +427,27 @@ fn macro_arg(
             }
             MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
         };
-        let censor = censor_for_macro_input(&loc, &syntax);
-        let mut tt = match loc.kind {
+        let (mut tt, undo_info) = match loc.kind {
             MacroCallKind::FnLike { .. } => {
-                mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor)
+                (mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE)
             }
             MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
-                // let mut fixups = crate::fixup::fixup_syntax(&syntax);
-                // fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
-                // let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
-                //     &node,
-                //     fixups.token_map,
-                //     fixups.next_id,
-                //     fixups.replace,
-                //     fixups.append,
-                // );
-                mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor)
+                let censor = censor_for_macro_input(&loc, &syntax);
+                let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax);
+                fixups.append.retain(|it, _| match it {
+                    syntax::NodeOrToken::Node(it) => !censor.contains(it),
+                    syntax::NodeOrToken::Token(_) => true,
+                });
+                fixups.remove.extend(censor);
+                (
+                    mbe::syntax_node_to_token_tree_modified(
+                        &syntax,
+                        map,
+                        fixups.append,
+                        fixups.remove,
+                    ),
+                    fixups.undo_info,
+                )
             }
         };
 
@@ -430,15 +458,15 @@ fn macro_arg(
 
         if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
             match parse.errors() {
-                [] => ValueResult::ok(Some(Arc::new(tt))),
+                [] => ValueResult::ok(Some((Arc::new(tt), undo_info))),
                 errors => ValueResult::new(
-                    Some(Arc::new(tt)),
+                    Some((Arc::new(tt), undo_info)),
                     // Box::<[_]>::from(res.errors()), not stable yet
                     Arc::new(errors.to_vec().into_boxed_slice()),
                 ),
             }
         } else {
-            ValueResult::ok(Some(Arc::new(tt)))
+            ValueResult::ok(Some((Arc::new(tt), undo_info)))
         }
     }
 }

@@ -447,7 +475,7 @@ fn macro_arg(
 /// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
 /// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
 /// - attributes expect the invoking attribute to be stripped
-fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> Vec<SyntaxNode> {
+fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
     // FIXME: handle `cfg_attr`
     (|| {
         let censor = match loc.kind {
@@ -574,13 +602,13 @@ fn macro_expand(
             let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() };
             let node = ast_id.to_ptr(db).to_node(&root);
 
-            // FIXME: we might need to remove the spans from the input to the derive macro here
+            // FIXME: Use censoring
             let _censor = censor_for_macro_input(&loc, node.syntax());
             expander.expand(db, macro_call_id, &node, map.as_ref())
         }
         _ => {
             let ValueResult { value, err } = db.macro_arg(macro_call_id);
-            let Some(macro_arg) = value else {
+            let Some((macro_arg, undo_info)) = value else {
                 return ExpandResult {
                     value: Arc::new(tt::Subtree {
                         delimiter: tt::Delimiter::UNSPECIFIED,

@@ -608,7 +636,7 @@ fn macro_expand(
                 // As such we just return the input subtree here.
                 MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
                     return ExpandResult {
-                        value: Arc::new(arg.clone()),
+                        value: macro_arg.clone(),
                         err: err.map(|err| {
                             let mut buf = String::new();
                             for err in &**err {

@@ -624,7 +652,11 @@ fn macro_expand(
                 MacroDefKind::BuiltInEager(it, _) => {
                     it.expand(db, macro_call_id, &arg).map_err(Into::into)
                 }
-                MacroDefKind::BuiltInAttr(it, _) => it.expand(db, macro_call_id, &arg),
+                MacroDefKind::BuiltInAttr(it, _) => {
+                    let mut res = it.expand(db, macro_call_id, &arg);
+                    fixup::reverse_fixups(&mut res.value, &undo_info);
+                    res
+                }
                 _ => unreachable!(),
             }
         }

@@ -647,9 +679,8 @@ fn macro_expand(
 }
 
 fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
-    // FIXME: Syntax fix ups
     let loc = db.lookup_intern_macro_call(id);
-    let Some(macro_arg) = db.macro_arg(id).value else {
+    let Some((macro_arg, undo_info)) = db.macro_arg(id).value else {
         return ExpandResult {
             value: Arc::new(tt::Subtree {
                 delimiter: tt::Delimiter::UNSPECIFIED,

@@ -672,7 +703,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
     };
 
     let call_site = loc.span(db);
-    let ExpandResult { value: tt, err } = expander.expand(
+    let ExpandResult { value: mut tt, err } = expander.expand(
         db,
         loc.def.krate,
         loc.krate,

@@ -690,6 +721,8 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
         return value;
     }
 
+    fixup::reverse_fixups(&mut tt, &undo_info);
+
     ExpandResult { value: Arc::new(tt), err }
 }
 
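Reversing fixups on the expansion output matters because macros often echo their input: a fake token that went in can come back out. A sketch with a hypothetical pass-through proc macro:

    // input after fixup:       fn f () { a . __ra_fixup }
    // proc-macro output:       fn f () { a . __ra_fixup }    // input echoed back
    // after reverse_fixups:    fn f () { a . }               // fake leaf stripped
    //
    // Without this step `__ra_fixup` would leak into everything downstream of
    // the expansion (completion, hover, nested expansions).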
@@ -1,22 +1,24 @@
 //! To make attribute macros work reliably when typing, we need to take care to
 //! fix up syntax errors in the code we're passing to them.
-use std::mem;
 
 use base_db::{
-    span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId},
+    span::{ErasedFileAstId, SpanAnchor, SpanData},
     FileId,
 };
 use la_arena::RawIdx;
-use mbe::TokenMap;
-use rustc_hash::FxHashMap;
+use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::SmallVec;
 use syntax::{
     ast::{self, AstNode, HasLoopBody},
     match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
 };
+use triomphe::Arc;
 use tt::Spacing;
 
-use crate::tt::{Ident, Leaf, Punct, Subtree};
+use crate::{
+    span::SpanMapRef,
+    tt::{Ident, Leaf, Punct, Subtree},
+};
 
 /// The result of calculating fixes for a syntax node -- a bunch of changes
 /// (appending to and replacing nodes), the information that is needed to
@@ -24,14 +26,19 @@ use crate::tt::{Ident, Leaf, Punct, Subtree};
 #[derive(Debug, Default)]
 pub(crate) struct SyntaxFixups {
     pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>,
-    pub(crate) replace: FxHashMap<SyntaxElement, Vec<()>>,
+    pub(crate) remove: FxHashSet<SyntaxNode>,
     pub(crate) undo_info: SyntaxFixupUndoInfo,
 }
 
 /// This is the information needed to reverse the fixups.
-#[derive(Debug, Default, PartialEq, Eq)]
+#[derive(Clone, Debug, Default, PartialEq, Eq)]
 pub struct SyntaxFixupUndoInfo {
-    original: Box<[Subtree]>,
+    // FIXME: ThinArc<[Subtree]>
+    original: Option<Arc<Box<[Subtree]>>>,
+}
+
+impl SyntaxFixupUndoInfo {
+    pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None };
 }
 
 // censoring -> just don't convert the node
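A note on the new representation: `original: Option<Arc<Box<[Subtree]>>>` makes the common no-fixup case a constant `None` and turns clones into refcount bumps. A self-contained model of the idea (std `Arc` standing in for `triomphe::Arc`, `String` for `tt::Subtree`):

    use std::sync::Arc;

    #[derive(Clone, Debug, Default, PartialEq, Eq)]
    struct UndoInfo {
        original: Option<Arc<Box<[String]>>>,
    }

    const NONE: UndoInfo = UndoInfo { original: None };

    fn main() {
        // No fixups: a shared constant, no allocation.
        assert_eq!(NONE, UndoInfo::default());
        // With fixups: cloning shares the slice instead of copying it.
        let with = UndoInfo {
            original: Some(Arc::new(vec!["subtree".to_owned()].into_boxed_slice())),
        };
        let cheap = with.clone();
        assert!(Arc::ptr_eq(with.original.as_ref().unwrap(), cheap.original.as_ref().unwrap()));
    }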
@@ -39,47 +46,45 @@
 // append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
 // to remove later
 
-pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
+pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
     let mut append = FxHashMap::<SyntaxElement, _>::default();
-    let mut replace = FxHashMap::<SyntaxElement, _>::default();
+    let mut remove = FxHashSet::<SyntaxNode>::default();
     let mut preorder = node.preorder();
     let mut original = Vec::new();
     let dummy_range = TextRange::empty(TextSize::new(0));
+    // we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as
+    // the index into the replacement vec but only if the end points to !0
     let dummy_anchor =
-        SpanAnchor { file_id: FileId(!0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)) };
-    let fake_span =
-        SpanData { range: dummy_range, anchor: dummy_anchor, ctx: SyntaxContextId::FAKE };
+        SpanAnchor { file_id: FileId(!0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(!0)) };
+    let fake_span = |range| SpanData {
+        range: dummy_range,
+        anchor: dummy_anchor,
+        ctx: span_map.span_for_range(range).ctx,
+    };
     while let Some(event) = preorder.next() {
         let syntax::WalkEvent::Enter(node) = event else { continue };
 
-        /*
+        let node_range = node.text_range();
         if can_handle_error(&node) && has_error_to_handle(&node) {
+            remove.insert(node.clone().into());
             // the node contains an error node, we have to completely replace it by something valid
-            let (original_tree, new_tmap, new_next_id) =
-                mbe::syntax_node_to_token_tree_with_modifications(
-                    &node,
-                    mem::take(&mut token_map),
-                    next_id,
-                    Default::default(),
-                    Default::default(),
-                );
-            token_map = new_tmap;
-            next_id = new_next_id;
+            let original_tree = mbe::syntax_node_to_token_tree(&node, span_map);
             let idx = original.len() as u32;
             original.push(original_tree);
-            let replacement = SyntheticToken {
-                kind: SyntaxKind::IDENT,
+            let replacement = Leaf::Ident(Ident {
                 text: "__ra_fixup".into(),
-                range: node.text_range(),
-                id: SyntheticTokenId(idx),
-            };
-            replace.insert(node.clone().into(), vec![replacement]);
+                span: SpanData {
+                    range: TextRange::new(TextSize::new(idx), TextSize::new(!0)),
+                    anchor: dummy_anchor,
+                    ctx: span_map.span_for_range(node_range).ctx,
+                },
+            });
+            append.insert(node.clone().into(), vec![replacement]);
             preorder.skip_subtree();
             continue;
         }
-        */
         // In some other situations, we can fix things by just appending some tokens.
-        let end_range = TextRange::empty(node.text_range().end());
         match_ast! {
             match node {
                 ast::FieldExpr(it) => {
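The hunk above encodes bookkeeping directly into spans: `!0` is all bits set (`u32::MAX`), so it can never collide with a real file id, AST id, or text offset. A self-contained demonstration of the round-trip described in the new comment:

    fn main() {
        const SENTINEL: u32 = !0; // the FileId(!0) / RawIdx::from(!0) marker above
        assert_eq!(SENTINEL, u32::MAX);

        // A token that replaces a broken node stores its undo-vector index in
        // range.start and flags itself with range.end == !0:
        let idx: u32 = 3;
        let (start, end) = (idx, !0u32);
        assert!(end == SENTINEL); // "this leaf replaces original[start]"
        assert_eq!(start as usize, 3); // index recovered by reverse_fixups

        // Purely appended fake tokens keep the dummy 0..0 range instead, so
        // the reversal can tell them apart and simply drop them.
    }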
@@ -88,7 +93,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                     append.insert(node.clone().into(), vec![
                         Leaf::Ident(Ident {
                             text: "__ra_fixup".into(),
-                            span: fake_span
+                            span: fake_span(node_range),
                         }),
                     ]);
                 }

@@ -99,7 +104,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                         Leaf::Punct(Punct {
                             char: ';',
                             spacing: Spacing::Alone,
-                            span: fake_span
+                            span: fake_span(node_range),
                         }),
                     ]);
                 }

@@ -110,7 +115,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                         Leaf::Punct(Punct {
                             char: ';',
                             spacing: Spacing::Alone,
-                            span: fake_span
+                            span: fake_span(node_range)
                         }),
                     ]);
                 }

@@ -125,7 +130,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                     append.insert(if_token.into(), vec![
                         Leaf::Ident(Ident {
                             text: "__ra_fixup".into(),
-                            span: fake_span
+                            span: fake_span(node_range)
                         }),
                     ]);
                 }

@@ -135,12 +140,12 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                         Leaf::Punct(Punct {
                             char: '{',
                             spacing: Spacing::Alone,
-                            span: fake_span
+                            span: fake_span(node_range)
                         }),
                         Leaf::Punct(Punct {
                             char: '}',
                             spacing: Spacing::Alone,
-                            span: fake_span
+                            span: fake_span(node_range)
                         }),
                     ]);
                 }

@@ -155,7 +160,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                     append.insert(while_token.into(), vec![
                         Leaf::Ident(Ident {
                             text: "__ra_fixup".into(),
-                            span: fake_span
+                            span: fake_span(node_range)
                         }),
                     ]);
                 }

@@ -165,12 +170,12 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                         Leaf::Punct(Punct {
                             char: '{',
                             spacing: Spacing::Alone,
-                            span: fake_span
+                            span: fake_span(node_range)
                         }),
                         Leaf::Punct(Punct {
                             char: '}',
                             spacing: Spacing::Alone,
-                            span: fake_span
+                            span: fake_span(node_range)
                         }),
                     ]);
                 }

@@ -182,12 +187,12 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                         Leaf::Punct(Punct {
                             char: '{',
                             spacing: Spacing::Alone,
-                            span: fake_span
+                            span: fake_span(node_range)
                         }),
                         Leaf::Punct(Punct {
                             char: '}',
                             spacing: Spacing::Alone,
-                            span: fake_span
+                            span: fake_span(node_range)
                         }),
                     ]);
                 }

@@ -202,7 +207,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                     append.insert(match_token.into(), vec![
                         Leaf::Ident(Ident {
                             text: "__ra_fixup".into(),
-                            span: fake_span
+                            span: fake_span(node_range)
                         }),
                     ]);
                 }

@@ -213,12 +218,12 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                         Leaf::Punct(Punct {
                             char: '{',
                             spacing: Spacing::Alone,
-                            span: fake_span
+                            span: fake_span(node_range)
                         }),
                         Leaf::Punct(Punct {
                             char: '}',
                             spacing: Spacing::Alone,
-                            span: fake_span
+                            span: fake_span(node_range)
                         }),
                     ]);
                 }

@@ -236,7 +241,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                     ].map(|text|
                         Leaf::Ident(Ident {
                             text: text.into(),
-                            span: fake_span
+                            span: fake_span(node_range)
                         }),
                     );
 

@@ -253,12 +258,12 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                         Leaf::Punct(Punct {
                             char: '{',
                             spacing: Spacing::Alone,
-                            span: fake_span
+                            span: fake_span(node_range)
                         }),
                         Leaf::Punct(Punct {
                             char: '}',
                             spacing: Spacing::Alone,
-                            span: fake_span
+                            span: fake_span(node_range)
                         }),
                     ]);
                 }
@@ -267,10 +272,13 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
             }
         }
     }
+    let needs_fixups = !append.is_empty() || !original.is_empty();
     SyntaxFixups {
         append,
-        replace,
-        undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
+        remove,
+        undo_info: SyntaxFixupUndoInfo {
+            original: needs_fixups.then(|| Arc::new(original.into_boxed_slice())),
+        },
     }
 }
@@ -287,42 +295,55 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
 }
 
 pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
+    let Some(undo_info) = undo_info.original.as_deref() else { return };
+    let undo_info = &**undo_info;
+    reverse_fixups_(tt, undo_info);
+}
+
+fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
     let tts = std::mem::take(&mut tt.token_trees);
     tt.token_trees = tts
         .into_iter()
         // delete all fake nodes
         .filter(|tt| match tt {
-            tt::TokenTree::Leaf(leaf) => leaf.span().ctx != SyntaxContextId::FAKE,
-            tt::TokenTree::Subtree(st) => st.delimiter.open.ctx != SyntaxContextId::FAKE,
+            tt::TokenTree::Leaf(leaf) => {
+                let span = leaf.span();
+                span.anchor.file_id != FileId(!0) || span.range.end() == TextSize::new(!0)
+            }
+            tt::TokenTree::Subtree(_) => true,
+        })
+        .flat_map(|tt| match tt {
+            tt::TokenTree::Subtree(mut tt) => {
+                reverse_fixups_(&mut tt, undo_info);
+                SmallVec::from_const([tt.into()])
+            }
+            tt::TokenTree::Leaf(leaf) => {
+                if leaf.span().anchor.file_id == FileId(!0) {
+                    let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
+                    if original.delimiter.kind == tt::DelimiterKind::Invisible {
+                        original.token_trees.into()
+                    } else {
+                        SmallVec::from_const([original.into()])
+                    }
+                } else {
+                    SmallVec::from_const([leaf.into()])
+                }
+            }
         })
-        // .flat_map(|tt| match tt {
-        //     tt::TokenTree::Subtree(mut tt) => {
-        //         reverse_fixups(&mut tt, undo_info);
-        //         SmallVec::from_const([tt.into()])
-        //     }
-        //     tt::TokenTree::Leaf(leaf) => {
-        //         if let Some(id) = leaf.span().anchor {
-        //             let original = undo_info.original[id.0 as usize].clone();
-        //             if original.delimiter.kind == tt::DelimiterKind::Invisible {
-        //                 original.token_trees.into()
-        //             } else {
-        //                 SmallVec::from_const([original.into()])
-        //             }
-        //         } else {
-        //             SmallVec::from_const([leaf.into()])
-        //         }
-        //     }
-        // })
         .collect();
 }
 
 #[cfg(test)]
 mod tests {
+    use base_db::FileId;
     use expect_test::{expect, Expect};
+    use triomphe::Arc;
 
-    use crate::tt;
-    use super::reverse_fixups;
+    use crate::{
+        fixup::reverse_fixups,
+        span::{RealSpanMap, SpanMap},
+        tt,
+    };
 
     // The following three functions are only meant to check partial structural equivalence of
     // `TokenTree`s, see the last assertion in `check()`.
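The reversal is two passes per subtree: `filter` keeps a leaf unless it carries the fake anchor and is not a replacement (its range end is not the `!0` marker), then `flat_map` recurses into subtrees and swaps each surviving fake leaf for `undo_info[range.start]`. A replacement whose saved delimiter is `Invisible` is spliced into the parent token list rather than nested, which is why the closure returns a `SmallVec` of trees instead of exactly one.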
@@ -352,13 +373,13 @@ mod tests {
     #[track_caller]
     fn check(ra_fixture: &str, mut expect: Expect) {
         let parsed = syntax::SourceFile::parse(ra_fixture);
-        let fixups = super::fixup_syntax(&parsed.syntax_node());
-        let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+        let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId(0))));
+        let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node());
+        let mut tt = mbe::syntax_node_to_token_tree_modified(
             &parsed.syntax_node(),
-            fixups.token_map,
-            fixups.next_id,
-            fixups.replace,
+            span_map.as_ref(),
             fixups.append,
+            fixups.remove,
         );
 
         let actual = format!("{tt}\n");

@@ -374,14 +395,15 @@ mod tests {
             parse.syntax_node()
         );
 
-        reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
+        reverse_fixups(&mut tt, &fixups.undo_info);
 
         // the fixed-up + reversed version should be equivalent to the original input
         // modulo token IDs and `Punct`s' spacing.
-        let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
+        let original_as_tt =
+            mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref());
         assert!(
             check_subtree_eq(&tt, &original_as_tt),
-            "different token tree: {tt:?},\n{original_as_tt:?}"
+            "different token tree:\n{tt:?}\n\n{original_as_tt:?}"
         );
     }
 
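A hypothetical test in the style of the `check` helper above, showing the expected round-trip shape (the expected string follows the `__ra_fixup` convention from this commit, not a verbatim quote of the test suite):

    #[test]
    fn incomplete_field_access() {
        check(
            r#"fn foo() { a. }"#,
            expect![[r#"fn foo () {a . __ra_fixup}"#]],
        )
    }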
@@ -20,7 +20,7 @@ pub mod mod_path;
 pub mod attrs;
 pub mod span;
 pub mod files;
-// mod fixup;
+mod fixup;
 
 use triomphe::Arc;
 

@@ -42,6 +42,7 @@ use crate::{
     builtin_derive_macro::BuiltinDeriveExpander,
     builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
     db::TokenExpander,
+    fixup::SyntaxFixupUndoInfo,
     mod_path::ModPath,
     proc_macro::ProcMacroExpander,
     span::{ExpansionSpanMap, SpanMap},

@@ -695,8 +696,14 @@ impl ExpansionInfo {
         let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
         let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
 
-        let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
-            Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() })
+        let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
+            (
+                Arc::new(tt::Subtree {
+                    delimiter: tt::Delimiter::UNSPECIFIED,
+                    token_trees: Vec::new(),
+                }),
+                SyntaxFixupUndoInfo::NONE,
+            )
         });
 
         let def = loc.def.ast_id().left().and_then(|id| {
@@ -150,7 +150,7 @@ mod baz {}
                 ],
             ),
             main_node: Some(
-                InFile {
+                InFileWrapper {
                     file_id: FileId(
                         0,
                     ),
@@ -36,7 +36,7 @@ pub use crate::{
     syntax_bridge::{
         map_from_syntax_node, parse_exprs_with_sep, parse_to_token_tree,
         parse_to_token_tree_static_span, syntax_node_to_token_tree,
-        syntax_node_to_token_tree_censored, token_tree_to_syntax_node, SpanMapper,
+        syntax_node_to_token_tree_modified, token_tree_to_syntax_node, SpanMapper,
     },
     token_map::TokenMap,
 };
@@ -1,6 +1,7 @@
 //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
 
-use stdx::non_empty_vec::NonEmptyVec;
+use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::{never, non_empty_vec::NonEmptyVec};
 use syntax::{
     ast::{self, make::tokens::doc_comment},
     AstToken, NodeOrToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,

@@ -74,14 +75,15 @@ where
     Ctx: SyntaxContext,
     SpanMap: SpanMapper<SpanData<Anchor, Ctx>>,
 {
-    let mut c = Converter::new(node, vec![], map);
+    let mut c = Converter::new(node, map, Default::default(), Default::default());
     convert_tokens(&mut c)
 }
 
-pub fn syntax_node_to_token_tree_censored<Anchor, Ctx, SpanMap>(
+pub fn syntax_node_to_token_tree_modified<Anchor, Ctx, SpanMap>(
     node: &SyntaxNode,
     map: SpanMap,
-    censored: Vec<SyntaxNode>,
+    append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Anchor, Ctx>>>>,
+    remove: FxHashSet<SyntaxNode>,
 ) -> tt::Subtree<SpanData<Anchor, Ctx>>
 where
     SpanMap: SpanMapper<SpanData<Anchor, Ctx>>,

@@ -89,7 +91,7 @@ where
     Anchor: Copy,
     Ctx: SyntaxContext,
 {
-    let mut c = Converter::new(node, censored, map);
+    let mut c = Converter::new(node, map, append, remove);
     convert_tokens(&mut c)
 }
 
@@ -237,9 +239,9 @@ where
     while let Some((token, abs_range)) = conv.bump() {
         let tt::Subtree { delimiter, token_trees: result } = stack.last_mut();
 
-        let kind = token.kind(conv);
-        let tt = match kind {
+        let tt = match token.as_leaf() {
+            Some(leaf) => tt::TokenTree::Leaf(leaf.clone()),
+            None => match token.kind(conv) {
             // Desugar doc comments into doc attributes
             COMMENT => {
                 let span = conv.span_for(abs_range);

@@ -248,7 +250,7 @@ where
                 }
                 continue;
             }
-            _ if kind.is_punct() && kind != UNDERSCORE => {
+            kind if kind.is_punct() && kind != UNDERSCORE => {
                 let expected = match delimiter.kind {
                     tt::DelimiterKind::Parenthesis => Some(T![')']),
                     tt::DelimiterKind::Brace => Some(T!['}']),

@@ -295,12 +297,14 @@ where
                 let Some(char) = token.to_char(conv) else {
                     panic!("Token from lexer must be single char: token = {token:#?}")
                 };
-                tt::Leaf::from(tt::Punct { char, spacing, span: conv.span_for(abs_range) }).into()
+                tt::Leaf::from(tt::Punct { char, spacing, span: conv.span_for(abs_range) })
+                    .into()
             }
-            _ => {
+            kind => {
                 macro_rules! make_leaf {
                     ($i:ident) => {
-                        tt::$i { span: conv.span_for(abs_range), text: token.to_text(conv) }.into()
+                        tt::$i { span: conv.span_for(abs_range), text: token.to_text(conv) }
+                            .into()
                     };
                 }
                 let leaf: tt::Leaf<_> = match kind {

@@ -333,6 +337,7 @@ where
 
                 leaf.into()
             }
+            },
         };
 
         result.push(tt);
@@ -470,16 +475,20 @@ struct StaticRawConverter<'a, S> {
     span: S,
 }
 
-trait SrcToken<Ctx>: std::fmt::Debug {
+trait SrcToken<Ctx, S>: std::fmt::Debug {
     fn kind(&self, ctx: &Ctx) -> SyntaxKind;
 
     fn to_char(&self, ctx: &Ctx) -> Option<char>;
 
     fn to_text(&self, ctx: &Ctx) -> SmolStr;
+
+    fn as_leaf(&self) -> Option<&tt::Leaf<S>> {
+        None
+    }
 }
 
 trait TokenConverter<S>: Sized {
-    type Token: SrcToken<Self>;
+    type Token: SrcToken<Self, S>;
 
     fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>>;
 

@@ -490,7 +499,7 @@ trait TokenConverter<S>: Sized {
     fn span_for(&self, range: TextRange) -> S;
 }
 
-impl<Anchor> SrcToken<RawConverter<'_, Anchor>> for usize {
+impl<Anchor, S> SrcToken<RawConverter<'_, Anchor>, S> for usize {
     fn kind(&self, ctx: &RawConverter<'_, Anchor>) -> SyntaxKind {
         ctx.lexed.kind(*self)
     }

@@ -504,7 +513,7 @@ impl<Anchor> SrcToken<RawConverter<'_, Anchor>> for usize {
     }
 }
 
-impl<S: Span> SrcToken<StaticRawConverter<'_, S>> for usize {
+impl<S: Span> SrcToken<StaticRawConverter<'_, S>, S> for usize {
     fn kind(&self, ctx: &StaticRawConverter<'_, S>) -> SyntaxKind {
         ctx.lexed.kind(*self)
     }
@@ -593,32 +602,79 @@ where
     }
 }
 
-struct Converter<SpanMap> {
+struct Converter<SpanMap, S> {
     current: Option<SyntaxToken>,
+    current_leafs: Vec<tt::Leaf<S>>,
     preorder: PreorderWithTokens,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
     /// Used to make the emitted text ranges in the spans relative to the span anchor.
     map: SpanMap,
-    censored: Vec<SyntaxNode>,
+    append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
+    remove: FxHashSet<SyntaxNode>,
 }
 
-impl<SpanMap> Converter<SpanMap> {
-    fn new(node: &SyntaxNode, censored: Vec<SyntaxNode>, map: SpanMap) -> Self {
-        let range = node.text_range();
-        let mut preorder = node.preorder_with_tokens();
-        let first = Self::next_token(&mut preorder, &censored);
-        Converter { current: first, preorder, range, punct_offset: None, censored, map }
+impl<SpanMap, S> Converter<SpanMap, S> {
+    fn new(
+        node: &SyntaxNode,
+        map: SpanMap,
+        append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
+        remove: FxHashSet<SyntaxNode>,
+    ) -> Self {
+        let mut this = Converter {
+            current: None,
+            preorder: node.preorder_with_tokens(),
+            range: node.text_range(),
+            punct_offset: None,
+            map,
+            append,
+            remove,
+            current_leafs: vec![],
+        };
+        let first = this.next_token();
+        this.current = first;
+        this
     }
 
-    fn next_token(preorder: &mut PreorderWithTokens, censor: &[SyntaxNode]) -> Option<SyntaxToken> {
-        while let Some(ev) = preorder.next() {
+    fn next_token(&mut self) -> Option<SyntaxToken> {
+        // while let Some(ev) = self.preorder.next() {
+        //     match ev {
+        //         WalkEvent::Enter(SyntaxElement::Token(t)) => {
+        //             if let Some(leafs) = self.append.remove(&t.clone().into()) {
+        //                 self.current_leafs.extend(leafs);
+        //             }
+        //             return Some(t);
+        //         }
+        //         WalkEvent::Enter(SyntaxElement::Node(n)) if self.remove.contains(&n) => {
+        //             self.preorder.skip_subtree();
+        //             if let Some(leafs) = self.append.remove(&n.into()) {
+        //                 self.current_leafs.extend(leafs);
+        //             }
+        //         }
+        //         _ => (),
+        //     }
+        // }
+        // None;
+
+        while let Some(ev) = self.preorder.next() {
             match ev {
                 WalkEvent::Enter(SyntaxElement::Token(t)) => return Some(t),
-                WalkEvent::Enter(SyntaxElement::Node(n)) if censor.contains(&n) => {
-                    preorder.skip_subtree()
+                WalkEvent::Enter(SyntaxElement::Node(n)) if self.remove.contains(&n) => {
+                    self.preorder.skip_subtree();
+                    if let Some(mut v) = self.append.remove(&n.into()) {
+                        v.reverse();
+                        self.current_leafs.extend(v);
+                        return None;
+                    }
+                }
+                WalkEvent::Enter(SyntaxElement::Node(_)) => (),
+                WalkEvent::Leave(ele) => {
+                    if let Some(mut v) = self.append.remove(&ele) {
+                        v.reverse();
+                        self.current_leafs.extend(v);
+                        return None;
+                    }
                 }
-                _ => (),
             }
         }
         None
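The `v.reverse()` calls above are load-bearing: appended leaves are buffered in `current_leafs` and handed out by `pop()` in `bump`, i.e. from the back, so they are pushed reversed to come back out in source order. A standalone model:

    fn main() {
        let mut current_leafs: Vec<&str> = Vec::new();
        let mut v = vec!["__ra_fixup", ";"]; // leaves to append, in source order
        v.reverse();
        current_leafs.extend(v);
        assert_eq!(current_leafs.pop(), Some("__ra_fixup")); // first out
        assert_eq!(current_leafs.pop(), Some(";"));
    }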
@@ -626,45 +682,62 @@ impl<SpanMap> Converter<SpanMap> {
     }
 }
 
 #[derive(Debug)]
-enum SynToken {
+enum SynToken<S> {
     Ordinary(SyntaxToken),
-    Punct(SyntaxToken, usize),
+    Punct { token: SyntaxToken, offset: usize },
+    Leaf(tt::Leaf<S>),
 }
 
-impl SynToken {
+impl<S> SynToken<S> {
     fn token(&self) -> &SyntaxToken {
         match self {
-            SynToken::Ordinary(it) | SynToken::Punct(it, _) => it,
+            SynToken::Ordinary(it) | SynToken::Punct { token: it, offset: _ } => it,
+            SynToken::Leaf(_) => unreachable!(),
         }
     }
 }
 
-impl<SpanMap> SrcToken<Converter<SpanMap>> for SynToken {
-    fn kind(&self, ctx: &Converter<SpanMap>) -> SyntaxKind {
+impl<SpanMap, S: std::fmt::Debug> SrcToken<Converter<SpanMap, S>, S> for SynToken<S> {
+    fn kind(&self, ctx: &Converter<SpanMap, S>) -> SyntaxKind {
         match self {
             SynToken::Ordinary(token) => token.kind(),
-            SynToken::Punct(..) => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(),
+            SynToken::Punct { .. } => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(),
+            SynToken::Leaf(_) => {
+                never!();
+                SyntaxKind::ERROR
+            }
         }
     }
-    fn to_char(&self, _ctx: &Converter<SpanMap>) -> Option<char> {
+    fn to_char(&self, _ctx: &Converter<SpanMap, S>) -> Option<char> {
         match self {
             SynToken::Ordinary(_) => None,
-            SynToken::Punct(it, i) => it.text().chars().nth(*i),
+            SynToken::Punct { token: it, offset: i } => it.text().chars().nth(*i),
+            SynToken::Leaf(_) => None,
        }
     }
-    fn to_text(&self, _ctx: &Converter<SpanMap>) -> SmolStr {
+    fn to_text(&self, _ctx: &Converter<SpanMap, S>) -> SmolStr {
         match self {
-            SynToken::Ordinary(token) | SynToken::Punct(token, _) => token.text().into(),
+            SynToken::Ordinary(token) | SynToken::Punct { token, offset: _ } => token.text().into(),
+            SynToken::Leaf(_) => {
+                never!();
+                "".into()
+            }
+        }
+    }
+    fn as_leaf(&self) -> Option<&tt::Leaf<S>> {
+        match self {
+            SynToken::Ordinary(_) | SynToken::Punct { .. } => None,
+            SynToken::Leaf(it) => Some(it),
         }
     }
 }
 
-impl<S, SpanMap> TokenConverter<S> for Converter<SpanMap>
+impl<S, SpanMap> TokenConverter<S> for Converter<SpanMap, S>
 where
     S: Span,
     SpanMap: SpanMapper<S>,
 {
-    type Token = SynToken;
+    type Token = SynToken<S>;
     fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>> {
         convert_doc_comment(token.token(), span)
     }
@@ -676,20 +749,31 @@ where
                 let range = punct.text_range();
                 self.punct_offset = Some((punct.clone(), offset));
                 let range = TextRange::at(range.start() + offset, TextSize::of('.'));
-                return Some((SynToken::Punct(punct, u32::from(offset) as usize), range));
+                return Some((
+                    SynToken::Punct { token: punct, offset: u32::from(offset) as usize },
+                    range,
+                ));
             }
         }
 
+        if let Some(leaf) = self.current_leafs.pop() {
+            if self.current_leafs.is_empty() {
+                self.current = self.next_token();
+            }
+            return Some((SynToken::Leaf(leaf), TextRange::empty(TextSize::new(0))));
+        }
+
         let curr = self.current.clone()?;
         if !self.range.contains_range(curr.text_range()) {
             return None;
         }
-        self.current = Self::next_token(&mut self.preorder, &self.censored);
+        self.current = self.next_token();
         let token = if curr.kind().is_punct() {
             self.punct_offset = Some((curr.clone(), 0.into()));
             let range = curr.text_range();
             let range = TextRange::at(range.start(), TextSize::of('.'));
-            (SynToken::Punct(curr, 0 as usize), range)
+            (SynToken::Punct { token: curr, offset: 0 as usize }, range)
         } else {
             self.punct_offset = None;
             let range = curr.text_range();

@@ -703,7 +787,7 @@ where
         if let Some((punct, mut offset)) = self.punct_offset.clone() {
             offset += TextSize::of('.');
             if usize::from(offset) < punct.text().len() {
-                return Some(SynToken::Punct(punct, usize::from(offset)));
+                return Some(SynToken::Punct { token: punct, offset: usize::from(offset) });
             }
         }
 

@@ -713,7 +797,7 @@ where
         }
 
         let token = if curr.kind().is_punct() {
-            SynToken::Punct(curr, 0 as usize)
+            SynToken::Punct { token: curr, offset: 0 as usize }
         } else {
             SynToken::Ordinary(curr)
         };