Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-27 20:42:04 +00:00
Auto merge of #16167 - Veykril:dummy-spans, r=Veykril
fix: Fully remove dummy spans

Fixes https://github.com/rust-lang/rust-analyzer/issues/16008

Some of these spans are certainly wrong, but since we currently discard invisible delimiters it doesn't really matter.
This commit is contained in: commit 337e2abb67
32 changed files with 292 additions and 256 deletions
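The change replaces dummy spans fabricated inside the token-tree conversion code (`tt::Delimiter::DUMMY_INVISIBLE`, `SyntaxContext::DUMMY`) with spans passed in by the caller, as the diff below shows for `syntax_node_to_token_tree`, `parse_exprs_with_sep`, and the internal `TokenConverter` machinery. A minimal sketch of the pattern, using simplified stand-in types rather than the real `tt` crate definitions:

    // Sketch only: simplified stand-ins for the `tt` crate types touched by this diff.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct Span(u32);

    #[derive(Debug, Clone, Copy)]
    enum DelimiterKind {
        Invisible,
    }

    #[derive(Debug, Clone, Copy)]
    struct Delimiter {
        open: Span,
        close: Span,
        kind: DelimiterKind,
    }

    impl Delimiter {
        // Analogue of the new `tt::Delimiter::invisible_spanned` used in the diff:
        // the caller provides a real span instead of a `DUMMY_INVISIBLE` constant.
        fn invisible_spanned(span: Span) -> Self {
            Delimiter { open: span, close: span, kind: DelimiterKind::Invisible }
        }
    }

    #[derive(Debug)]
    struct Subtree {
        delimiter: Delimiter,
        token_trees: Vec<()>, // token trees elided in this sketch
    }

    fn main() {
        // After the change, conversion entry points take the span to attach to
        // synthesized (invisible) delimiters as an argument.
        let call_site = Span(42);
        let entry = Subtree {
            delimiter: Delimiter::invisible_spanned(call_site),
            token_trees: vec![],
        };
        println!("{entry:?}");
    }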
@@ -11,7 +11,7 @@ use syntax::{
 };
 use tt::{
     buffer::{Cursor, TokenBuffer},
-    Span, SyntaxContext,
+    Span,
 };
 
 use crate::{to_parser_input::to_parser_input, tt_iter::TtIter};
@@ -37,7 +37,6 @@ impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM {
 
 /// Dummy things for testing where spans don't matter.
 pub(crate) mod dummy_test_span_utils {
-    use tt::SyntaxContext;
 
     use super::*;
 
@@ -53,9 +52,6 @@ pub(crate) mod dummy_test_span_utils {
 
     #[derive(Debug, Copy, Clone, PartialEq, Eq)]
     pub struct DummyTestSyntaxContext;
-    impl SyntaxContext for DummyTestSyntaxContext {
-        const DUMMY: Self = DummyTestSyntaxContext;
-    }
 
     pub struct DummyTestSpanMap;
 
@@ -78,13 +74,14 @@ pub(crate) mod dummy_test_span_utils {
 pub fn syntax_node_to_token_tree<Ctx, SpanMap>(
     node: &SyntaxNode,
     map: SpanMap,
+    span: SpanData<Ctx>,
 ) -> tt::Subtree<SpanData<Ctx>>
 where
     SpanData<Ctx>: Span,
-    Ctx: SyntaxContext,
+    Ctx: Copy,
     SpanMap: SpanMapper<SpanData<Ctx>>,
 {
-    let mut c = Converter::new(node, map, Default::default(), Default::default());
+    let mut c = Converter::new(node, map, Default::default(), Default::default(), span);
     convert_tokens(&mut c)
 }
 
@@ -96,13 +93,14 @@ pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap>(
     map: SpanMap,
     append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Ctx>>>>,
     remove: FxHashSet<SyntaxNode>,
+    call_site: SpanData<Ctx>,
 ) -> tt::Subtree<SpanData<Ctx>>
 where
     SpanMap: SpanMapper<SpanData<Ctx>>,
     SpanData<Ctx>: Span,
-    Ctx: SyntaxContext,
+    Ctx: Copy,
 {
-    let mut c = Converter::new(node, map, append, remove);
+    let mut c = Converter::new(node, map, append, remove, call_site);
     convert_tokens(&mut c)
 }
 
@@ -126,7 +124,7 @@ pub fn token_tree_to_syntax_node<Ctx>(
 ) -> (Parse<SyntaxNode>, SpanMap<SpanData<Ctx>>)
 where
     SpanData<Ctx>: Span,
-    Ctx: SyntaxContext,
+    Ctx: Copy,
 {
     let buffer = match tt {
         tt::Subtree {
@@ -163,7 +161,7 @@ pub fn parse_to_token_tree<Ctx>(
 ) -> Option<tt::Subtree<SpanData<Ctx>>>
 where
     SpanData<Ctx>: Span,
-    Ctx: SyntaxContext,
+    Ctx: Copy,
 {
     let lexed = parser::LexedStr::new(text);
     if lexed.errors().next().is_some() {
@@ -187,7 +185,11 @@ where
 }
 
 /// Split token tree with separate expr: $($e:expr)SEP*
-pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::Subtree<S>> {
+pub fn parse_exprs_with_sep<S: Span>(
+    tt: &tt::Subtree<S>,
+    sep: char,
+    span: S,
+) -> Vec<tt::Subtree<S>> {
     if tt.token_trees.is_empty() {
         return Vec::new();
     }
@@ -200,7 +202,7 @@ pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::
 
         res.push(match expanded.value {
             None => break,
-            Some(tt) => tt.subtree_or_wrap(),
+            Some(tt) => tt.subtree_or_wrap(tt::DelimSpan { open: span, close: span }),
         });
 
         let mut fork = iter.clone();
@@ -212,7 +214,7 @@ pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::
 
     if iter.peek_n(0).is_some() {
         res.push(tt::Subtree {
-            delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+            delimiter: tt::Delimiter::invisible_spanned(span),
             token_trees: iter.cloned().collect(),
         });
     }
@@ -225,7 +227,10 @@ where
     C: TokenConverter<S>,
     S: Span,
 {
-    let entry = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] };
+    let entry = tt::Subtree {
+        delimiter: tt::Delimiter::invisible_spanned(conv.call_site()),
+        token_trees: vec![],
+    };
     let mut stack = NonEmptyVec::new(entry);
 
     while let Some((token, abs_range)) = conv.bump() {
@@ -501,6 +506,8 @@ trait TokenConverter<S>: Sized {
     fn peek(&self) -> Option<Self::Token>;
 
     fn span_for(&self, range: TextRange) -> S;
+
+    fn call_site(&self) -> S;
 }
 
 impl<S, Ctx> SrcToken<RawConverter<'_, Ctx>, S> for usize {
@@ -531,7 +538,7 @@ impl<S: Span> SrcToken<StaticRawConverter<'_, S>, S> for usize {
     }
 }
 
-impl<Ctx: SyntaxContext> TokenConverter<SpanData<Ctx>> for RawConverter<'_, Ctx>
+impl<Ctx: Copy> TokenConverter<SpanData<Ctx>> for RawConverter<'_, Ctx>
 where
     SpanData<Ctx>: Span,
 {
@@ -568,6 +575,10 @@ where
     fn span_for(&self, range: TextRange) -> SpanData<Ctx> {
         SpanData { range, anchor: self.anchor, ctx: self.ctx }
     }
+
+    fn call_site(&self) -> SpanData<Ctx> {
+        SpanData { range: TextRange::empty(0.into()), anchor: self.anchor, ctx: self.ctx }
+    }
 }
 
 impl<S> TokenConverter<S> for StaticRawConverter<'_, S>
@@ -603,6 +614,10 @@ where
     fn span_for(&self, _: TextRange) -> S {
         self.span
     }
+
+    fn call_site(&self) -> S {
+        self.span
+    }
 }
 
 struct Converter<SpanMap, S> {
@@ -615,6 +630,7 @@ struct Converter<SpanMap, S> {
     map: SpanMap,
     append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
     remove: FxHashSet<SyntaxNode>,
+    call_site: S,
 }
 
 impl<SpanMap, S> Converter<SpanMap, S> {
@@ -623,6 +639,7 @@ impl<SpanMap, S> Converter<SpanMap, S> {
         map: SpanMap,
         append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
         remove: FxHashSet<SyntaxNode>,
+        call_site: S,
     ) -> Self {
         let mut this = Converter {
             current: None,
@@ -632,6 +649,7 @@ impl<SpanMap, S> Converter<SpanMap, S> {
             map,
             append,
             remove,
+            call_site,
             current_leafs: vec![],
         };
         let first = this.next_token();
@@ -791,6 +809,9 @@ where
     fn span_for(&self, range: TextRange) -> S {
         self.map.span_for(range)
     }
+    fn call_site(&self) -> S {
+        self.call_site
+    }
 }
 
 struct TtTreeSink<'a, Ctx>
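Taken together, the hunks above thread an explicit call-site span through the conversion machinery: `TokenConverter` gains a `call_site` accessor, `Converter` stores the span it is constructed with, and `convert_tokens` spans its synthetic entry delimiter with `conv.call_site()` instead of `DUMMY_INVISIBLE`. A rough sketch of that shape, with toy types standing in for the real internal ones:

    // Sketch only: the real `TokenConverter`/`Converter` types are internal to
    // rust-analyzer's mbe crate; this toy version just mirrors the call-site threading.
    #[derive(Debug, Clone, Copy)]
    struct Span(u32);

    trait TokenConverter {
        // Added by this commit: converters report the span to attach to
        // synthesized delimiters that have no source text of their own.
        fn call_site(&self) -> Span;
    }

    struct Converter {
        call_site: Span,
    }

    impl TokenConverter for Converter {
        fn call_site(&self) -> Span {
            self.call_site
        }
    }

    // Analogue of `convert_tokens`: the synthetic entry delimiter is spanned with
    // the converter's call site instead of a dummy constant.
    fn entry_delimiter_spans<C: TokenConverter>(conv: &C) -> (Span, Span) {
        (conv.call_site(), conv.call_site())
    }

    fn main() {
        let conv = Converter { call_site: Span(7) };
        println!("{:?}", entry_delimiter_spans(&conv));
    }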