Shrink mbe's Op
(rust-lang/rust-analyzer, commit 8df034d453, parent e052b3e9a6)

12 changed files with 55 additions and 29 deletions
Cargo.lock (generated)

@@ -1046,6 +1046,7 @@ checksum = "75761162ae2b0e580d7e7c390558127e5f01b4194debd6221fd8c207fc80e3f5"
 name = "mbe"
 version = "0.0.0"
 dependencies = [
+ "arrayvec",
  "cov-mark",
  "parser",
  "rustc-hash",
@@ -103,12 +103,13 @@ const PREDEFINED_TOOLS: &[SmolStr] = &[
 /// is computed by the `block_def_map` query.
 #[derive(Debug, PartialEq, Eq)]
 pub struct DefMap {
+    /// The crate this `DefMap` belongs to.
+    krate: CrateId,
     /// When this is a block def map, this will hold the block id of the block and module that
     /// contains this block.
     block: Option<BlockInfo>,
     /// The modules and their data declared in this crate.
     pub modules: Arena<ModuleData>,
-    krate: CrateId,
     /// The prelude module for this crate. This either comes from an import
     /// marked with the `prelude_import` attribute, or (in the normal case) from
     /// a dependency (`std` or `core`).
@@ -124,6 +125,7 @@ pub struct DefMap {

     /// Tracks which custom derives are in scope for an item, to allow resolution of derive helper
     /// attributes.
+    // FIXME: Figure out a better way for the IDE layer to resolve these?
     derive_helpers_in_scope: FxHashMap<AstId<ast::Item>, Vec<(Name, MacroId, MacroCallId)>>,

     /// The diagnostics that need to be emitted for this crate.
@@ -25,7 +25,8 @@ impl ChangeWithProcMacros {

     pub fn apply(self, db: &mut (impl ExpandDatabase + SourceDatabaseExt)) {
         self.source_change.apply(db);
-        if let Some(proc_macros) = self.proc_macros {
+        if let Some(mut proc_macros) = self.proc_macros {
+            proc_macros.shrink_to_fit();
             db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH);
         }
         if let Some(target_data_layouts) = self.target_data_layouts {
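The binding becomes `mut` so the proc-macro table can be trimmed before it is frozen behind an `Arc`. A minimal sketch of that pattern, using a hypothetical `ProcMacros` wrapper rather than the real rust-analyzer type:

```rust
use std::sync::Arc;

// Hypothetical stand-in for the proc-macro table.
struct ProcMacros(Vec<String>);

fn freeze(mut names: Vec<String>) -> Arc<ProcMacros> {
    // Drop any excess capacity now: once the data sits behind an `Arc`
    // it is shared immutably and the allocation can no longer be trimmed.
    names.shrink_to_fit();
    Arc::new(ProcMacros(names))
}

fn main() {
    let mut names = Vec::with_capacity(64);
    names.push("derive_serialize".to_owned()); // illustrative entry
    let frozen = freeze(names);
    println!("{} proc macros, capacity {}", frozen.0.len(), frozen.0.capacity());
}
```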
@@ -1322,7 +1322,7 @@ fn iterate_inherent_methods(
     callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
 ) -> ControlFlow<()> {
     for &impl_id in impls.for_self_ty(self_ty) {
-        for &item in &table.db.impl_data(impl_id).items {
+        for &item in table.db.impl_data(impl_id).items.iter() {
             let visible = match is_valid_impl_method_candidate(
                 table,
                 self_ty,
@@ -760,7 +760,7 @@ impl Module {
                 impl_assoc_items_scratch.clear();
             }

-            for &item in &db.impl_data(impl_def.id).items {
+            for &item in db.impl_data(impl_def.id).items.iter() {
                 AssocItem::from(item).diagnostics(db, acc, style_lints);
             }
         }
@@ -231,7 +231,7 @@ impl<'a> SymbolCollector<'a> {
         let impl_data = self.db.impl_data(impl_id);
         let impl_name = Some(SmolStr::new(impl_data.self_ty.display(self.db).to_string()));
         self.with_container_name(impl_name, |s| {
            for &assoc_item_id in &impl_data.items {
            for &assoc_item_id in impl_data.items.iter() {
                 s.push_assoc_item(assoc_item_id)
             }
         })
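Three call sites above switch from `for &item in &…items` to an explicit `…items.iter()`. The diff does not show the type change behind this, but the pattern is what you write when a container is slice-like rather than a `Vec`; iterating through `.iter()` works the same way for any of them. A small sketch with a hypothetical `ImplData` (the field type here is an assumption, not the real definition):

```rust
// Hypothetical stand-in; the real ImplData lives in hir-def.
struct ImplData {
    items: Box<[u32]>,
}

fn main() {
    let data = ImplData { items: vec![10, 20, 30].into_boxed_slice() };
    // `.iter()` works uniformly for Vec<T>, Box<[T]>, or Arc<[T]>,
    // because all of them deref to a slice.
    for &item in data.items.iter() {
        println!("assoc item id: {item}");
    }
}
```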
@@ -16,6 +16,7 @@ cov-mark = "2.0.0-pre.1"
 rustc-hash.workspace = true
 smallvec.workspace = true
 tracing.workspace = true
+arrayvec.workspace = true

 # local deps
 syntax.workspace = true
@@ -170,7 +170,7 @@ fn invocation_fixtures(
             Op::Literal(it) => token_trees.push(tt::Leaf::from(it.clone()).into()),
             Op::Ident(it) => token_trees.push(tt::Leaf::from(it.clone()).into()),
             Op::Punct(puncts) => {
-                for punct in puncts {
+                for punct in puncts.as_slice() {
                     token_trees.push(tt::Leaf::from(*punct).into());
                 }
             }
@@ -187,7 +187,7 @@ fn invocation_fixtures(
                     }
                     if i + 1 != cnt {
                         if let Some(sep) = separator {
-                            match sep {
+                            match &**sep {
                                 Separator::Literal(it) => {
                                     token_trees.push(tt::Leaf::Literal(it.clone()).into())
                                 }
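Both benchmark hunks are fall-out from the new `Op` layout: `Op::Punct` now carries a `Box<ArrayVec<…>>` and the separator sits behind an `Arc`, so iterating and matching need an explicit view through the pointer. A rough sketch with simplified element types (`char` stands in for `tt::Punct<Span>` and for the separator payload; assumes the `arrayvec` crate as a dependency):

```rust
use std::sync::Arc;

use arrayvec::ArrayVec;

fn main() {
    // Boxed fixed-capacity puncts, mirroring the new `Op::Punct` payload.
    let mut puncts: ArrayVec<char, 3> = ArrayVec::new();
    puncts.push('-');
    puncts.push('>');
    let puncts = Box::new(puncts);

    // A `&Box<ArrayVec<..>>` is not itself iterable, so the loop walks an
    // explicit slice view instead.
    for p in puncts.as_slice() {
        print!("{p}");
    }
    println!();

    // Matching the payload of an `Arc` likewise needs `&**` to get from
    // `&Arc<T>` down to `&T`.
    let sep = Some(Arc::new(','));
    if let Some(sep) = &sep {
        match &**sep {
            ',' => println!("comma separator"),
            other => println!("other separator: {other}"),
        }
    }
}
```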
@@ -59,7 +59,7 @@
 //! eof: [a $( a )* a b ·]
 //! ```

-use std::rc::Rc;
+use std::{rc::Rc, sync::Arc};

 use smallvec::{smallvec, SmallVec};
 use span::{Edition, Span};
@@ -315,7 +315,7 @@ struct MatchState<'t> {
     up: Option<Box<MatchState<'t>>>,

     /// The separator if we are in a repetition.
-    sep: Option<Separator>,
+    sep: Option<Arc<Separator>>,

     /// The KleeneOp of this sequence if we are in a repetition.
     sep_kind: Option<RepeatKind>,
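`MatchState` is the value the matcher clones as it explores alternatives, so putting the separator behind an `Arc` plausibly turns each clone of this field into a reference-count bump instead of a deep copy of the separator tokens. A minimal sketch with a hypothetical simplified `Separator`:

```rust
use std::sync::Arc;

// Hypothetical simplified separator.
#[derive(Debug)]
enum Separator {
    Puncts(Vec<char>),
}

#[derive(Clone, Debug)]
struct MatchState {
    // Cloning the state shares the separator instead of duplicating it.
    sep: Option<Arc<Separator>>,
}

fn main() {
    let state = MatchState { sep: Some(Arc::new(Separator::Puncts(vec![',']))) };
    let forked = state.clone();
    // Both states point at the same allocation.
    assert!(Arc::ptr_eq(state.sep.as_ref().unwrap(), forked.sep.as_ref().unwrap()));
    println!("{forked:?}");
}
```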
@@ -195,7 +195,7 @@ fn expand_subtree(
                 .into(),
             ),
             Op::Punct(puncts) => {
-                for punct in puncts {
+                for punct in puncts.as_slice() {
                     arena.push(
                         tt::Leaf::from({
                             let mut it = *punct;
@@ -222,7 +222,7 @@ fn expand_subtree(
             }
             Op::Repeat { tokens: subtree, kind, separator } => {
                 let ExpandResult { value: fragment, err: e } =
-                    expand_repeat(ctx, subtree, *kind, separator, arena, marker);
+                    expand_repeat(ctx, subtree, *kind, separator.as_deref(), arena, marker);
                 err = err.or(e);
                 push_fragment(ctx, arena, fragment)
             }
@@ -383,7 +383,7 @@ fn expand_repeat(
     ctx: &mut ExpandCtx<'_>,
     template: &MetaTemplate,
     kind: RepeatKind,
-    separator: &Option<Separator>,
+    separator: Option<&Separator>,
     arena: &mut Vec<tt::TokenTree<Span>>,
     marker: impl Fn(&mut Span) + Copy,
 ) -> ExpandResult<Fragment> {
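`expand_repeat` now takes `Option<&Separator>` instead of `&Option<Separator>`, which decouples the callee from how the caller stores the separator; since the field is now an `Option<Arc<Separator>>`, the call site above gets there with `Option::as_deref`. A sketch under those assumptions, with an illustrative simplified `Separator`:

```rust
use std::sync::Arc;

// Hypothetical simplified separator.
#[derive(Debug)]
struct Separator(char);

// `Option<&Separator>` accepts a borrow no matter how the caller owns it.
fn expand_repeat(separator: Option<&Separator>) {
    match separator {
        Some(sep) => println!("joining with {sep:?}"),
        None => println!("no separator"),
    }
}

fn main() {
    let stored: Option<Arc<Separator>> = Some(Arc::new(Separator(',')));
    // `as_deref` turns `Option<Arc<Separator>>` (borrowed) into `Option<&Separator>`.
    expand_repeat(stored.as_deref());

    let owned: Option<Separator> = None;
    expand_repeat(owned.as_ref());
}
```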
@@ -1,7 +1,9 @@
 //! Parser recognizes special macro syntax, `$var` and `$(repeat)*`, in token
 //! trees.

-use smallvec::{smallvec, SmallVec};
+use std::sync::Arc;
+
+use arrayvec::ArrayVec;
 use span::{Edition, Span, SyntaxContextId};
 use syntax::SmolStr;

@@ -86,14 +88,14 @@ pub(crate) enum Op {
     Repeat {
         tokens: MetaTemplate,
         kind: RepeatKind,
-        separator: Option<Separator>,
+        separator: Option<Arc<Separator>>,
     },
     Subtree {
         tokens: MetaTemplate,
         delimiter: tt::Delimiter<Span>,
     },
     Literal(tt::Literal<Span>),
-    Punct(SmallVec<[tt::Punct<Span>; 3]>),
+    Punct(Box<ArrayVec<tt::Punct<Span>, 3>>),
     Ident(tt::Ident<Span>),
 }

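This is the hunk the commit title refers to: the up-to-three inline `tt::Punct<Span>` values and the separator move behind a `Box` and an `Arc`, so the size of `Op` is no longer dominated by its largest payload. A rough, self-contained illustration with stand-in types; the real `tt::Punct<Span>` and `Separator` differ, so the printed numbers are only indicative (assumes the `arrayvec` and `smallvec` crates as dependencies):

```rust
use std::sync::Arc;

use arrayvec::ArrayVec;
use smallvec::SmallVec;

// Stand-in roughly the shape of `tt::Punct<Span>`: a char plus span data.
#[allow(dead_code)]
#[derive(Clone, Copy)]
struct Punct {
    char: char,
    span: [u32; 6],
}

#[allow(dead_code)]
struct Separator([Punct; 3]);

// Before: up to three puncts and the separator live inline in the enum.
#[allow(dead_code)]
enum OpBefore {
    Punct(SmallVec<[Punct; 3]>),
    Repeat { separator: Option<Separator> },
    Ident(u32),
}

// After: both payloads sit behind a pointer, so every variant stays small.
#[allow(dead_code)]
enum OpAfter {
    Punct(Box<ArrayVec<Punct, 3>>),
    Repeat { separator: Option<Arc<Separator>> },
    Ident(u32),
}

fn main() {
    println!("OpBefore: {} bytes", std::mem::size_of::<OpBefore>());
    println!("OpAfter:  {} bytes", std::mem::size_of::<OpAfter>());
}
```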
@@ -126,7 +128,7 @@ pub(crate) enum MetaVarKind {
 pub(crate) enum Separator {
     Literal(tt::Literal<Span>),
     Ident(tt::Ident<Span>),
-    Puncts(SmallVec<[tt::Punct<Span>; 3]>),
+    Puncts(ArrayVec<tt::Punct<Span>, 3>),
 }

 // Note that when we compare a Separator, we just care about its textual value.
@@ -165,7 +167,13 @@ fn next_op(
             src.next().expect("first token already peeked");
             // Note that the '$' itself is a valid token inside macro_rules.
             let second = match src.next() {
-                None => return Ok(Op::Punct(smallvec![*p])),
+                None => {
+                    return Ok(Op::Punct({
+                        let mut res = ArrayVec::new();
+                        res.push(*p);
+                        Box::new(res)
+                    }))
+                }
                 Some(it) => it,
             };
             match second {
@@ -173,7 +181,7 @@ fn next_op(
                 tt::DelimiterKind::Parenthesis => {
                     let (separator, kind) = parse_repeat(src)?;
                     let tokens = MetaTemplate::parse(edition, subtree, mode, new_meta_vars)?;
-                    Op::Repeat { tokens, separator, kind }
+                    Op::Repeat { tokens, separator: separator.map(Arc::new), kind }
                 }
                 tt::DelimiterKind::Brace => match mode {
                     Mode::Template => {
@@ -216,7 +224,11 @@ fn next_op(
                             "`$$` is not allowed on the pattern side",
                         ))
                     }
-                    Mode::Template => Op::Punct(smallvec![*punct]),
+                    Mode::Template => Op::Punct({
+                        let mut res = ArrayVec::new();
+                        res.push(*punct);
+                        Box::new(res)
+                    }),
                 },
                 tt::Leaf::Punct(_) | tt::Leaf::Literal(_) => {
                     return Err(ParseError::expected("expected ident"))
@@ -238,7 +250,7 @@ fn next_op(
                 tt::TokenTree::Leaf(tt::Leaf::Punct(_)) => {
                     // There's at least one punct so this shouldn't fail.
                     let puncts = src.expect_glued_punct().unwrap();
-                    Op::Punct(puncts)
+                    Op::Punct(Box::new(puncts))
                 }

                 tt::TokenTree::Subtree(subtree) => {
@@ -290,7 +302,7 @@ fn is_boolean_literal(lit: &tt::Literal<Span>) -> bool {
 }

 fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, RepeatKind), ParseError> {
-    let mut separator = Separator::Puncts(SmallVec::new());
+    let mut separator = Separator::Puncts(ArrayVec::new());
     for tt in src {
         let tt = match tt {
             tt::TokenTree::Leaf(leaf) => leaf,
@@ -312,7 +324,7 @@ fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, Repeat
                 '+' => RepeatKind::OneOrMore,
                 '?' => RepeatKind::ZeroOrOne,
                 _ => match &mut separator {
-                    Separator::Puncts(puncts) if puncts.len() != 3 => {
+                    Separator::Puncts(puncts) if puncts.len() < 3 => {
                         puncts.push(*punct);
                         continue;
                     }
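The guard changes from `!= 3` to `< 3` along with the switch to `ArrayVec`: a `SmallVec` would spill to the heap past its inline capacity, while pushing past an `ArrayVec`'s fixed capacity panics, so the bound is now stated directly. A small sketch of the guarded push:

```rust
use arrayvec::ArrayVec;

fn main() {
    let mut puncts: ArrayVec<char, 3> = ArrayVec::new();
    for c in ['<', '<', '=', '='] {
        // Mirrors the parser guard: a fourth push would panic, since an
        // ArrayVec has no heap spill-over like SmallVec.
        if puncts.len() < 3 {
            puncts.push(c);
        }
    }
    assert_eq!(puncts.as_slice(), &['<', '<', '=']);
    println!("{puncts:?}");
}
```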
@@ -3,7 +3,7 @@

 use core::fmt;

-use smallvec::{smallvec, SmallVec};
+use arrayvec::ArrayVec;
 use syntax::SyntaxKind;

 use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult};
@@ -93,13 +93,15 @@ impl<'a, S: Copy> TtIter<'a, S> {
     ///
     /// This method currently may return a single quotation, which is part of lifetime ident and
     /// conceptually not a punct in the context of mbe. Callers should handle this.
-    pub(crate) fn expect_glued_punct(&mut self) -> Result<SmallVec<[tt::Punct<S>; 3]>, ()> {
+    pub(crate) fn expect_glued_punct(&mut self) -> Result<ArrayVec<tt::Punct<S>, 3>, ()> {
         let tt::TokenTree::Leaf(tt::Leaf::Punct(first)) = self.next().ok_or(())?.clone() else {
             return Err(());
         };

+        let mut res = ArrayVec::new();
         if first.spacing == tt::Spacing::Alone {
-            return Ok(smallvec![first]);
+            res.push(first);
+            return Ok(res);
         }

         let (second, third) = match (self.peek_n(0), self.peek_n(1)) {
@@ -108,14 +110,19 @@ impl<'a, S: Copy> TtIter<'a, S> {
                 Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p3))),
             ) if p2.spacing == tt::Spacing::Joint => (p2, Some(p3)),
             (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), _) => (p2, None),
-            _ => return Ok(smallvec![first]),
+            _ => {
+                res.push(first);
+                return Ok(res);
+            }
         };

         match (first.char, second.char, third.map(|it| it.char)) {
             ('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => {
                 let _ = self.next().unwrap();
                 let _ = self.next().unwrap();
-                Ok(smallvec![first, *second, *third.unwrap()])
+                res.push(first);
+                res.push(*second);
+                res.push(*third.unwrap());
             }
             ('-' | '!' | '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _)
             | ('-' | '=' | '>', '>', _)
@@ -126,10 +133,12 @@ impl<'a, S: Copy> TtIter<'a, S> {
             | ('<', '<', _)
             | ('|', '|', _) => {
                 let _ = self.next().unwrap();
-                Ok(smallvec![first, *second])
+                res.push(first);
+                res.push(*second);
             }
-            _ => Ok(smallvec![first]),
+            _ => res.push(first),
         }
+        Ok(res)
     }

     pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree<S>> {
         self.inner.as_slice().get(n)
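`expect_glued_punct` keeps its "at most three puncts" contract but now expresses it in the return type: an `ArrayVec<_, 3>` has exactly that capacity and no heap fallback, and the function accumulates into one `res` buffer with a single `Ok(res)` at the end instead of building a `SmallVec` in each arm. A standalone sketch of the same shape, with illustrative names and `char` in place of `tt::Punct<S>`:

```rust
use arrayvec::ArrayVec;

// Collect up to three "glued" punctuation characters into a bounded buffer,
// in the spirit of the rewritten expect_glued_punct.
fn glue_puncts(input: &[char]) -> ArrayVec<char, 3> {
    let mut res = ArrayVec::new();
    for &c in input.iter().take(3) {
        res.push(c);
    }
    res
}

fn main() {
    let puncts = glue_puncts(&['.', '.', '=']);
    assert_eq!(puncts.len(), 3);
    assert_eq!(puncts.as_slice(), &['.', '.', '=']);
}
```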