Option begone part 1
This commit is contained in:
parent 0f4ffaa5af
commit 96a774261f
8 changed files with 94 additions and 105 deletions
@@ -151,7 +151,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
         if let Some(err) = exp.err {
             format_to!(expn_text, "/* error: {} */", err);
         }
-        if let Some((parse, token_map)) = exp.value {
+        let (parse, token_map) = exp.value;
         if expect_errors {
             assert!(!parse.errors().is_empty(), "no parse errors in expansion");
             for e in parse.errors() {
@@ -179,12 +179,11 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
                 .collect::<String>();
             format_to!(expn_text, "\n{}", tree)
         }
-        }
         let range = call.syntax().text_range();
         let range: Range<usize> = range.into();

         if show_token_ids {
-            if let Some((tree, map, _)) = arg.as_deref() {
+            let (tree, map, _) = &*arg;
             let tt_range = call.token_tree().unwrap().syntax().text_range();
             let mut ranges = Vec::new();
             extract_id_ranges(&mut ranges, map, tree);
@@ -192,7 +191,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
                 let idx = (tt_range.start() + range.end()).into();
                 text_edits.push((idx..idx, format!("#{}", id.0)));
             }
-        }
         text_edits.push((range.start..range.start, "// ".into()));
         call.to_string().match_indices('\n').for_each(|(offset, _)| {
             let offset = offset + 1 + range.start;
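All three hunks above are the same refactor: `exp.value` and `arg` are no longer `Option`s, so the `if let Some(...)` unwrapping (and one level of braces) collapses into a plain destructuring `let`. A minimal standalone sketch of the pattern, with a made-up `Expansion` type standing in for the test harness:

    struct Expansion {
        value: (String, Vec<u32>), // was Option<(String, Vec<u32>)> before the change
        err: Option<String>,
    }

    fn render(exp: Expansion) -> String {
        let mut out = String::new();
        if let Some(err) = exp.err {
            out.push_str(&format!("/* error: {} */", err));
        }
        // Before: if let Some((parse, token_map)) = exp.value { ... }
        // After: the value is always present, so destructure it directly.
        let (parse, _token_map) = exp.value;
        out.push_str(&parse);
        out
    }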
@@ -1371,7 +1371,7 @@ impl DefCollector<'_> {

                     self.def_map.diagnostics.push(diag);
                 }
-                if let Some(errors) = value {
+                if let errors @ [_, ..] = &*value {
                     let loc: MacroCallLoc = self.db.lookup_intern_macro_call(macro_call_id);
                     let diag = DefDiagnostic::macro_expansion_parse_error(module_id, loc.kind, &errors);
                     self.def_map.diagnostics.push(diag);
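Here `value` went from `Option<Box<[SyntaxError]>>` to a plain boxed slice, so "is there anything to report" is now expressed with a slice pattern: `[_, ..]` matches any slice with at least one element, and the `errors @` prefix binds the whole matched slice. A self-contained sketch of the same idiom:

    fn report(errors: &[String]) {
        // `[_, ..]` matches any slice with at least one element, and
        // `errs @ pattern` binds the whole matched slice to `errs`.
        if let errs @ [_, ..] = errors {
            println!("{} error(s), first: {}", errs.len(), errs[0]);
        }
    }

    fn main() {
        report(&[]); // no match: the empty slice is filtered out
        report(&["unexpected token".to_string()]);
    }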
@@ -108,7 +108,7 @@ pub trait ExpandDatabase: SourceDatabase {
     fn parse_macro_expansion(
         &self,
         macro_file: MacroFile,
-    ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>>;
+    ) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;

     /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
     /// reason why we use salsa at all.
@@ -123,7 +123,7 @@ pub trait ExpandDatabase: SourceDatabase {
     fn macro_arg(
         &self,
         id: MacroCallId,
-    ) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>;
+    ) -> Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>;
     /// Extracts syntax node, corresponding to a macro call. That's a firewall
     /// query, only typing in the macro call itself changes the returned
     /// subtree.
@@ -133,7 +133,7 @@ pub trait ExpandDatabase: SourceDatabase {
     fn macro_def(&self, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError>;

     /// Expand macro call to a token tree.
-    fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>;
+    fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
     /// Special case of the previous query for procedural macros. We can't LRU
     /// proc macros, since they are not deterministic in general, and
     /// non-determinism breaks salsa in a very, very, very bad way. @edwin0cheng
@@ -143,7 +143,7 @@ pub trait ExpandDatabase: SourceDatabase {
     fn parse_macro_expansion_error(
         &self,
         macro_call: MacroCallId,
-    ) -> ExpandResult<Option<Box<[SyntaxError]>>>;
+    ) -> ExpandResult<Box<[SyntaxError]>>;

     fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
 }
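Taken together, these four signature changes are the point of the commit: every expansion query now always yields a usable value, and failure travels only in the error slot. Judging from the struct literals elsewhere in the diff (and the `mbe::ValueResult` destructuring below), `ExpandResult` pairs a value with an optional error. A minimal sketch of that shape, not the actual rust-analyzer definition:

    pub struct ExpandError(pub String); // stand-in for the real error type

    pub struct ExpandResult<T> {
        pub value: T,
        pub err: Option<ExpandError>,
    }

    impl<T> ExpandResult<T> {
        // Mirrors the ExpandResult::ok(...) constructor used further down.
        pub fn ok(value: T) -> Self {
            ExpandResult { value, err: None }
        }
    }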
@@ -257,12 +257,12 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
 }

 fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
-    match file_id.repr() {
-        HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
+    Some(match file_id.repr() {
+        HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(),
         HirFileIdRepr::MacroFile(macro_file) => {
-            db.parse_macro_expansion(macro_file).value.map(|(it, _)| it.syntax_node())
+            db.parse_macro_expansion(macro_file).value.0.syntax_node()
-        }
         }
+    })
 }

 fn parse_or_expand_with_err(
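`parse_or_expand` keeps returning `Option<SyntaxNode>` for its callers, but the `Some` moves out of the arms: wrapping the whole `match` once replaces a `Some(...)` in every arm. The idiom in isolation:

    fn describe(n: i32) -> Option<&'static str> {
        // One Some(...) around the match instead of one per arm.
        Some(match n {
            0 => "zero",
            n if n > 0 => "positive",
            _ => "negative",
        })
    }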
@@ -272,7 +272,7 @@ fn parse_or_expand_with_err(
     match file_id.repr() {
         HirFileIdRepr::FileId(file_id) => ExpandResult::ok(Some(db.parse(file_id).to_syntax())),
         HirFileIdRepr::MacroFile(macro_file) => {
-            db.parse_macro_expansion(macro_file).map(|it| it.map(|(parse, _)| parse))
+            db.parse_macro_expansion(macro_file).map(|it| Some(it.0))
         }
     }
 }
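Note the two different `map`s in the old line: the outer one transforms the `ExpandResult` value, the inner one was `Option::map`. With the `Option` gone, only the outer map remains, re-wrapping in `Some` because this particular caller still returns an optional parse. A plausible sketch of that value-mapping helper, consistent with the `ExpandResult` sketch above (the real definition lives in the `mbe` crate):

    impl<T> ExpandResult<T> {
        // Transform the carried value; the error, if any, rides along unchanged.
        pub fn map<U>(self, f: impl FnOnce(T) -> U) -> ExpandResult<U> {
            ExpandResult { value: f(self.value), err: self.err }
        }
    }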
@@ -280,9 +280,9 @@ fn parse_or_expand_with_err(
 fn parse_macro_expansion(
     db: &dyn ExpandDatabase,
     macro_file: MacroFile,
-) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> {
+) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
     let _p = profile::span("parse_macro_expansion");
-    let mbe::ValueResult { value, err } = db.macro_expand(macro_file.macro_call_id);
+    let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id);

     if let Some(err) = &err {
         if tracing::enabled!(tracing::Level::DEBUG) {
@@ -308,10 +308,6 @@ fn parse_macro_expansion(
             );
         }
     }
-    let tt = match value {
-        Some(tt) => tt,
-        None => return ExpandResult { value: None, err },
-    };

     let expand_to = macro_expand_to(db, macro_file.macro_call_id);

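The `value: tt` rename in the previous hunk's destructuring is what makes this deletion possible: a struct pattern can bind a field under a new name, so the token tree lands directly in `tt` with no intermediate `match`. The idiom in isolation, with a toy `ValueResult`:

    struct ValueResult<T> {
        value: T,
        err: Option<String>,
    }

    fn main() {
        let res = ValueResult { value: 42, err: None };
        // `value: tt` binds the `value` field to a local named `tt`.
        let ValueResult { value: tt, err } = res;
        println!("tt = {tt}, err = {err:?}");
    }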
@@ -320,14 +316,23 @@ fn parse_macro_expansion(

     let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);

-    ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err }
+    ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
 }

 fn macro_arg(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
+) -> Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)> {
-    let arg = db.macro_arg_text(id)?;
+    let Some(arg) = db.macro_arg_text(id) else {
+        return Arc::new((
+            tt::Subtree {
+                delimiter: tt::Delimiter::UNSPECIFIED,
+                token_trees: Vec::new(),
+            },
+            Default::default(),
+            Default::default())
+        );
+    };
     let loc = db.lookup_intern_macro_call(id);

     let node = SyntaxNode::new_root(arg);
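The `?` operator can no longer be used once `macro_arg` stops returning `Option`, so the early exit becomes a `let ... else` with an explicit fallback: the `else` block must diverge, here by returning the empty-subtree default. A self-contained sketch of the construct:

    fn first_word(s: &str) -> String {
        // `let ... else` binds on success; the else block must diverge.
        let Some(word) = s.split_whitespace().next() else {
            return String::new(); // explicit fallback replaces what `?` used to do
        };
        word.to_string()
    }

    fn main() {
        assert_eq!(first_word("hello world"), "hello");
        assert_eq!(first_word("   "), "");
    }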
@@ -346,7 +351,7 @@ fn macro_arg(
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
         tt.delimiter = tt::Delimiter::unspecified();
     }
-    Some(Arc::new((tt, tmap, fixups.undo_info)))
+    Arc::new((tt, tmap, fixups.undo_info))
 }

 fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
@@ -448,29 +453,13 @@ fn macro_def(
     }
 }

-fn macro_expand(
-    db: &dyn ExpandDatabase,
-    id: MacroCallId,
-    // FIXME: Remove the OPtion if possible
-) -> ExpandResult<Option<Arc<tt::Subtree>>> {
+fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
     let _p = profile::span("macro_expand");
     let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
     if let Some(eager) = &loc.eager {
-        return ExpandResult {
-            value: Some(eager.arg_or_expansion.clone()),
-            err: eager.error.clone(),
-        };
+        return ExpandResult { value: eager.arg_or_expansion.clone(), err: eager.error.clone() };
     }

-    let macro_arg = match db.macro_arg(id) {
-        Some(it) => it,
-        None => {
-            return ExpandResult::only_err(ExpandError::Other(
-                "Failed to lower macro args to token tree".into(),
-            ))
-        }
-    };
-
     let expander = match db.macro_def(loc.def) {
         Ok(it) => it,
         // FIXME: This is weird -- we effectively report macro *definition*
@@ -478,49 +467,52 @@ fn macro_expand(
         // be reported at the definition site when we construct a def map.
         // (Note we do report them also at the definition site in the late diagnostic pass)
         Err(err) => {
-            return ExpandResult::only_err(ExpandError::Other(
-                format!("invalid macro definition: {err}").into(),
-            ))
+            return ExpandResult {
+                value: Arc::new(tt::Subtree {
+                    delimiter: tt::Delimiter::UNSPECIFIED,
+                    token_trees: vec![],
+                }),
+                err: Some(ExpandError::Other(format!("invalid macro definition: {err}").into())),
+            }
         }
     };
+    let macro_arg = db.macro_arg(id);
     let ExpandResult { value: mut tt, err } = expander.expand(db, id, &macro_arg.0);
     // Set a hard limit for the expanded tt
     let count = tt.count();
     if TOKEN_LIMIT.check(count).is_err() {
-        return ExpandResult::only_err(ExpandError::Other(
+        return ExpandResult {
+            value: Arc::new(tt::Subtree {
+                delimiter: tt::Delimiter::UNSPECIFIED,
+                token_trees: vec![],
+            }),
+            err: Some(ExpandError::Other(
                 format!(
                     "macro invocation exceeds token limit: produced {} tokens, limit is {}",
                     count,
                     TOKEN_LIMIT.inner(),
                 )
                 .into(),
-        ));
+            )),
+        };
     }

     fixup::reverse_fixups(&mut tt, &macro_arg.1, &macro_arg.2);

-    ExpandResult { value: Some(Arc::new(tt)), err }
+    ExpandResult { value: Arc::new(tt), err }
 }

 fn parse_macro_expansion_error(
     db: &dyn ExpandDatabase,
     macro_call_id: MacroCallId,
-) -> ExpandResult<Option<Box<[SyntaxError]>>> {
+) -> ExpandResult<Box<[SyntaxError]>> {
     db.parse_macro_expansion(MacroFile { macro_call_id })
-        .map(|it| it.map(|(it, _)| it.errors().to_vec().into_boxed_slice()))
+        .map(|it| it.0.errors().to_vec().into_boxed_slice())
 }

 fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> {
     let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
-    let macro_arg = match db.macro_arg(id) {
-        Some(it) => it,
-        None => {
-            return ExpandResult::with_err(
-                tt::Subtree::empty(),
-                ExpandError::Other("No arguments for proc-macro".into()),
-            )
-        }
-    };
+    let macro_arg = db.macro_arg(id);

     let expander = match loc.def.kind {
         MacroDefKind::ProcMacro(expander, ..) => expander,
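Both early returns above trade `ExpandResult::only_err(...)` for an explicit placeholder value: an empty `tt::Subtree` alongside the error. A standalone sketch of that pattern with toy stand-in types (the names mirror the diff, but none of this is the actual rust-analyzer code):

    use std::sync::Arc;

    struct ExpandResult<T> {
        value: T,
        err: Option<String>, // stand-in for ExpandError
    }

    #[derive(Default)]
    struct Subtree {
        token_trees: Vec<String>, // stand-in for tt::TokenTree
    }

    // The value slot is always populated, even on failure: callers get an
    // empty subtree they can keep working with, plus the error to report.
    fn err_with_empty_subtree(err: String) -> ExpandResult<Arc<Subtree>> {
        ExpandResult { value: Arc::new(Subtree::default()), err: Some(err) }
    }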
@@ -14,7 +14,7 @@ use tt::token_id::Subtree;
 /// The result of calculating fixes for a syntax node -- a bunch of changes
 /// (appending to and replacing nodes), the information that is needed to
 /// reverse those changes afterwards, and a token map.
-#[derive(Debug)]
+#[derive(Debug, Default)]
 pub(crate) struct SyntaxFixups {
     pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
     pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
|
@ -24,7 +24,7 @@ pub(crate) struct SyntaxFixups {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// This is the information needed to reverse the fixups.
|
/// This is the information needed to reverse the fixups.
|
||||||
#[derive(Debug, PartialEq, Eq)]
|
#[derive(Debug, Default, PartialEq, Eq)]
|
||||||
pub struct SyntaxFixupUndoInfo {
|
pub struct SyntaxFixupUndoInfo {
|
||||||
original: Vec<Subtree>,
|
original: Vec<Subtree>,
|
||||||
}
|
}
|
||||||
|
|
|
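These `Default` derives are presumably what let the `Default::default()` calls in the new `macro_arg` fallback compile: an empty `SyntaxFixupUndoInfo` can now be conjured without running the fixup machinery. A derived `Default` simply defaults every field, as in this sketch:

    // #[derive(Default)] requires every field type to implement Default;
    // the generated impl just calls Default::default() for each field.
    #[derive(Debug, Default, PartialEq, Eq)]
    struct UndoInfo {
        original: Vec<String>, // Vec<T>::default() is an empty vec
    }

    fn main() {
        assert_eq!(UndoInfo::default(), UndoInfo { original: Vec::new() });
    }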
@@ -200,8 +200,8 @@ fn make_hygiene_info(
     });

     let macro_def = db.macro_def(loc.def).ok()?;
-    let (_, exp_map) = db.parse_macro_expansion(macro_file).value?;
-    let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
+    let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
+    let macro_arg = db.macro_arg(macro_file.macro_call_id);

     Some(HygieneInfo {
         file: macro_file,
@@ -257,8 +257,8 @@ impl HirFileId {
         let arg_tt = loc.kind.arg(db)?;

         let macro_def = db.macro_def(loc.def).ok()?;
-        let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?;
-        let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
+        let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
+        let macro_arg = db.macro_arg(macro_file.macro_call_id);

         let def = loc.def.ast_id().left().and_then(|id| {
             let def_tt = match id.to_node(db) {
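Same cleanup at the call sites: `?` on an `Option` both unwraps and early-returns, so once the queries are infallible the operator simply comes off, while the surrounding function keeps returning `Option` for its genuinely fallible steps. In miniature:

    fn lookup(key: &str) -> Option<String> {
        Some(format!("value for {key}"))
    }

    fn infallible(key: &str) -> String {
        format!("value for {key}")
    }

    fn demo(key: &str) -> Option<String> {
        let a = lookup(key)?;    // still fallible: keep the `?`
        let b = infallible(key); // now infallible: the `?` comes off
        Some(format!("{a}/{b}"))
    }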
@@ -120,12 +120,12 @@ impl FromIterator<TableEntry<FileId, Parse<ast::SourceFile>>> for SyntaxTreeStats
     }
 }

-impl<M> FromIterator<TableEntry<MacroFile, ExpandResult<Option<(Parse<SyntaxNode>, M)>>>>
+impl<M> FromIterator<TableEntry<MacroFile, ExpandResult<(Parse<SyntaxNode>, M)>>>
     for SyntaxTreeStats
 {
     fn from_iter<T>(iter: T) -> SyntaxTreeStats
     where
-        T: IntoIterator<Item = TableEntry<MacroFile, ExpandResult<Option<(Parse<SyntaxNode>, M)>>>>,
+        T: IntoIterator<Item = TableEntry<MacroFile, ExpandResult<(Parse<SyntaxNode>, M)>>>,
     {
         let mut res = SyntaxTreeStats::default();
         for entry in iter {
@@ -180,10 +180,9 @@ impl flags::AnalysisStats {

         let mut total_macro_file_size = Bytes::default();
         for e in hir::db::ParseMacroExpansionQuery.in_db(db).entries::<Vec<_>>() {
-            if let Some((val, _)) = db.parse_macro_expansion(e.key).value {
+            let val = db.parse_macro_expansion(e.key).value.0;
             total_macro_file_size += syntax_len(val.syntax_node())
         }
-        }
         eprintln!("source files: {total_file_size}, macro files: {total_macro_file_size}");
     }
