Mirror of https://github.com/rust-lang/rust-analyzer.git
Turn macro_expand from query to normal function

parent b98597f06d
commit 7a8c4c001b

12 changed files with 15 additions and 36 deletions
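
In shape, the change is small: macro_expand stops being a salsa query on ExpandDatabase (so it loses its dedicated memoization and LRU cap) and becomes an ordinary function that takes the database as an explicit argument, while parse_macro_expansion stays a query and remains the cached layer. Below is a minimal, self-contained sketch of that shape; the types are simplified stand-ins for illustration, not rust-analyzer's real definitions.

// Stand-in types, only enough structure to show the query -> function change.
#[derive(Clone, Copy)]
struct MacroCallId(u32);

struct ExpandResult<T> {
    value: T,
    err: Option<String>,
}

// Queries that stay on the database trait (like parse_macro_expansion in the
// hunks below) still go through salsa and remain LRU-cached.
trait ExpandDatabase {
    fn macro_arg(&self, id: MacroCallId) -> String;
}

// After this commit, expansion is a plain function: it still reads from the
// database, but its own result is no longer memoized by salsa, and callers
// write `macro_expand(db, id)` instead of `db.macro_expand(id)`.
fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<String> {
    let arg = db.macro_arg(id);
    ExpandResult { value: format!("expanded: {arg}"), err: None }
}

The call-site change is visible in the parse_macro_expansion hunk further down: `db.macro_expand(macro_file.macro_call_id)` becomes `macro_expand(db, macro_file.macro_call_id)`.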
@@ -1,4 +1,4 @@
-/// File and span related types.
+//! File and span related types.
 // FIXME: This should probably be moved into its own crate.
 use std::fmt;
 
@@ -26,19 +26,15 @@ impl fmt::Display for SyntaxContextId {
 
 impl SyntaxContext for SyntaxContextId {
     const DUMMY: Self = Self::ROOT;
-    // veykril(HACK): salsa doesn't allow us fetching the id of the current input to be allocated so
-    // we need a special value that behaves as the current context.
 }
 // inherent trait impls please tyvm
 impl SyntaxContextId {
-    // TODO: This is very much UB, salsa exposes no way to create an InternId in a const context
-    // currently (which kind of makes sense but we need it here!)
     pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) });
-    // TODO: This is very much UB, salsa exposes no way to create an InternId in a const context
-    // currently (which kind of makes sense but we need it here!)
+    // veykril(HACK): salsa doesn't allow us fetching the id of the current input to be allocated so
+    // we need a special value that behaves as the current context.
     pub const SELF_REF: Self =
         SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) });
-    /// Used syntax fixups
+    // Used for syntax fixups
     pub const FAKE: Self = SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 2) });
 
     pub fn is_root(self) -> bool {
@@ -74,7 +74,6 @@ fn foo() {
 }
 
 #[test]
-#[ignore] // TODO
 fn attribute_macro_syntax_completion_2() {
     // common case of dot completion while typing
     check(
@@ -146,9 +146,6 @@ pub trait ExpandDatabase: SourceDatabase {
         id: AstId<ast::Macro>,
     ) -> Arc<DeclarativeMacroExpander>;
 
-    /// Expand macro call to a token tree.
-    // This query is LRU cached
-    fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
     #[salsa::invoke(crate::builtin_fn_macro::include_arg_to_tt)]
     fn include_expand(
         &self,
@@ -315,7 +312,7 @@ fn parse_macro_expansion(
     macro_file: MacroFileId,
 ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
     let _p = profile::span("parse_macro_expansion");
-    let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id);
+    let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id);
 
     let expand_to = macro_expand_to(db, macro_file.macro_call_id);
 
@@ -1,3 +1,4 @@
+//! Things to wrap other things in file ids.
 use std::iter;
 
 use base_db::{
@@ -52,7 +52,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
     while let Some(event) = preorder.next() {
         let syntax::WalkEvent::Enter(node) = event else { continue };
 
-        /* TODO
+        /*
         if can_handle_error(&node) && has_error_to_handle(&node) {
             // the node contains an error node, we have to completely replace it by something valid
             let (original_tree, new_tmap, new_next_id) =
@@ -295,7 +295,7 @@ pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo)
             tt::TokenTree::Leaf(leaf) => leaf.span().ctx != SyntaxContextId::FAKE,
             tt::TokenTree::Subtree(st) => st.delimiter.open.ctx != SyntaxContextId::FAKE,
         })
-        // .flat_map(|tt| match tt { TODO
+        // .flat_map(|tt| match tt {
         //     tt::TokenTree::Subtree(mut tt) => {
         //         reverse_fixups(&mut tt, undo_info);
         //         SmallVec::from_const([tt.into()])
@@ -1,3 +1,5 @@
+//! Spanmaps allow turning absolute ranges into relative ranges for incrementality purposes as well
+//! as associating spans with text ranges in a particular file.
 use base_db::{
     span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
     FileId,
@@ -23,7 +23,7 @@ pub use hir_def::db::{
 };
 pub use hir_expand::db::{
     AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
-    ExpandProcMacroQuery, IncludeExpandQuery, InternMacroCallQuery, MacroArgQuery,
-    MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery,
+    ExpandProcMacroQuery, IncludeExpandQuery, InternMacroCallQuery, InternSyntaxContextQuery,
+    MacroArgQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery,
 };
 pub use hir_ty::db::*;
@@ -9,7 +9,6 @@ fn check(ra_fixture: &str, expect: Expect) {
 }
 
 #[test]
-#[ignore] // todo
 fn complete_dot_in_attr() {
     check(
         r#"
@@ -41,7 +40,6 @@ fn main() {
 }
 
 #[test]
-#[ignore] // TODO
 fn complete_dot_in_attr2() {
     check(
         r#"
@@ -101,8 +101,8 @@ impl RootDatabase {
             hir::db::ExpandProcMacroQuery
             hir::db::IncludeExpandQuery
             hir::db::InternMacroCallQuery
+            hir::db::InternSyntaxContextQuery
             hir::db::MacroArgQuery
-            hir::db::MacroExpandQuery
             hir::db::ParseMacroExpansionQuery
             hir::db::RealSpanMapQuery
 
@@ -157,7 +157,6 @@ impl RootDatabase {
         base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
         // macro expansions are usually rather small, so we can afford to keep more of them alive
         hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
-        hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
     }
 
     pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap<Box<str>, usize>) {
@@ -175,12 +174,6 @@ impl RootDatabase {
                 .copied()
                 .unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP),
         );
-        hir_db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(
-            lru_capacities
-                .get(stringify!(MacroExpandQuery))
-                .copied()
-                .unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP),
-            );
 
         macro_rules! update_lru_capacity_per_query {
             ($( $module:ident :: $query:ident )*) => {$(
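With macro_expand no longer a query, the two places that configured an LRU capacity for MacroExpandQuery go away, and parse_macro_expansion is the only macro-expansion query left with an enlarged cache. A small sketch of the remaining setup, reusing the in_db_mut/set_lru_capacity calls shown in the hunks above; the helper name configure_macro_lru is hypothetical, not part of the codebase.

// Hypothetical helper mirroring what remains after this commit: only the
// parse_macro_expansion query still gets the enlarged LRU capacity, since
// macro_expand is a plain function with no per-query cache to configure.
fn configure_macro_lru(db: &mut RootDatabase, lru_capacity: usize) {
    base_db::ParseQuery.in_db_mut(db).set_lru_capacity(lru_capacity);
    // macro expansions are usually rather small, so we can afford to keep more of them alive
    hir::db::ParseMacroExpansionQuery.in_db_mut(db).set_lru_capacity(4 * lru_capacity);
}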
|
|
|
@@ -43,7 +43,7 @@ use serde::{Deserialize, Serialize};
 
 use crate::msg::ENCODE_CLOSE_SPAN_VERSION;
 
-pub type SpanDataIndexMap = IndexSet<SpanData>;
+type SpanDataIndexMap = IndexSet<SpanData>;
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash)]
 pub struct TokenId(pub u32);
@@ -54,12 +54,6 @@ impl std::fmt::Debug for TokenId {
     }
 }
 
-impl TokenId {
-    pub const DEF_SITE: Self = TokenId(0);
-    pub const CALL_SITE: Self = TokenId(0);
-    pub const MIXED_SITE: Self = TokenId(0);
-}
-
 impl tt::Span for TokenId {
     const DUMMY: Self = TokenId(!0);
 
@@ -31,9 +31,8 @@ use std::{
     time::SystemTime,
 };
 
-use ::tt::Span;
 use proc_macro_api::{
-    msg::{self, ExpnGlobals, TokenId, CURRENT_API_VERSION, HAS_GLOBAL_SPANS},
+    msg::{self, ExpnGlobals, TokenId, CURRENT_API_VERSION},
     ProcMacroKind,
 };
 