Rename and fix typos

parent d8b7ba201e
commit e6709f64af

4 changed files with 43 additions and 24 deletions
@@ -18,6 +18,12 @@ pub struct ParseMacroWithInfo {
     pub expansion_info: Arc<ExpansionInfo>,
 }
 
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct MacroExpandInfo {
+    pub arg_map: Arc<mbe::TokenMap>,
+    pub def_map: Arc<mbe::TokenMap>,
+}
+
 // FIXME: rename to ExpandDatabase
 #[salsa::query_group(AstDatabaseStorage)]
 pub trait AstDatabase: SourceDatabase {
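
Note: the new MacroExpandInfo struct replaces the bare (Arc<mbe::TokenMap>, Arc<mbe::TokenMap>) pair used below. Because both fields have the same type, a tuple lets the argument map and the definition map be transposed without any compile error, while named fields document which map is which; the PartialEq/Eq/Clone derives also let the value be compared and cloned as part of a cached salsa query result. A minimal sketch of the hazard, using a stand-in TokenMap type rather than the real mbe::TokenMap:

    use std::sync::Arc;

    #[derive(Debug, Default, PartialEq, Eq)]
    struct TokenMap; // stand-in for mbe::TokenMap

    // With a tuple, both positions type-check either way round:
    fn as_tuple(arg: Arc<TokenMap>, def: Arc<TokenMap>) -> (Arc<TokenMap>, Arc<TokenMap>) {
        (def, arg) // swapped by mistake, but still compiles
    }

    // With named fields, the call site says which map is which:
    struct MacroExpandInfo {
        arg_map: Arc<TokenMap>,
        def_map: Arc<TokenMap>,
    }

    fn as_struct(arg: Arc<TokenMap>, def: Arc<TokenMap>) -> MacroExpandInfo {
        MacroExpandInfo { arg_map: arg, def_map: def }
    }

    fn main() {
        let (a, d) = (Arc::new(TokenMap), Arc::new(TokenMap));
        let info = as_struct(a.clone(), d.clone());
        let _tuple = as_tuple(a, d); // compiles even though it swapped them
        let _ = (info.arg_map, info.def_map);
    }
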
@@ -35,7 +41,7 @@ pub trait AstDatabase: SourceDatabase {
     fn macro_expand(
         &self,
         macro_call: MacroCallId,
-    ) -> Result<(Arc<tt::Subtree>, (Arc<mbe::TokenMap>, Arc<mbe::TokenMap>)), String>;
+    ) -> Result<(Arc<tt::Subtree>, MacroExpandInfo), String>;
 
     fn macro_expansion_info(&self, macro_file: MacroFile) -> Option<Arc<ExpansionInfo>>;
 }
@@ -77,7 +83,7 @@ pub(crate) fn macro_arg(
 pub(crate) fn macro_expand(
     db: &dyn AstDatabase,
     id: MacroCallId,
-) -> Result<(Arc<tt::Subtree>, (Arc<mbe::TokenMap>, Arc<mbe::TokenMap>)), String> {
+) -> Result<(Arc<tt::Subtree>, MacroExpandInfo), String> {
     let loc = db.lookup_intern_macro(id);
     let macro_arg = db.macro_arg(id).ok_or("Fail to args in to tt::TokenTree")?;
 
@@ -89,7 +95,10 @@ pub(crate) fn macro_expand(
         return Err(format!("Total tokens count exceed limit : count = {}", count));
     }
 
-    Ok((Arc::new(tt), (macro_arg.1.clone(), macro_rules.1.clone())))
+    Ok((
+        Arc::new(tt),
+        MacroExpandInfo { arg_map: macro_arg.1.clone(), def_map: macro_rules.1.clone() },
+    ))
 }
 
 pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
@@ -133,7 +142,7 @@ pub(crate) fn parse_macro_with_info(
     };
 
     res.map(|(parsed, exp_map)| {
-        let (arg_map, def_map) = tt.1;
+        let expand_info = tt.1;
        let loc: MacroCallLoc = db.lookup_intern_macro(macro_call_id);
 
         let def_start =
@@ -141,11 +150,12 @@ pub(crate) fn parse_macro_with_info(
         let arg_start =
             loc.ast_id.to_node(db).token_tree().map(|t| t.syntax().text_range().start());
 
-        let arg_map =
-            arg_start.map(|start| exp_map.ranges(&arg_map, start)).unwrap_or_else(|| Vec::new());
-        let def_map =
-            def_start.map(|start| exp_map.ranges(&def_map, start)).unwrap_or_else(|| Vec::new());
+        let arg_map = arg_start
+            .map(|start| exp_map.ranges(&expand_info.arg_map, start))
+            .unwrap_or_else(|| Vec::new());
+        let def_map = def_start
+            .map(|start| exp_map.ranges(&expand_info.def_map, start))
+            .unwrap_or_else(|| Vec::new());
 
         let info = ExpansionInfo { arg_map, def_map };
 
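
Note: the old code destructured tt.1 into arg_map/def_map and then immediately rebound those same names to values of a different type; keeping tt.1 whole as expand_info removes that shadowing. As an aside, the .map(...).unwrap_or_else(|| Vec::new()) chains could equivalently end in .unwrap_or_default(), since the default Vec is empty. A minimal sketch with stand-in types, not the real ranges API:

    // Equivalent shapes: .unwrap_or_else(|| Vec::new()) and
    // .unwrap_or_default() both yield an empty Vec when the Option is None.
    fn ranges_or_empty(start: Option<u32>) -> Vec<(u32, u32)> {
        start.map(|s| vec![(s, s + 1)]).unwrap_or_default()
    }

    fn main() {
        assert_eq!(ranges_or_empty(Some(3)), vec![(3, 4)]);
        assert!(ranges_or_empty(None).is_empty());
    }
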
@@ -132,7 +132,7 @@ impl MacroCallId {
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
-/// ExpansionInfo mainly describle how to map text range between src and expaned macro
+/// ExpansionInfo mainly describes how to map text range between src and expanded macro
 pub struct ExpansionInfo {
     pub arg_map: Vec<(TextRange, TextRange)>,
     pub def_map: Vec<(TextRange, TextRange)>,
@@ -26,7 +26,11 @@ static TOKEN_MAP_COUNTER: AtomicU32 = AtomicU32::new(0);
 
 /// Generate an unique token map id for each instance
 fn make_uniq_token_map_id() -> u32 {
-    TOKEN_MAP_COUNTER.fetch_add(1, Ordering::SeqCst)
+    let res = TOKEN_MAP_COUNTER.fetch_add(1, Ordering::SeqCst);
+    if res == std::u32::MAX {
+        panic!("TOKEN_MAP_COUNTER is overflowed");
+    }
+    res
 }
 
 impl std::default::Default for TokenMap {
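
Note: AtomicU32::fetch_add wraps on overflow, so without a guard the counter would silently hand out duplicate ids after 2^32 allocations. Panicking when fetch_add returns u32::MAX, the last distinct value, surfaces the bug instead. A self-contained sketch of the same pattern (names are illustrative, not the real ones):

    use std::sync::atomic::{AtomicU32, Ordering};

    static COUNTER: AtomicU32 = AtomicU32::new(0);

    /// Hands out 0, 1, 2, ... and panics instead of wrapping back to 0.
    fn next_id() -> u32 {
        let id = COUNTER.fetch_add(1, Ordering::SeqCst);
        if id == u32::MAX {
            // fetch_add has already wrapped the stored value to 0 here,
            // so later calls would repeat ids; fail loudly instead.
            panic!("id counter overflowed");
        }
        id
    }

    fn main() {
        assert_eq!(next_id(), 0);
        assert_eq!(next_id(), 1);
    }
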
@@ -35,10 +39,9 @@ impl std::default::Default for TokenMap {
     }
 }
 
-/// Maps Relative range of the expanded syntax node to `tt::TokenId`
+/// Maps relative range of the expanded syntax node to `tt::TokenId`
 #[derive(Debug, PartialEq, Eq, Default)]
 pub struct ExpandedRangeMap {
-    /// Maps `tt::TokenId` to the *relative* source range.
     ranges: Vec<(TextRange, tt::TokenId)>,
 }
 
@@ -85,14 +88,13 @@ fn fragment_to_syntax_node(
     };
     let buffer = TokenBuffer::new(&tokens);
     let mut token_source = SubtreeTokenSource::new(&buffer);
-    let mut range_map = ExpandedRangeMap::default();
-    let mut tree_sink = TtTreeSink::new(buffer.begin(), &mut range_map);
+    let mut tree_sink = TtTreeSink::new(buffer.begin());
     ra_parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
     if tree_sink.roots.len() != 1 {
         return Err(ExpandError::ConversionError);
     }
     //FIXME: would be cool to report errors
-    let parse = tree_sink.inner.finish();
+    let (parse, range_map) = tree_sink.finish();
     Ok((parse, range_map))
 }
 
@@ -320,7 +322,7 @@ struct TtTreeSink<'a> {
     cursor: Cursor<'a>,
     text_pos: TextUnit,
     inner: SyntaxTreeBuilder,
-    range_map: &'a mut ExpandedRangeMap,
+    range_map: ExpandedRangeMap,
 
     // Number of roots
     // Use for detect ill-form tree which is not single root
@@ -328,16 +330,20 @@ struct TtTreeSink<'a> {
 }
 
 impl<'a> TtTreeSink<'a> {
-    fn new(cursor: Cursor<'a>, range_map: &'a mut ExpandedRangeMap) -> Self {
+    fn new(cursor: Cursor<'a>) -> Self {
         TtTreeSink {
             buf: String::new(),
             cursor,
             text_pos: 0.into(),
             inner: SyntaxTreeBuilder::default(),
             roots: smallvec::SmallVec::new(),
-            range_map,
+            range_map: ExpandedRangeMap::default(),
         }
     }
+
+    fn finish(self) -> (Parse<SyntaxNode>, ExpandedRangeMap) {
+        (self.inner.finish(), self.range_map)
+    }
 }
 
 fn delim_to_str(d: tt::Delimiter, closing: bool) -> SmolStr {
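
Note: instead of threading a &'a mut ExpandedRangeMap through the sink, the sink now owns its map and a consuming finish(self) hands back the parse tree and the map together, so the caller never juggles a live mutable borrow alongside the sink. A minimal sketch of this consume-on-finish shape, with stand-in types rather than the real TtTreeSink:

    #[derive(Debug, Default, PartialEq)]
    struct RangeMap {
        ranges: Vec<(u32, u32)>,
    }

    struct Sink {
        text: String,  // stands in for SyntaxTreeBuilder
        map: RangeMap, // owned, not &mut borrowed from the caller
    }

    impl Sink {
        fn new() -> Sink {
            Sink { text: String::new(), map: RangeMap::default() }
        }

        fn push(&mut self, s: &str, at: u32) {
            self.map.ranges.push((at, at + s.len() as u32));
            self.text.push_str(s);
        }

        // Consuming self moves every owned piece out in one step; the sink
        // cannot be used again afterwards, which the &mut version could not
        // guarantee.
        fn finish(self) -> (String, RangeMap) {
            (self.text, self.map)
        }
    }

    fn main() {
        let mut sink = Sink::new();
        sink.push("fn", 0);
        let (text, map) = sink.finish();
        assert_eq!(text, "fn");
        assert_eq!(map.ranges, vec![(0, 2)]);
    }
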
@@ -25,23 +25,26 @@ use smol_str::SmolStr;
 /// source token and making sure that identities are preserved during macro
 /// expansion.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct TokenId(u32, u32);
+pub struct TokenId {
+    token_id: u32,
+    map_id: u32,
+}
 
 impl TokenId {
     pub fn new(token_id: u32, map_id: u32) -> TokenId {
-        TokenId(token_id, map_id)
+        TokenId { token_id, map_id }
     }
 
     pub const fn unspecified() -> TokenId {
-        TokenId(!0, !0)
+        TokenId { token_id: !0, map_id: !0 }
     }
 
     pub fn token_id(&self) -> u32 {
-        self.0
+        self.token_id
     }
 
     pub fn map_id(&self) -> u32 {
-        self.1
+        self.map_id
     }
 }
 
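Note: TokenId previously packed two u32s positionally, so accessors had to read self.0 and self.1 and nothing stopped a constructor from transposing the arguments. Named fields carry the meaning at every use site. A compressed before/after sketch, independent of the real crate:

    // Before: positional; which u32 is which must be memorized.
    struct Positional(u32, u32);

    // After: field names document the layout wherever it is used.
    struct Named {
        token_id: u32,
        map_id: u32,
    }

    fn main() {
        let old = Positional(7, 1);
        let new = Named { token_id: 7, map_id: 1 };
        assert_eq!(old.0, new.token_id);
        assert_eq!(old.1, new.map_id);
    }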