Token-based reverse-mapping

This commit is contained in:
Aleksey Kladov 2019-11-18 15:08:39 +03:00
parent 5e8c22dbf3
commit b79d678923
3 changed files with 52 additions and 38 deletions

View file

@ -20,7 +20,7 @@ use ra_db::{salsa, CrateId, FileId};
use ra_syntax::{ use ra_syntax::{
algo, algo,
ast::{self, AstNode}, ast::{self, AstNode},
SyntaxNode, SyntaxToken, TextRange, TextUnit, SyntaxNode, SyntaxToken, TextUnit,
}; };
use crate::ast_id_map::FileAstId; use crate::ast_id_map::FileAstId;
@ -79,22 +79,17 @@ impl HirFileId {
HirFileIdRepr::MacroFile(macro_file) => { HirFileIdRepr::MacroFile(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id); let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id);
let arg_start = loc.ast_id.to_node(db).token_tree()?.syntax().text_range().start(); let arg_tt = loc.ast_id.to_node(db).token_tree()?;
let def_start = let def_tt = loc.def.ast_id.to_node(db).token_tree()?;
loc.def.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
let macro_def = db.macro_def(loc.def)?; let macro_def = db.macro_def(loc.def)?;
let (parse, exp_map) = db.parse_macro(macro_file)?; let (parse, exp_map) = db.parse_macro(macro_file)?;
let expanded = Source::new(self, parse.syntax_node());
let macro_arg = db.macro_arg(macro_file.macro_call_id)?; let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
let arg_start = (loc.ast_id.file_id, arg_start);
let def_start = (loc.def.ast_id.file_id, def_start);
Some(ExpansionInfo { Some(ExpansionInfo {
expanded, expanded: Source::new(self, parse.syntax_node()),
arg_start, arg: Source::new(loc.ast_id.file_id, arg_tt),
def_start, def: Source::new(loc.ast_id.file_id, def_tt),
macro_arg, macro_arg,
macro_def, macro_def,
exp_map, exp_map,
@ -159,8 +154,8 @@ impl MacroCallId {
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct ExpansionInfo { pub struct ExpansionInfo {
expanded: Source<SyntaxNode>, expanded: Source<SyntaxNode>,
arg_start: (HirFileId, TextUnit), arg: Source<ast::TokenTree>,
def_start: (HirFileId, TextUnit), def: Source<ast::TokenTree>,
macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>, macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>, macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
@ -169,8 +164,9 @@ pub struct ExpansionInfo {
impl ExpansionInfo { impl ExpansionInfo {
pub fn map_token_down(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> { pub fn map_token_down(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
assert_eq!(token.file_id, self.arg_start.0); assert_eq!(token.file_id, self.arg.file_id);
let range = token.ast.text_range().checked_sub(self.arg_start.1)?; let range =
token.ast.text_range().checked_sub(self.arg.ast.syntax().text_range().start())?;
let token_id = self.macro_arg.1.token_by_range(range)?; let token_id = self.macro_arg.1.token_by_range(range)?;
let token_id = self.macro_def.0.map_id_down(token_id); let token_id = self.macro_def.0.map_id_down(token_id);
@ -181,25 +177,22 @@ impl ExpansionInfo {
Some(self.expanded.with_ast(token)) Some(self.expanded.with_ast(token))
} }
// FIXME: a more correct signature would be pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
// `pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>>` let token_id = self.exp_map.token_by_range(token.ast.text_range())?;
pub fn find_range(&self, from: TextRange) -> Option<(HirFileId, TextRange)> {
let token_id = look_in_rev_map(&self.exp_map, from)?;
let (token_id, origin) = self.macro_def.0.map_id_up(token_id); let (token_id, origin) = self.macro_def.0.map_id_up(token_id);
let (token_map, tt) = match origin {
let (token_map, (file_id, start_offset)) = match origin { mbe::Origin::Call => (&self.macro_arg.1, &self.arg),
mbe::Origin::Call => (&self.macro_arg.1, self.arg_start), mbe::Origin::Def => (&self.macro_def.1, &self.def),
mbe::Origin::Def => (&self.macro_def.1, self.def_start),
}; };
let range = token_map.relative_range_of(token_id)?; let range = token_map.relative_range_of(token_id)?;
let token = algo::find_covering_element(
return Some((file_id, range + start_offset)); tt.ast.syntax(),
range + tt.ast.syntax().text_range().start(),
fn look_in_rev_map(exp_map: &mbe::RevTokenMap, from: TextRange) -> Option<tt::TokenId> { )
exp_map.ranges.iter().find(|&it| it.0.is_subrange(&from)).map(|it| it.1) .into_token()?;
} Some(tt.with_ast(token))
} }
} }

View file

@ -8,15 +8,32 @@ use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken};
use crate::{db::RootDatabase, FileRange}; use crate::{db::RootDatabase, FileRange};
pub(crate) fn original_range(db: &RootDatabase, node: Source<&SyntaxNode>) -> FileRange { pub(crate) fn original_range(db: &RootDatabase, node: Source<&SyntaxNode>) -> FileRange {
let text_range = node.ast.text_range(); let expansion = match node.file_id.expansion_info(db) {
let (file_id, range) = node None => {
.file_id return FileRange {
.expansion_info(db) file_id: node.file_id.original_file(db),
.and_then(|expansion_info| expansion_info.find_range(text_range)) range: node.ast.text_range(),
.unwrap_or((node.file_id, text_range)); }
}
Some(it) => it,
};
// FIXME: the following is completely wrong.
//
// *First*, we should try to map first and last tokens of node, and, if that
// fails, return the range of the overall macro expansions.
//
// *Second*, we should handle recursive macro expansions
// FIXME: handle recursive macro generated macro let token = node
FileRange { file_id: file_id.original_file(db), range } .ast
.descendants_with_tokens()
.filter_map(|it| it.into_token())
.find_map(|it| expansion.map_token_up(node.with_ast(&it)));
match token {
Some(it) => FileRange { file_id: it.file_id.original_file(db), range: it.ast.text_range() },
None => FileRange { file_id: node.file_id.original_file(db), range: node.ast.text_range() },
}
} }
pub(crate) fn descend_into_macros( pub(crate) fn descend_into_macros(

View file

@ -20,7 +20,7 @@ pub struct TokenMap {
/// Maps relative range of the expanded syntax node to `tt::TokenId` /// Maps relative range of the expanded syntax node to `tt::TokenId`
#[derive(Debug, PartialEq, Eq, Default)] #[derive(Debug, PartialEq, Eq, Default)]
pub struct RevTokenMap { pub struct RevTokenMap {
pub ranges: Vec<(TextRange, tt::TokenId)>, ranges: Vec<(TextRange, tt::TokenId)>,
} }
/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
@ -96,6 +96,10 @@ impl TokenMap {
} }
impl RevTokenMap { impl RevTokenMap {
pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
self.ranges.iter().find(|&it| it.0 == relative_range).map(|it| it.1)
}
pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> { pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> {
let &(r, _) = self.ranges.iter().find(|(_, tid)| *tid == token_id)?; let &(r, _) = self.ranges.iter().find(|(_, tid)| *tid == token_id)?;
Some(r) Some(r)