Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-10-01 14:21:44 +00:00.
Merge #421

421: Index macros r=matklad a=matklad

So, this is pretty cool! We now index items which are the result of macro expansion (of a single, currently hard-coded, macro). Workspace symbols can now be used to navigate to `HirDatabase`, for example.

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>

Commit 2fcc6bdafa

9 changed files with 549 additions and 465 deletions
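Before the diffs, a minimal self-contained sketch of the idea; the two helper functions are hypothetical stand-ins, not this PR's API (the real code lives in the symbol_index.rs and source_binder.rs hunks below). A file's symbols are now the union of what its syntax tree declares directly and what its macro invocations expand to.

// Sketch only: symbols_from_syntax and symbols_from_macro_expansions are
// hypothetical stand-ins for illustration.
fn file_symbols(file: &str) -> Vec<String> {
    let mut symbols = symbols_from_syntax(file); // what indexing did before
    symbols.extend(symbols_from_macro_expansions(file)); // new in this PR
    symbols
}

fn symbols_from_syntax(_file: &str) -> Vec<String> {
    // `trait HirDatabase` in the example below is hidden inside a macro
    // call's token tree, so plain syntax-tree indexing finds nothing here.
    Vec::new()
}

fn symbols_from_macro_expansions(_file: &str) -> Vec<String> {
    // Expand each macro call, walk the expansion for item definitions, and
    // map their ranges back into the original file (see source_binder.rs).
    vec!["HirDatabase".to_string()]
}

fn main() {
    let file = "salsa::query_group! { pub trait HirDatabase: SyntaxDatabase {} }";
    assert!(file_symbols(file).contains(&"HirDatabase".to_string()));
}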
Cargo.lock (generated): 10 changes
@@ -680,6 +680,7 @@ dependencies = [
  "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "salsa 0.9.0 (git+https://github.com/matklad/salsa.git?branch=transitive-untracked)",
  "test_utils 0.1.0",
+ "unicase 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -1353,6 +1354,14 @@ dependencies = [
  "unic-common 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "unicase"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "unicode-bidi"
 version = "0.3.4"
@@ -1616,6 +1625,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum unic-segment 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c9ca47cbb09fb5fcd066b5867d11dc528302fa465277882797d6a836e1ee6f9e"
 "checksum unic-ucd-segment 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "48f1a08ce0409a9e391b88d1930118eec48af12742fc538bcec55f775865776e"
 "checksum unic-ucd-version 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1f5e6c6c53c2d0ece4a5964bc55fcff8602153063cb4fab20958ff32998ff6"
+"checksum unicase 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9d3218ea14b4edcaccfa0df0a64a3792a2c32cc706f1b336e48867f9d3147f90"
 "checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
 "checksum unicode-normalization 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "6a0180bc61fc5a987082bfa111f4cc95c4caff7f9799f3e46df09163a937aa25"
 "checksum unicode-segmentation 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "aa6024fc12ddfd1c6dbc14a80fa2324d4568849869b779f6bd37e5e4c03344d1"
@@ -13,6 +13,7 @@ fst = "0.3.1"
 salsa = "0.9.0"
 rustc-hash = "1.0"
 parking_lot = "0.7.0"
+unicase = "2.2.0"
 
 ra_syntax = { path = "../ra_syntax" }
 ra_editor = { path = "../ra_editor" }
@@ -20,6 +20,7 @@
 //! file in the current workspace, and run a query aginst the union of all
 //! thouse fsts.
 use std::{
+    cmp::Ordering,
     hash::{Hash, Hasher},
     sync::Arc,
 };
@@ -27,11 +28,11 @@ use std::{
 use fst::{self, Streamer};
 use ra_syntax::{
     SyntaxNodeRef, SourceFileNode, SmolStr,
-    algo::visit::{visitor, Visitor},
+    algo::{visit::{visitor, Visitor}, find_covering_node},
     SyntaxKind::{self, *},
     ast::{self, NameOwner},
 };
-use ra_db::{SyntaxDatabase, SourceRootId, FilesDatabase, LocalSyntaxPtr};
+use ra_db::{SourceRootId, FilesDatabase, LocalSyntaxPtr};
 use salsa::ParallelDatabase;
 use rayon::prelude::*;
 
@@ -41,7 +42,7 @@ use crate::{
 };
 
 salsa::query_group! {
-    pub(crate) trait SymbolsDatabase: SyntaxDatabase {
+    pub(crate) trait SymbolsDatabase: hir::db::HirDatabase {
         fn file_symbols(file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
             type FileSymbolsQuery;
         }
@@ -52,10 +53,23 @@ salsa::query_group! {
     }
 }
 
-fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
+fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
     db.check_canceled()?;
-    let syntax = db.source_file(file_id);
-    Ok(Arc::new(SymbolIndex::for_file(file_id, syntax)))
+    let source_file = db.source_file(file_id);
+    let mut symbols = source_file
+        .syntax()
+        .descendants()
+        .filter_map(to_symbol)
+        .map(move |(name, ptr)| FileSymbol { name, ptr, file_id })
+        .collect::<Vec<_>>();
+
+    for (name, text_range) in hir::source_binder::macro_symbols(db, file_id)? {
+        let node = find_covering_node(source_file.syntax(), text_range);
+        let ptr = LocalSyntaxPtr::new(node);
+        symbols.push(FileSymbol { file_id, name, ptr })
+    }
+
+    Ok(Arc::new(SymbolIndex::new(symbols)))
 }
 
 pub(crate) fn world_symbols(db: &RootDatabase, query: Query) -> Cancelable<Vec<FileSymbol>> {
@@ -111,6 +125,17 @@ impl Hash for SymbolIndex {
 }
 
 impl SymbolIndex {
+    fn new(mut symbols: Vec<FileSymbol>) -> SymbolIndex {
+        fn cmp(s1: &FileSymbol, s2: &FileSymbol) -> Ordering {
+            unicase::Ascii::new(s1.name.as_str()).cmp(&unicase::Ascii::new(s2.name.as_str()))
+        }
+        symbols.par_sort_by(cmp);
+        symbols.dedup_by(|s1, s2| cmp(s1, s2) == Ordering::Equal);
+        let names = symbols.iter().map(|it| it.name.as_str().to_lowercase());
+        let map = fst::Map::from_iter(names.into_iter().zip(0u64..)).unwrap();
+        SymbolIndex { symbols, map }
+    }
+
     pub(crate) fn len(&self) -> usize {
         self.symbols.len()
     }
@@ -118,29 +143,16 @@ impl SymbolIndex {
     pub(crate) fn for_files(
         files: impl ParallelIterator<Item = (FileId, SourceFileNode)>,
     ) -> SymbolIndex {
-        let mut symbols = files
+        let symbols = files
             .flat_map(|(file_id, file)| {
                 file.syntax()
                     .descendants()
                     .filter_map(to_symbol)
-                    .map(move |(name, ptr)| {
-                        (
-                            name.as_str().to_lowercase(),
-                            FileSymbol { name, ptr, file_id },
-                        )
-                    })
+                    .map(move |(name, ptr)| FileSymbol { name, ptr, file_id })
                     .collect::<Vec<_>>()
             })
             .collect::<Vec<_>>();
-        symbols.par_sort_by(|s1, s2| s1.0.cmp(&s2.0));
-        symbols.dedup_by(|s1, s2| s1.0 == s2.0);
-        let (names, symbols): (Vec<String>, Vec<FileSymbol>) = symbols.into_iter().unzip();
-        let map = fst::Map::from_iter(names.into_iter().zip(0u64..)).unwrap();
-        SymbolIndex { symbols, map }
-    }
-
-    pub(crate) fn for_file(file_id: FileId, file: SourceFileNode) -> SymbolIndex {
-        SymbolIndex::for_files(rayon::iter::once((file_id, file)))
+        SymbolIndex::new(symbols)
     }
 }
 
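The new `SymbolIndex::new` above leans on two properties that the Cargo.toml change makes available: `unicase::Ascii` compares names case-insensitively, and `fst::Map::from_iter` insists on keys arriving in sorted order, which is why the symbols are sorted and deduplicated first and the names lowercased before the map is built. A quick check, assuming the fst 0.3 and unicase 2.2 crates from the diff:

use std::cmp::Ordering;

fn main() {
    // unicase::Ascii is the case-insensitive ordering used to sort and
    // deduplicate the symbols.
    let a = unicase::Ascii::new("HirDatabase");
    let b = unicase::Ascii::new("HIRDATABASE");
    assert_eq!(a.cmp(&b), Ordering::Equal);

    // fst::Map::from_iter errors if keys are not in sorted order, hence
    // the pre-sorted, lowercased names in SymbolIndex::new.
    let map = fst::Map::from_iter(vec![("hirdatabase", 0u64), ("symbolindex", 1u64)]).unwrap();
    assert!(map.contains_key("hirdatabase"));
}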
@@ -6,7 +6,7 @@ use test_utils::{assert_eq_dbg, assert_eq_text};
 
 use ra_analysis::{
     mock_analysis::{analysis_and_position, single_file, single_file_with_position, MockAnalysis},
-    AnalysisChange, CrateGraph, FileId, FnSignatureInfo,
+    AnalysisChange, CrateGraph, FileId, FnSignatureInfo, Query
 };
 
 fn get_signature(text: &str) -> (FnSignatureInfo, Option<usize>) {
@@ -531,6 +531,7 @@ fn test_rename_for_mut_param() {
 }"#,
     );
 }
+
 fn test_rename(text: &str, new_name: &str, expected: &str) {
     let (analysis, position) = single_file_with_position(text);
     let edits = analysis.rename(position, new_name).unwrap();
@@ -547,3 +548,19 @@ fn test_rename(text: &str, new_name: &str, expected: &str) {
         .apply(&*analysis.file_text(file_id.unwrap()));
     assert_eq_text!(expected, &*result);
 }
+
+#[test]
+fn world_symbols_include_stuff_from_macros() {
+    let (analysis, _) = single_file(
+        "
+salsa::query_group! {
+pub trait HirDatabase: SyntaxDatabase {}
+}
+",
+    );
+
+    let mut symbols = analysis.symbol_search(Query::new("Hir".into())).unwrap();
+    let s = symbols.pop().unwrap();
+    assert_eq!(s.name(), "HirDatabase");
+    assert_eq!(s.range(), TextRange::from_to(33.into(), 44.into()));
+}
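The asserted range in the new test is worth unpacking: 33..44 is the position of `HirDatabase` in the original fixture text, not in the macro expansion, which is exactly what makes navigation land in the right place. The arithmetic, as a sketch over the reconstructed fixture string:

fn main() {
    // 1 (leading newline) + 22 ("salsa::query_group! {" plus its newline)
    // + 10 ("pub trait ") = 33; "HirDatabase" is 11 chars, so the end is 44.
    let fixture = "\nsalsa::query_group! {\npub trait HirDatabase: SyntaxDatabase {}\n}\n";
    let start = fixture.find("HirDatabase").unwrap();
    assert_eq!((start, start + "HirDatabase".len()), (33, 44));
}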
@@ -48,6 +48,13 @@ impl HirFileId {
         }
     }
 
+    pub(crate) fn as_macro_call_id(self) -> Option<MacroCallId> {
+        match self.0 {
+            HirFileIdRepr::Macro(it) => Some(it),
+            _ => None,
+        }
+    }
+
     pub(crate) fn hir_source_file(db: &impl HirDatabase, file_id: HirFileId) -> SourceFileNode {
         match file_id.0 {
             HirFileIdRepr::File(file_id) => db.source_file(file_id),
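The `as_macro_call_id` accessor added above is the hinge between the two halves of the PR: a `HirFileId` is either a real file or a macro expansion, and the indexer only cares about the latter. A condensed sketch, with simplified stand-in types in place of the hir ones:

#[derive(Clone, Copy)]
struct FileId(u32);
#[derive(Clone, Copy, Debug, PartialEq)]
struct MacroCallId(u32);

enum HirFileIdRepr {
    File(FileId),
    Macro(MacroCallId),
}

struct HirFileId(HirFileIdRepr);

impl HirFileId {
    fn as_macro_call_id(self) -> Option<MacroCallId> {
        match self.0 {
            HirFileIdRepr::Macro(it) => Some(it),
            _ => None,
        }
    }
}

fn main() {
    let ids = vec![
        HirFileId(HirFileIdRepr::File(FileId(1))),
        HirFileId(HirFileIdRepr::Macro(MacroCallId(7))),
    ];
    // source_binder uses exactly this filter_map shape to find macro calls.
    let macros: Vec<_> = ids.into_iter().filter_map(HirFileId::as_macro_call_id).collect();
    assert_eq!(macros, vec![MacroCallId(7)]);
}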
@@ -64,14 +64,14 @@ impl ModuleScope {
 /// running name resolution.
 #[derive(Debug, Default, PartialEq, Eq)]
 pub struct InputModuleItems {
-    items: Vec<ModuleItem>,
+    pub(crate) items: Vec<ModuleItem>,
     imports: Vec<Import>,
 }
 
 #[derive(Debug, PartialEq, Eq)]
-struct ModuleItem {
-    id: SourceItemId,
-    name: Name,
+pub(crate) struct ModuleItem {
+    pub(crate) id: SourceItemId,
+    pub(crate) name: Name,
     kind: SyntaxKind,
     vis: Vis,
 }
@@ -8,8 +8,8 @@
 use ra_db::{FileId, FilePosition, Cancelable};
 use ra_editor::find_node_at_offset;
 use ra_syntax::{
+    SmolStr, TextRange, SyntaxNodeRef,
     ast::{self, AstNode, NameOwner},
-    SyntaxNodeRef,
 };
 
 use crate::{
@@ -126,3 +126,40 @@ pub fn function_from_child_node(
     let fn_def = ctry!(node.ancestors().find_map(ast::FnDef::cast));
     function_from_source(db, file_id, fn_def)
 }
+
+pub fn macro_symbols(
+    db: &impl HirDatabase,
+    file_id: FileId,
+) -> Cancelable<Vec<(SmolStr, TextRange)>> {
+    let module = match module_from_file_id(db, file_id)? {
+        Some(it) => it,
+        None => return Ok(Vec::new()),
+    };
+    let items = db.input_module_items(module.source_root_id, module.module_id)?;
+    let mut res = Vec::new();
+
+    for macro_call_id in items
+        .items
+        .iter()
+        .filter_map(|it| it.id.file_id.as_macro_call_id())
+    {
+        if let Some(exp) = db.expand_macro_invocation(macro_call_id) {
+            let loc = macro_call_id.loc(db);
+            let syntax = db.file_item(loc.source_item_id);
+            let syntax = syntax.borrowed();
+            let macro_call = ast::MacroCall::cast(syntax).unwrap();
+            let off = macro_call.token_tree().unwrap().syntax().range().start();
+            let file = exp.file();
+            for trait_def in file.syntax().descendants().filter_map(ast::TraitDef::cast) {
+                if let Some(name) = trait_def.name() {
+                    let dst_range = name.syntax().range();
+                    if let Some(src_range) = exp.map_range_back(dst_range) {
+                        res.push((name.text(), src_range + off))
+                    }
+                }
+            }
+        }
+    }
+
+    Ok(res)
+}
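The trickiest part of `macro_symbols` above is the range bookkeeping: `map_range_back` returns a range relative to the macro call's token tree, and adding `off` (the token tree's start offset) turns it into a range in the original file. A minimal sketch, with a hypothetical `Range` standing in for `ra_syntax`'s `TextRange`:

#[derive(Debug, PartialEq, Clone, Copy)]
struct Range { start: u32, end: u32 }

impl std::ops::Add<u32> for Range {
    type Output = Range;
    fn add(self, off: u32) -> Range {
        Range { start: self.start + off, end: self.end + off }
    }
}

fn main() {
    let off = 23; // the macro's `{ ... }` token tree starts at offset 23 in the file
    let src_range = Range { start: 10, end: 21 }; // `HirDatabase`, relative to the token tree
    assert_eq!(src_range + off, Range { start: 33, end: 44 }); // range in the file
}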
editors/code/package-lock.json (generated): 872 changes. File diff suppressed because it is too large.