reuse hover results with LSIF ResultSet

hamidreza kalbasi 2021-09-18 22:14:47 +04:30
parent 70061d2b7e
commit f2775ac2e9
3 changed files with 173 additions and 94 deletions
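Before this change, every token occurrence in the static index carried its own TokenStaticData (a range plus a hover result), so the hover for a definition was computed and emitted once per reference. This commit interns that data instead: StaticIndex gains a TokenStore addressed by TokenId, together with a def_map from Definition to TokenId, so all tokens that resolve to the same definition share a single entry, and the LSIF generator can attach the hover to one ResultSet vertex per TokenId and connect each occurrence's range to it with a Next edge. The interning pattern, as a minimal self-contained sketch; the types below (Definition, Hover, get_or_insert) are simplified stand-ins, not the rust-analyzer API:

use std::collections::HashMap;

// Stand-ins for rust-analyzer's `Definition` and `HoverResult`.
type Definition = u32;
type Hover = String;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct TokenId(usize);

#[derive(Default)]
struct TokenStore {
    data: Vec<Hover>,
    def_map: HashMap<Definition, TokenId>,
}

impl TokenStore {
    // Return the existing id for this definition, or compute and intern the hover exactly once.
    fn get_or_insert(&mut self, def: Definition, hover: impl FnOnce() -> Hover) -> TokenId {
        if let Some(&id) = self.def_map.get(&def) {
            return id;
        }
        let id = TokenId(self.data.len());
        self.data.push(hover());
        self.def_map.insert(def, id);
        id
    }
}

fn main() {
    let mut store = TokenStore::default();
    // Two occurrences of the same definition share one interned entry.
    let a = store.get_or_insert(1, || "fn foo()".to_string());
    let b = store.get_or_insert(1, || unreachable!("hover is computed only once"));
    assert_eq!(a, b);
    assert_eq!(store.data.len(), 1);
}

In the diff below, get_definition_of_token supplies the key and hover_for_definition runs only the first time a definition is seen.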


@@ -87,7 +87,7 @@ pub use crate::{
     references::ReferenceSearchResult,
     rename::RenameError,
     runnables::{Runnable, RunnableKind, TestId},
-    static_index::{StaticIndex, StaticIndexedFile, TokenStaticData},
+    static_index::{StaticIndex, StaticIndexedFile, TokenStaticData, TokenId},
     syntax_highlighting::{
         tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
         HlRange,


@@ -1,31 +1,62 @@
 //! This module provides `StaticIndex` which is used for powering
 //! read-only code browsers and emitting LSIF
+use std::collections::HashMap;
+
 use hir::{db::HirDatabase, Crate, Module};
-use ide_db::base_db::{FileId, FileRange, SourceDatabaseExt};
+use ide_db::base_db::{FileId, SourceDatabaseExt};
 use ide_db::RootDatabase;
+use ide_db::defs::Definition;
 use rustc_hash::FxHashSet;
 use syntax::TextRange;
 use syntax::{AstNode, SyntaxKind::*, T};
 
+use crate::hover::{get_definition_of_token, hover_for_definition};
 use crate::{Analysis, Cancellable, Fold, HoverConfig, HoverDocFormat, HoverResult};
 
 /// A static representation of fully analyzed source code.
 ///
 /// The intended use-case is powering read-only code browsers and emitting LSIF
-pub struct StaticIndex {
+pub struct StaticIndex<'a> {
     pub files: Vec<StaticIndexedFile>,
+    pub tokens: TokenStore,
+    analysis: &'a Analysis,
+    db: &'a RootDatabase,
+    def_map: HashMap<Definition, TokenId>,
 }
 
 pub struct TokenStaticData {
-    pub range: TextRange,
     pub hover: Option<HoverResult>,
 }
 
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TokenId(usize);
+
+#[derive(Default)]
+pub struct TokenStore(Vec<TokenStaticData>);
+
+impl TokenStore {
+    pub fn insert(&mut self, data: TokenStaticData) -> TokenId {
+        let id = TokenId(self.0.len());
+        self.0.push(data);
+        id
+    }
+
+    pub fn get(&self, id: TokenId) -> Option<&TokenStaticData> {
+        self.0.get(id.0)
+    }
+
+    pub fn iter(self) -> impl Iterator<Item=(TokenId, TokenStaticData)> {
+        self.0.into_iter().enumerate().map(|(i, x)| {
+            (TokenId(i), x)
+        })
+    }
+}
+
 pub struct StaticIndexedFile {
     pub file_id: FileId,
     pub folds: Vec<Fold>,
-    pub tokens: Vec<TokenStaticData>,
+    pub tokens: Vec<(TextRange, TokenId)>,
 }
 
 fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
@@ -41,25 +72,11 @@ fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
     modules
 }
 
-impl StaticIndex {
-    pub fn compute(db: &RootDatabase, analysis: &Analysis) -> Cancellable<StaticIndex> {
-        let work = all_modules(db).into_iter().filter(|module| {
-            let file_id = module.definition_source(db).file_id.original_file(db);
-            let source_root = db.file_source_root(file_id);
-            let source_root = db.source_root(source_root);
-            !source_root.is_library
-        });
-        let mut visited_files = FxHashSet::default();
-        let mut result_files = Vec::<StaticIndexedFile>::new();
-        for module in work {
-            let file_id = module.definition_source(db).file_id.original_file(db);
-            if visited_files.contains(&file_id) {
-                continue;
-            }
-            let folds = analysis.folding_ranges(file_id)?;
+impl StaticIndex<'_> {
+    fn add_file(&mut self, file_id: FileId) -> Cancellable<()> {
+        let folds = self.analysis.folding_ranges(file_id)?;
 
         // hovers
-        let sema = hir::Semantics::new(db);
+        let sema = hir::Semantics::new(self.db);
         let tokens_or_nodes = sema.parse(file_id).syntax().clone();
         let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
             syntax::NodeOrToken::Node(_) => None,
@@ -74,29 +91,62 @@ impl StaticIndex {
                 | LIFETIME_IDENT
                 | T![self]
                 | T![super]
-                | T![crate]
-                | T!['(']
-                | T![')'] => true,
+                | T![crate] => true,
                 _ => false,
-            })
-            .map(|token| {
-                let range = token.text_range();
-                let hover = analysis
-                    .hover(
-                        &hover_config,
-                        FileRange {
-                            file_id,
-                            range: TextRange::new(range.start(), range.start()),
-                        },
-                    )?
-                    .map(|x| x.info);
-                Ok(TokenStaticData { range, hover })
-            })
-            .collect::<Result<Vec<_>, _>>()?;
-            result_files.push(StaticIndexedFile { file_id, folds, tokens });
+            });
+        let mut result = StaticIndexedFile {
+            file_id,
+            folds,
+            tokens: vec![],
+        };
+        for token in tokens {
+            let range = token.text_range();
+            let node = token.parent().unwrap();
+            let def = get_definition_of_token(self.db, &sema, &sema.descend_into_macros(token), file_id, range.start(), &mut None);
+            let def = if let Some(x) = def {
+                x
+            } else {
+                continue;
+            };
+            let id = if let Some(x) = self.def_map.get(&def) {
+                *x
+            } else {
+                let x = self.tokens.insert(TokenStaticData {
+                    hover: hover_for_definition(self.db, file_id, &sema, def, node, &hover_config),
+                });
+                self.def_map.insert(def, x);
+                x
+            };
+            result.tokens.push((range, id));
+        }
+        self.files.push(result);
+        Ok(())
+    }
+
+    pub fn compute<'a>(db: &'a RootDatabase, analysis: &'a Analysis) -> Cancellable<StaticIndex<'a>> {
+        let work = all_modules(db).into_iter().filter(|module| {
+            let file_id = module.definition_source(db).file_id.original_file(db);
+            let source_root = db.file_source_root(file_id);
+            let source_root = db.source_root(source_root);
+            !source_root.is_library
+        });
+        let mut this = StaticIndex {
+            files: vec![],
+            tokens: Default::default(),
+            analysis, db,
+            def_map: Default::default(),
+        };
+        let mut visited_files = FxHashSet::default();
+        for module in work {
+            let file_id = module.definition_source(db).file_id.original_file(db);
+            if visited_files.contains(&file_id) {
+                continue;
+            }
+            this.add_file(file_id)?;
             // mark the file
             visited_files.insert(file_id);
         }
-        Ok(StaticIndex { files: result_files })
+        //eprintln!("{:#?}", token_map);
+        Ok(this)
     }
 }
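With the reshaped StaticIndex, consumers read the index in two passes, which is what the LSIF generator in the next file switches to: once over si.tokens for the per-definition data, then once over si.files, whose tokens are now lightweight (TextRange, TokenId) pairs pointing back into the store. A rough sketch of that consumption against the ide crate from this commit; the dump helper is hypothetical:

use ide::{StaticIndex, StaticIndexedFile};

// Hypothetical helper: walk a computed index the way the LSIF generator does.
fn dump(si: StaticIndex<'_>) {
    // Pass 1: one TokenStaticData per distinct definition; the hover lives only here.
    let definitions = si.tokens.iter().count();
    // Pass 2: every occurrence is just a (TextRange, TokenId) pair into the store.
    let mut occurrences = 0;
    for StaticIndexedFile { tokens, .. } in si.files {
        occurrences += tokens.len();
    }
    eprintln!("{} token occurrences share {} hover entries", occurrences, definitions);
}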


@@ -1,15 +1,16 @@
 //! Lsif generator
 
+use std::collections::HashMap;
 use std::env;
 use std::time::Instant;
 
-use ide::{StaticIndex, StaticIndexedFile, TokenStaticData};
+use ide::{Analysis, Cancellable, RootDatabase, StaticIndex, StaticIndexedFile, TokenId, TokenStaticData};
 use ide_db::LineIndexDatabase;
 use ide_db::base_db::salsa::{self, ParallelDatabase};
 use lsp_types::{lsif::*, Hover, HoverContents, NumberOrString};
 use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
-use vfs::AbsPathBuf;
+use vfs::{AbsPathBuf, Vfs};
 
 use crate::cli::{
     flags,
@@ -27,9 +28,12 @@ impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
     }
 }
 
-#[derive(Default)]
-struct LsifManager {
+struct LsifManager<'a> {
     count: i32,
+    token_map: HashMap<TokenId, Id>,
+    analysis: &'a Analysis,
+    db: &'a RootDatabase,
+    vfs: &'a Vfs,
 }
 
 #[derive(Clone, Copy)]
@@ -41,7 +45,17 @@ impl From<Id> for NumberOrString {
     }
 }
 
-impl LsifManager {
+impl LsifManager<'_> {
+    fn new<'a>(analysis: &'a Analysis, db: &'a RootDatabase, vfs: &'a Vfs) -> LsifManager<'a> {
+        LsifManager {
+            count: 0,
+            token_map: HashMap::default(),
+            analysis,
+            db,
+            vfs,
+        }
+    }
+
     fn add(&mut self, data: Element) -> Id {
         let id = Id(self.count);
         self.emit(&serde_json::to_string(&Entry { id: id.into(), data }).unwrap());
@@ -54,14 +68,9 @@ impl LsifManager {
         println!("{}", data);
     }
 
-    fn add_tokens(&mut self, line_index: &LineIndex, doc_id: Id, tokens: Vec<TokenStaticData>) {
-        let tokens_id = tokens
-            .into_iter()
-            .map(|token| {
-                let token_id = self.add(Element::Vertex(Vertex::Range {
-                    range: to_proto::range(line_index, token.range),
-                    tag: None,
-                }));
+    fn add_token(&mut self, id: TokenId, token: TokenStaticData) {
+        let result_set_id = self.add(Element::Vertex(Vertex::ResultSet(ResultSet { key: None })));
+        self.token_map.insert(id, result_set_id);
         if let Some(hover) = token.hover {
             let hover_id = self.add(Element::Vertex(Vertex::HoverResult {
                 result: Hover {
@@ -71,16 +80,55 @@ impl LsifManager {
             }));
             self.add(Element::Edge(Edge::Hover(EdgeData {
                 in_v: hover_id.into(),
-                out_v: token_id.into(),
+                out_v: result_set_id.into(),
             })));
         }
-                token_id.into()
+    }
+
+    fn add_file(&mut self, file: StaticIndexedFile) -> Cancellable<()> {
+        let StaticIndexedFile { file_id, tokens, folds} = file;
+        let path = self.vfs.file_path(file_id);
+        let path = path.as_path().unwrap();
+        let doc_id = self.add(Element::Vertex(Vertex::Document(Document {
+            language_id: "rust".to_string(),
+            uri: lsp_types::Url::from_file_path(path).unwrap(),
+        })));
+        let text = self.analysis.file_text(file_id)?;
+        let line_index = self.db.line_index(file_id);
+        let line_index = LineIndex {
+            index: line_index.clone(),
+            encoding: OffsetEncoding::Utf16,
+            endings: LineEndings::Unix,
+        };
+        let result = folds
+            .into_iter()
+            .map(|it| to_proto::folding_range(&*text, &line_index, false, it))
+            .collect();
+        let folding_id = self.add(Element::Vertex(Vertex::FoldingRangeResult { result }));
+        self.add(Element::Edge(Edge::FoldingRange(EdgeData {
+            in_v: folding_id.into(),
+            out_v: doc_id.into(),
+        })));
+        let tokens_id = tokens
+            .into_iter()
+            .map(|(range, id)| {
+                let range_id = self.add(Element::Vertex(Vertex::Range {
+                    range: to_proto::range(&line_index, range),
+                    tag: None,
+                }));
+                let result_set_id = *self.token_map.get(&id).expect("token map doesn't contain id");
+                self.add(Element::Edge(Edge::Next(EdgeData {
+                    in_v: result_set_id.into(),
+                    out_v: range_id.into(),
+                })));
+                range_id.into()
             })
             .collect();
         self.add(Element::Edge(Edge::Contains(EdgeDataMultiIn {
             in_vs: tokens_id,
             out_v: doc_id.into(),
         })));
+        Ok(())
     }
 }
@@ -106,37 +154,18 @@ impl flags::Lsif {
         let si = StaticIndex::compute(db, &analysis)?;
 
-        let mut lsif = LsifManager::default();
+        let mut lsif = LsifManager::new(&analysis, db, &vfs);
         lsif.add(Element::Vertex(Vertex::MetaData(MetaData {
             version: String::from("0.5.0"),
             project_root: lsp_types::Url::from_file_path(path).unwrap(),
             position_encoding: Encoding::Utf16,
             tool_info: None,
         })));
-        for StaticIndexedFile { file_id, folds, tokens } in si.files {
-            let path = vfs.file_path(file_id);
-            let path = path.as_path().unwrap();
-            let doc_id = lsif.add(Element::Vertex(Vertex::Document(Document {
-                language_id: "rust".to_string(),
-                uri: lsp_types::Url::from_file_path(path).unwrap(),
-            })));
-            let text = analysis.file_text(file_id)?;
-            let line_index = db.line_index(file_id);
-            let line_index = LineIndex {
-                index: line_index.clone(),
-                encoding: OffsetEncoding::Utf16,
-                endings: LineEndings::Unix,
-            };
-            let result = folds
-                .into_iter()
-                .map(|it| to_proto::folding_range(&*text, &line_index, false, it))
-                .collect();
-            let folding_id = lsif.add(Element::Vertex(Vertex::FoldingRangeResult { result }));
-            lsif.add(Element::Edge(Edge::FoldingRange(EdgeData {
-                in_v: folding_id.into(),
-                out_v: doc_id.into(),
-            })));
-            lsif.add_tokens(&line_index, doc_id, tokens);
+        for (id, token) in si.tokens.iter() {
+            lsif.add_token(id, token);
+        }
+        for file in si.files {
+            lsif.add_file(file)?;
         }
         eprintln!("Generating LSIF finished in {:?}", now.elapsed());
         Ok(())