From 557210a68839286ebafa1e1565b978566b38a6f1 Mon Sep 17 00:00:00 2001
From: hamidreza kalbasi
Date: Wed, 8 Sep 2021 16:05:28 +0430
Subject: [PATCH 01/10] Beginning of LSIF

---
 crates/ide/src/lib.rs                         |   2 +
 crates/ide/src/static_index.rs                |  60 +++
 crates/rust-analyzer/src/bin/main.rs          |   1 +
 crates/rust-analyzer/src/cli.rs               |   1 +
 .../rust-analyzer/src/cli/analysis_stats.rs   |   2 -
 crates/rust-analyzer/src/cli/flags.rs         |  10 +
 crates/rust-analyzer/src/cli/lsif.rs          | 116 ++++++
 .../rust-analyzer/src/cli/lsif/lsif_types.rs  | 354 ++++++++++++++++++
 crates/rust-analyzer/src/line_index.rs        |   6 +
 9 files changed, 550 insertions(+), 2 deletions(-)
 create mode 100644 crates/ide/src/static_index.rs
 create mode 100644 crates/rust-analyzer/src/cli/lsif.rs
 create mode 100644 crates/rust-analyzer/src/cli/lsif/lsif_types.rs

diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 21872c81d1..3879da6d03 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -46,6 +46,7 @@ mod references;
 mod rename;
 mod runnables;
 mod ssr;
+mod static_index;
 mod status;
 mod syntax_highlighting;
 mod syntax_tree;
@@ -86,6 +87,7 @@ pub use crate::{
     references::ReferenceSearchResult,
     rename::RenameError,
     runnables::{Runnable, RunnableKind, TestId},
+    static_index::{StaticIndex, StaticIndexedFile},
     syntax_highlighting::{
         tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
         HlRange,
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
new file mode 100644
index 0000000000..1b384853be
--- /dev/null
+++ b/crates/ide/src/static_index.rs
@@ -0,0 +1,60 @@
+use hir::{db::HirDatabase, Crate, Module};
+use ide_db::base_db::{FileId, SourceDatabaseExt};
+use ide_db::RootDatabase;
+use rustc_hash::FxHashSet;
+
+use crate::{Analysis, Cancellable, Fold};
+
+/// A static representation of fully analyzed source code.
+///
+/// The intended use-case is powering read-only code browsers and emitting LSIF.
+pub struct StaticIndex {
+    pub files: Vec<StaticIndexedFile>,
+}
+
+pub struct StaticIndexedFile {
+    pub file_id: FileId,
+    pub folds: Vec<Fold>,
+}
+
+fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
+    let mut worklist: Vec<_> =
+        Crate::all(db).into_iter().map(|krate| krate.root_module(db)).collect();
+    let mut modules = Vec::new();
+
+    while let Some(module) = worklist.pop() {
+        modules.push(module);
+        worklist.extend(module.children(db));
+    }
+
+    modules
+}
+
+impl StaticIndex {
+    pub fn compute(db: &RootDatabase, analysis: &Analysis) -> Cancellable<StaticIndex> {
+        let work = all_modules(db).into_iter().filter(|module| {
+            let file_id = module.definition_source(db).file_id.original_file(db);
+            let source_root = db.file_source_root(file_id);
+            let source_root = db.source_root(source_root);
+            !source_root.is_library
+        });
+
+        let mut visited_files = FxHashSet::default();
+        let mut result_files = Vec::<StaticIndexedFile>::new();
+        for module in work {
+            let file_id = module.definition_source(db).file_id.original_file(db);
+            if !visited_files.contains(&file_id) {
+                //let path = vfs.file_path(file_id);
+                //let path = path.as_path().unwrap();
+                //let doc_id = lsif.add(Element::Vertex(Vertex::Document(Document {
+                //    language_id: Language::Rust,
+                //    uri: lsp_types::Url::from_file_path(path).unwrap(),
+                //})));
+                let folds = analysis.folding_ranges(file_id)?;
+                result_files.push(StaticIndexedFile { file_id, folds });
+                visited_files.insert(file_id);
+            }
+        }
+        Ok(StaticIndex { files: result_files })
+    }
+}
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs
index 2390bee824..74c041020b 100644
--- a/crates/rust-analyzer/src/bin/main.rs
+++ b/crates/rust-analyzer/src/bin/main.rs
@@ -87,6 +87,7 @@ fn try_main() -> Result<()> {
         flags::RustAnalyzerCmd::Diagnostics(cmd) => cmd.run()?,
         flags::RustAnalyzerCmd::Ssr(cmd) => cmd.run()?,
         flags::RustAnalyzerCmd::Search(cmd) => cmd.run()?,
+        flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?,
     }
     Ok(())
 }
diff --git a/crates/rust-analyzer/src/cli.rs b/crates/rust-analyzer/src/cli.rs
index efd8a2aa9f..6ccdaa86dd 100644
--- a/crates/rust-analyzer/src/cli.rs
+++ b/crates/rust-analyzer/src/cli.rs
@@ -8,6 +8,7 @@ mod highlight;
 mod analysis_stats;
 mod diagnostics;
 mod ssr;
+mod lsif;
 
 mod progress_report;
 
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index a2dc842e75..55a542c3c1 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -367,8 +367,6 @@ fn expr_syntax_range(
 ) -> Option<(VfsPath, LineCol, LineCol)> {
     let src = sm.expr_syntax(expr_id);
     if let Ok(src) = src {
-        // FIXME: it might be nice to have a function (on Analysis?) that goes from Source -> (LineCol, LineCol) directly
-        // But also, we should just turn the type mismatches into diagnostics and provide these
         let root = db.parse_or_expand(src.file_id).unwrap();
         let node = src.map(|e| e.to_node(&root).syntax().clone());
         let original_range = node.as_ref().original_file_range(db);
diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs
index e2e250143c..b759d912c9 100644
--- a/crates/rust-analyzer/src/cli/flags.rs
+++ b/crates/rust-analyzer/src/cli/flags.rs
@@ -102,6 +102,10 @@ xflags::xflags! {
         }
         cmd proc-macro {}
+
+        cmd lsif
+            required path: PathBuf
+        {}
     }
 }
 
@@ -129,6 +133,7 @@ pub enum RustAnalyzerCmd {
     Ssr(Ssr),
     Search(Search),
     ProcMacro(ProcMacro),
+    Lsif(Lsif),
 }
 
 #[derive(Debug)]
@@ -190,6 +195,11 @@ pub struct Search {
 #[derive(Debug)]
 pub struct ProcMacro;
 
+#[derive(Debug)]
+pub struct Lsif {
+    pub path: PathBuf,
+}
+
 impl RustAnalyzer {
     pub const HELP: &'static str = Self::HELP_;
 
diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs
new file mode 100644
index 0000000000..fbd5b642a6
--- /dev/null
+++ b/crates/rust-analyzer/src/cli/lsif.rs
@@ -0,0 +1,116 @@
+//! Lsif generator
+
+use std::env;
+
+use ide::{StaticIndex, StaticIndexedFile};
+use ide_db::LineIndexDatabase;
+
+use ide_db::base_db::salsa::{self, ParallelDatabase};
+use lsp_types::NumberOrString;
+use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
+use vfs::AbsPathBuf;
+
+use crate::cli::lsif::lsif_types::{Document, Vertex};
+use crate::cli::{
+    flags,
+    load_cargo::{load_workspace, LoadCargoConfig},
+    Result,
+};
+use crate::line_index::LineIndex;
+use crate::to_proto;
+
+/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
+struct Snap<DB>(DB);
+impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
+    fn clone(&self) -> Snap<salsa::Snapshot<DB>> {
+        Snap(self.0.snapshot())
+    }
+}
+
+mod lsif_types;
+use lsif_types::*;
+
+#[derive(Default)]
+struct LsifManager {
+    count: i32,
+}
+
+#[derive(Clone, Copy)]
+struct Id(i32);
+
+impl From<Id> for NumberOrString {
+    fn from(Id(x): Id) -> Self {
+        NumberOrString::Number(x)
+    }
+}
+
+impl LsifManager {
+    fn add(&mut self, data: Element) -> Id {
+        let id = Id(self.count);
+        self.emit(&serde_json::to_string(&Entry { id: id.into(), data }).unwrap());
+        self.count += 1;
+        id
+    }
+
+    // FIXME: support file in addition to stdout here
+    fn emit(&self, data: &str) {
+        println!("{}", data);
+    }
+}
+
+impl flags::Lsif {
+    pub fn run(self) -> Result<()> {
+        let cargo_config = CargoConfig::default();
+        let no_progress = &|_| ();
+        let load_cargo_config = LoadCargoConfig {
+            load_out_dirs_from_check: true,
+            with_proc_macro: true,
+            prefill_caches: false,
+        };
+        let path = AbsPathBuf::assert(env::current_dir()?.join(&self.path));
+        let manifest = ProjectManifest::discover_single(&path)?;
+
+        let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
+
+        let (host, vfs, _proc_macro) = load_workspace(workspace, &load_cargo_config)?;
+        let db = host.raw_database();
+        let analysis = host.analysis();
+
+        let si = StaticIndex::compute(db, &analysis)?;
+
+        let mut lsif = LsifManager::default();
+        lsif.add(Element::Vertex(Vertex::MetaData {
+            version: String::from("0.5.0"),
+            project_root: lsp_types::Url::from_file_path(path).unwrap(),
+            position_encoding: Encoding::Utf16,
+            tool_info: None,
+        }));
+        for StaticIndexedFile { file_id, folds } in si.files {
+            let path = vfs.file_path(file_id);
+            let path = path.as_path().unwrap();
+            let doc_id = lsif.add(Element::Vertex(Vertex::Document(Document {
+                language_id: Language::Rust,
+                uri: lsp_types::Url::from_file_path(path).unwrap(),
+            })));
+            let text = analysis.file_text(file_id)?;
+            let line_index = db.line_index(file_id);
+            let result = folds
+                .into_iter()
+                .map(|it| {
+                    to_proto::folding_range(
+                        &*text,
+                        &LineIndex::with_default_options(line_index.clone()),
+                        false,
+                        it,
+                    )
+                })
+                .collect();
+            let folding_id = lsif.add(Element::Vertex(Vertex::FoldingRangeResult { result }));
+            lsif.add(Element::Edge(Edge::FoldingRange(EdgeData {
+                in_v: folding_id.into(),
+                out_v: doc_id.into(),
+            })));
+        }
+        Ok(())
+    }
+}
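
An LSIF dump is a stream of newline-delimited JSON entries: `LsifManager::add` assigns each vertex or edge the next numeric id, prints it as one line of JSON, and returns the id so later edges can reference earlier vertices. A standalone sketch of that emission model (plain serde_json with made-up ids and URIs; not code from this series):

```rust
use serde_json::{json, Value};

struct Emitter {
    count: i32,
}

impl Emitter {
    // Assign the next id, emit the entry as a single JSON line, return the id.
    fn add(&mut self, mut data: Value) -> i32 {
        let id = self.count;
        data["id"] = json!(id);
        println!("{}", data);
        self.count += 1;
        id
    }
}

fn main() {
    let mut e = Emitter { count: 0 };
    let doc = e.add(json!({"type": "vertex", "label": "document",
                           "uri": "file:///x.rs", "languageId": "rust"}));
    let range = e.add(json!({"type": "vertex", "label": "range",
                             "start": {"line": 0, "character": 9},
                             "end": {"line": 0, "character": 12}}));
    // Edges reference previously emitted vertex ids.
    e.add(json!({"type": "edge", "label": "contains", "outV": doc, "inV": range}));
}
```

Because edges only ever point backwards to already-emitted ids, consumers can process the dump line by line without holding the whole graph in memory.
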
diff --git a/crates/rust-analyzer/src/cli/lsif/lsif_types.rs b/crates/rust-analyzer/src/cli/lsif/lsif_types.rs
new file mode 100644
index 0000000000..bd29fd3ad8
--- /dev/null
+++ b/crates/rust-analyzer/src/cli/lsif/lsif_types.rs
@@ -0,0 +1,354 @@
+use lsp_types::FoldingRange;
+use serde::{Deserialize, Serialize};
+
+pub(crate) type RangeId = lsp_types::NumberOrString;
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(untagged)]
+pub(crate) enum LocationOrRangeId {
+    Location(lsp_types::Location),
+    RangeId(RangeId),
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct Entry {
+    pub(crate) id: lsp_types::NumberOrString,
+    #[serde(flatten)]
+    pub(crate) data: Element,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+#[serde(tag = "type")]
+pub(crate) enum Element {
+    Vertex(Vertex),
+    Edge(Edge),
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+pub(crate) struct ToolInfo {
+    name: String,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    args: Option<Vec<String>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    version: Option<String>,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+pub(crate) enum Encoding {
+    /// Currently only 'utf-16' is supported due to the limitations in LSP.
+    #[serde(rename = "utf-16")]
+    Utf16,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+#[serde(tag = "label")]
+pub(crate) enum Vertex {
+    #[serde(rename_all = "camelCase")]
+    MetaData {
+        /// The version of the LSIF format using semver notation. See https://semver.org/. Please note
+        /// the version numbers starting with 0 don't adhere to semver and adopters have to assume
+        /// that each new version is breaking.
+        version: String,
+
+        /// The project root (in form of an URI) used to compute this dump.
+        project_root: lsp_types::Url,
+
+        /// The string encoding used to compute line and character values in
+        /// positions and ranges.
+        position_encoding: Encoding,
+
+        /// Information about the tool that created the dump.
+        #[serde(skip_serializing_if = "Option::is_none")]
+        tool_info: Option<ToolInfo>,
+    },
+    /// https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#the-project-vertex
+    Project(Project),
+    Document(Document),
+    /// https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#ranges
+    Range(lsp_types::Range),
+    /// https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#result-set
+    ResultSet(ResultSet),
+
+    // FIXME: support all kinds of results
+    DefinitionResult {
+        result: DefinitionResultType,
+    },
+    FoldingRangeResult {
+        result: Vec<FoldingRange>,
+    },
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+#[serde(tag = "label")]
+pub(crate) enum Edge {
+    Contains(EdgeData),
+    RefersTo(EdgeData),
+    Item(Item),
+
+    // Methods
+    #[serde(rename = "textDocument/definition")]
+    Definition(EdgeData),
+    #[serde(rename = "textDocument/declaration")]
+    Declaration(EdgeData),
+    #[serde(rename = "textDocument/hover")]
+    Hover(EdgeData),
+    #[serde(rename = "textDocument/references")]
+    References(EdgeData),
+    #[serde(rename = "textDocument/implementation")]
+    Implementation(EdgeData),
+    #[serde(rename = "textDocument/typeDefinition")]
+    TypeDefinition(EdgeData),
+    #[serde(rename = "textDocument/foldingRange")]
+    FoldingRange(EdgeData),
+    #[serde(rename = "textDocument/documentLink")]
+    DocumentLink(EdgeData),
+    #[serde(rename = "textDocument/documentSymbol")]
+    DocumentSymbol(EdgeData),
+    #[serde(rename = "textDocument/diagnostic")]
+    Diagnostic(EdgeData),
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct EdgeData {
+    pub(crate) in_v: lsp_types::NumberOrString,
+    pub(crate) out_v: lsp_types::NumberOrString,
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(untagged)]
+pub(crate) enum DefinitionResultType {
+    Scalar(LocationOrRangeId),
+    Array(LocationOrRangeId),
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+#[serde(tag = "property")]
+pub(crate) enum Item {
+    Definition(EdgeData),
+    Reference(EdgeData),
+}
+
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct Document {
+    pub(crate) uri: lsp_types::Url,
+    pub(crate) language_id: Language,
+}
+
+/// https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#result-set
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct ResultSet {
+    #[serde(skip_serializing_if = "Option::is_none")]
+    key: Option<String>,
+}
+
+/// https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#the-project-vertex
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct Project {
+    project_file: lsp_types::Url,
+    language_id: Language,
+}
+
+/// https://github.com/Microsoft/language-server-protocol/issues/213
+/// For examples, see: https://code.visualstudio.com/docs/languages/identifiers.
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "lowercase")]
+pub(crate) enum Language {
+    Rust,
+    TypeScript,
+    #[serde(other)]
+    Other,
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn metadata() {
+        let data = Entry {
+            id: lsp_types::NumberOrString::Number(1),
+            data: Element::Vertex(Vertex::MetaData {
+                version: String::from("0.4.3"),
+                project_root: lsp_types::Url::from_file_path("/hello/world").unwrap(),
+                position_encoding: Encoding::Utf16,
+                tool_info: Some(ToolInfo {
+                    name: String::from("lsif-tsc"),
+                    args: Some(vec![String::from("-p"), String::from(".")]),
+                    version: Some(String::from("0.7.2")),
+                }),
+            }),
+        };
+        let text = r#"{"id":1,"type":"vertex","label":"metaData","version":"0.4.3","projectRoot":"file:///hello/world","positionEncoding":"utf-16","toolInfo":{"name":"lsif-tsc","args":["-p","."],"version":"0.7.2"}}"#
+            .replace(' ', "");
+        assert_eq!(serde_json::to_string(&data).unwrap(), text);
+        assert_eq!(serde_json::from_str::<Entry>(&text).unwrap(), data);
+    }
+
+    #[test]
+    fn document() {
+        let data = Entry {
+            id: lsp_types::NumberOrString::Number(1),
+            data: Element::Vertex(Vertex::Document(Document {
+                uri: lsp_types::Url::from_file_path("/Users/dirkb/sample.ts").unwrap(),
+                language_id: Language::TypeScript,
+            })),
+        };
+
+        let text = r#"{ "id": 1, "type": "vertex", "label": "document", "uri": "file:///Users/dirkb/sample.ts", "languageId": "typescript" }"#
+            .replace(' ', "");
+
+        assert_eq!(serde_json::to_string(&data).unwrap(), text);
+        assert_eq!(serde_json::from_str::<Entry>(&text).unwrap(), data);
+    }
+
+    #[test]
+    fn range() {
+        let data = Entry {
+            id: lsp_types::NumberOrString::Number(4),
+            data: Element::Vertex(Vertex::Range(lsp_types::Range::new(
+                lsp_types::Position::new(0, 9),
+                lsp_types::Position::new(0, 12),
+            ))),
+        };
+
+        let text = r#"{ "id": 4, "type": "vertex", "label": "range", "start": { "line": 0, "character": 9}, "end": { "line": 0, "character": 12 } }"#
+            .replace(' ', "");
+
+        assert_eq!(serde_json::to_string(&data).unwrap(), text);
+        assert_eq!(serde_json::from_str::<Entry>(&text).unwrap(), data);
+    }
+
+    #[test]
+    fn contains() {
+        let data = Entry {
+            id: lsp_types::NumberOrString::Number(5),
+            data: Element::Edge(Edge::Contains(EdgeData {
+                in_v: lsp_types::NumberOrString::Number(4),
+                out_v: lsp_types::NumberOrString::Number(1),
+            })),
+        };
+
+        let text = r#"{ "id": 5, "type": "edge", "label": "contains", "outV": 1, "inV": 4}"#
+            .replace(' ', "");
+
+        assert_eq!(
+            serde_json::from_str::<serde_json::Value>(&text).unwrap(),
+            serde_json::to_value(&data).unwrap()
+        );
+    }
+
+    #[test]
+    fn refers_to() {
+        let data = Entry {
+            id: lsp_types::NumberOrString::Number(5),
+            data: Element::Edge(Edge::RefersTo(EdgeData {
+                in_v: lsp_types::NumberOrString::Number(2),
+                out_v: lsp_types::NumberOrString::Number(3),
+            })),
+        };
+
+        let text = r#"{ "id": 5, "type": "edge", "label": "refersTo", "outV": 3, "inV": 2}"#
+            .replace(' ', "");
+
+        assert_eq!(
+            serde_json::from_str::<serde_json::Value>(&text).unwrap(),
+            serde_json::to_value(&data).unwrap()
+        );
+    }
+
+    #[test]
+    fn result_set() {
+        let data = Entry {
+            id: lsp_types::NumberOrString::Number(2),
+            data: Element::Vertex(Vertex::ResultSet(ResultSet { key: None })),
+        };
+
+        let text = r#"{ "id": 2, "type": "vertex", "label": "resultSet" }"#.replace(' ', "");
+
+        assert_eq!(serde_json::to_string(&data).unwrap(), text);
+        assert_eq!(serde_json::from_str::<Entry>(&text).unwrap(), data);
+
+        let data = Entry {
+            id: lsp_types::NumberOrString::Number(4),
+            data: Element::Vertex(Vertex::ResultSet(ResultSet {
+                key: Some(String::from("hello")),
+            })),
+        };
+
+        let text = r#"{ "id": 4, "type": "vertex", "label": "resultSet", "key": "hello" }"#
+            .replace(' ', "");
+
+        assert_eq!(serde_json::to_string(&data).unwrap(), text);
+        assert_eq!(serde_json::from_str::<Entry>(&text).unwrap(), data);
+    }
+
+    #[test]
+    fn definition() {
+        let data = Entry {
+            id: lsp_types::NumberOrString::Number(21),
+            data: Element::Edge(Edge::Item(Item::Definition(EdgeData {
+                in_v: lsp_types::NumberOrString::Number(18),
+                out_v: lsp_types::NumberOrString::Number(16),
+            }))),
+        };
+
+        let text = r#"{ "id": 21, "type": "edge", "label": "item", "property": "definition", "outV": 16, "inV": 18}"#
+            .replace(' ', "");
+
+        assert_eq!(
+            serde_json::from_str::<serde_json::Value>(&text).unwrap(),
+            serde_json::to_value(&data).unwrap()
+        );
+    }
+
+    mod methods {
+        use super::*;
+
+        #[test]
+        fn references() {
+            let data = Entry {
+                id: lsp_types::NumberOrString::Number(17),
+                data: Element::Edge(Edge::References(EdgeData {
+                    in_v: lsp_types::NumberOrString::Number(16),
+                    out_v: lsp_types::NumberOrString::Number(15),
+                })),
+            };
+
+            let text = r#"{ "id": 17, "type": "edge", "label": "textDocument/references", "outV": 15, "inV": 16 }"#;
+
+            assert_eq!(
+                serde_json::from_str::<serde_json::Value>(&text).unwrap(),
+                serde_json::to_value(&data).unwrap()
+            );
+        }
+
+        #[test]
+        fn definition() {
+            let data = Entry {
+                id: lsp_types::NumberOrString::Number(13),
+                data: Element::Vertex(Vertex::DefinitionResult {
+                    result: DefinitionResultType::Scalar(LocationOrRangeId::RangeId(
+                        lsp_types::NumberOrString::Number(7),
+                    )),
+                }),
+            };
+
+            let text =
+                r#"{ "id": 13, "type": "vertex", "label": "definitionResult", "result": 7 }"#;
+
+            assert_eq!(
+                serde_json::from_str::<serde_json::Value>(&text).unwrap(),
+                serde_json::to_value(&data).unwrap()
+            );
+        }
+    }
+}
diff --git a/crates/rust-analyzer/src/line_index.rs b/crates/rust-analyzer/src/line_index.rs
index c116414da0..6d46171cc3 100644
--- a/crates/rust-analyzer/src/line_index.rs
+++ b/crates/rust-analyzer/src/line_index.rs
@@ -18,6 +18,12 @@ pub(crate) struct LineIndex {
     pub(crate) encoding: OffsetEncoding,
 }
 
+impl LineIndex {
+    pub(crate) fn with_default_options(index: Arc<ide::LineIndex>) -> Self {
+        Self { index, endings: LineEndings::Unix, encoding: OffsetEncoding::Utf8 }
+    }
+}
+
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub(crate) enum LineEndings {
     Unix,
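
The first patch emits folding ranges with `OffsetEncoding::Utf8`, which the next patch corrects: LSP, and therefore LSIF, counts a position's `character` in UTF-16 code units rather than bytes, so byte offsets drift on any non-ASCII line. A self-contained illustration of the difference (an assumed example, not taken from the patches):

```rust
// LSP positions count UTF-16 code units; UTF-8 byte offsets diverge as soon
// as a line contains a multi-byte character.
fn main() {
    let line = "let α = 1;";
    let byte_offset = line.find('=').unwrap(); // 7: 'α' is two bytes in UTF-8
    let utf16_col = line[..byte_offset].encode_utf16().count(); // 6: 'α' is one code unit
    assert_eq!((byte_offset, utf16_col), (7, 6));
    println!("byte offset {byte_offset}, utf-16 column {utf16_col}");
}
```
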
From 51632018479bab917415768797983c3f768679dd Mon Sep 17 00:00:00 2001
From: hamidreza kalbasi
Date: Wed, 8 Sep 2021 20:26:06 +0430
Subject: [PATCH 02/10] use utf16 instead of utf8

---
 crates/ide/src/static_index.rs                  | 3 +++
 crates/rust-analyzer/src/cli/lsif.rs            | 8 ++++++--
 crates/rust-analyzer/src/cli/lsif/lsif_types.rs | 3 +++
 crates/rust-analyzer/src/line_index.rs          | 6 ------
 4 files changed, 12 insertions(+), 8 deletions(-)

diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index 1b384853be..806230544f 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -1,3 +1,6 @@
+//! This module provides `StaticIndex` which is used for powering
+//! read-only code browsers and emitting LSIF.
+
 use hir::{db::HirDatabase, Crate, Module};
 use ide_db::base_db::{FileId, SourceDatabaseExt};
 use ide_db::RootDatabase;
diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs
index fbd5b642a6..2ba965a15d 100644
--- a/crates/rust-analyzer/src/cli/lsif.rs
+++ b/crates/rust-analyzer/src/cli/lsif.rs
@@ -16,7 +16,7 @@ use crate::cli::{
     load_cargo::{load_workspace, LoadCargoConfig},
     Result,
 };
-use crate::line_index::LineIndex;
+use crate::line_index::{LineEndings, LineIndex, OffsetEncoding};
 use crate::to_proto;
 
 /// Need to wrap Snapshot to provide `Clone` impl for `map_with`
@@ -99,7 +99,11 @@ impl flags::Lsif {
                 .map(|it| {
                     to_proto::folding_range(
                         &*text,
-                        &LineIndex::with_default_options(line_index.clone()),
+                        &LineIndex {
+                            index: line_index.clone(),
+                            encoding: OffsetEncoding::Utf16,
+                            endings: LineEndings::Unix,
+                        },
                         false,
                         it,
                     )
diff --git a/crates/rust-analyzer/src/cli/lsif/lsif_types.rs b/crates/rust-analyzer/src/cli/lsif/lsif_types.rs
index bd29fd3ad8..d84155d8a5 100644
--- a/crates/rust-analyzer/src/cli/lsif/lsif_types.rs
+++ b/crates/rust-analyzer/src/cli/lsif/lsif_types.rs
@@ -1,3 +1,6 @@
+//! This module provides LSIF types. This module is a temporary solution
+//! and it will go to its own repository in the future.
+
 use lsp_types::FoldingRange;
 use serde::{Deserialize, Serialize};
 
diff --git a/crates/rust-analyzer/src/line_index.rs b/crates/rust-analyzer/src/line_index.rs
index 6d46171cc3..c116414da0 100644
--- a/crates/rust-analyzer/src/line_index.rs
+++ b/crates/rust-analyzer/src/line_index.rs
@@ -18,12 +18,6 @@ pub(crate) struct LineIndex {
     pub(crate) encoding: OffsetEncoding,
 }
 
-impl LineIndex {
-    pub(crate) fn with_default_options(index: Arc<ide::LineIndex>) -> Self {
-        Self { index, endings: LineEndings::Unix, encoding: OffsetEncoding::Utf8 }
-    }
-}
-
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub(crate) enum LineEndings {
     Unix,

From 1103e390e049cea91f9b06bad9f0885c7a43cf6c Mon Sep 17 00:00:00 2001
From: hamidreza kalbasi
Date: Thu, 9 Sep 2021 17:57:46 +0430
Subject: [PATCH 03/10] cfg unix for tests with unix path

---
 crates/rust-analyzer/src/cli/lsif/lsif_types.rs | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/crates/rust-analyzer/src/cli/lsif/lsif_types.rs b/crates/rust-analyzer/src/cli/lsif/lsif_types.rs
index d84155d8a5..1681840f29 100644
--- a/crates/rust-analyzer/src/cli/lsif/lsif_types.rs
+++ b/crates/rust-analyzer/src/cli/lsif/lsif_types.rs
@@ -176,6 +176,7 @@ mod tests {
     use super::*;
 
     #[test]
+    #[cfg(unix)]
     fn metadata() {
         let data = Entry {
             id: lsp_types::NumberOrString::Number(1),
@@ -197,6 +198,7 @@ mod tests {
     }
 
     #[test]
+    #[cfg(unix)]
     fn document() {
         let data = Entry {
             id: lsp_types::NumberOrString::Number(1),
From e803bd25c4adf4c5487e7b0f6495dfab07b11daf Mon Sep 17 00:00:00 2001
From: hamidreza kalbasi
Date: Fri, 10 Sep 2021 20:00:53 +0430
Subject: [PATCH 04/10] add hover

---
 crates/ide/src/lib.rs                           |  2 +-
 crates/ide/src/static_index.rs                  | 63 +++++++++++++++----
 crates/rust-analyzer/src/cli/lsif.rs            | 57 ++++++++++++-----
 .../rust-analyzer/src/cli/lsif/lsif_types.rs    | 35 +++++------
 4 files changed, 108 insertions(+), 49 deletions(-)

diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 3879da6d03..5b2384a054 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -87,7 +87,7 @@ pub use crate::{
     references::ReferenceSearchResult,
     rename::RenameError,
     runnables::{Runnable, RunnableKind, TestId},
-    static_index::{StaticIndex, StaticIndexedFile},
+    static_index::{StaticIndex, StaticIndexedFile, TokenStaticData},
     syntax_highlighting::{
         tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
         HlRange,
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index 806230544f..ab7a829bca 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -2,11 +2,13 @@
 //! read-only code browsers and emitting LSIF.
 
 use hir::{db::HirDatabase, Crate, Module};
-use ide_db::base_db::{FileId, SourceDatabaseExt};
+use ide_db::base_db::{FileId, FileRange, SourceDatabaseExt};
 use ide_db::RootDatabase;
 use rustc_hash::FxHashSet;
+use syntax::TextRange;
+use syntax::{AstNode, SyntaxKind::*, T};
 
-use crate::{Analysis, Cancellable, Fold};
+use crate::{Analysis, Cancellable, Fold, HoverConfig, HoverDocFormat, HoverResult};
 
 /// A static representation of fully analyzed source code.
@@ -15,9 +17,15 @@ pub struct StaticIndex {
     pub files: Vec<StaticIndexedFile>,
 }
 
+pub struct TokenStaticData {
+    pub range: TextRange,
+    pub hover: Option<HoverResult>,
+}
+
 pub struct StaticIndexedFile {
     pub file_id: FileId,
     pub folds: Vec<Fold>,
+    pub tokens: Vec<TokenStaticData>,
 }
 
 fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
@@ -46,17 +54,48 @@ impl StaticIndex {
         let mut visited_files = FxHashSet::default();
         let mut result_files = Vec::<StaticIndexedFile>::new();
         for module in work {
             let file_id = module.definition_source(db).file_id.original_file(db);
-            if !visited_files.contains(&file_id) {
-                //let path = vfs.file_path(file_id);
-                //let path = path.as_path().unwrap();
-                //let doc_id = lsif.add(Element::Vertex(Vertex::Document(Document {
-                //    language_id: Language::Rust,
-                //    uri: lsp_types::Url::from_file_path(path).unwrap(),
-                //})));
-                let folds = analysis.folding_ranges(file_id)?;
-                result_files.push(StaticIndexedFile { file_id, folds });
-                visited_files.insert(file_id);
+            if visited_files.contains(&file_id) {
+                continue;
             }
+            let folds = analysis.folding_ranges(file_id)?;
+            // hovers
+            let sema = hir::Semantics::new(db);
+            let tokens_or_nodes = sema.parse(file_id).syntax().clone();
+            let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
+                syntax::NodeOrToken::Node(_) => None,
+                syntax::NodeOrToken::Token(x) => Some(x),
+            });
+            let hover_config =
+                HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
+            let tokens = tokens
+                .filter(|token| match token.kind() {
+                    IDENT
+                    | INT_NUMBER
+                    | LIFETIME_IDENT
+                    | T![self]
+                    | T![super]
+                    | T![crate]
+                    | T!['(']
+                    | T![')'] => true,
+                    _ => false,
+                })
+                .map(|token| {
+                    let range = token.text_range();
+                    let hover = analysis
+                        .hover(
+                            &hover_config,
+                            FileRange {
+                                file_id,
+                                range: TextRange::new(range.start(), range.start()),
+                            },
+                        )?
+                        .map(|x| x.info);
+                    Ok(TokenStaticData { range, hover })
+                })
+                .collect::<Result<Vec<_>, _>>()?;
+            result_files.push(StaticIndexedFile { file_id, folds, tokens });
+            // mark the file
+            visited_files.insert(file_id);
         }
         Ok(StaticIndex { files: result_files })
     }
 }
diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs
index 2ba965a15d..12a9919e36 100644
--- a/crates/rust-analyzer/src/cli/lsif.rs
+++ b/crates/rust-analyzer/src/cli/lsif.rs
@@ -2,11 +2,11 @@
 
 use std::env;
 
-use ide::{StaticIndex, StaticIndexedFile};
+use ide::{StaticIndex, StaticIndexedFile, TokenStaticData};
 use ide_db::LineIndexDatabase;
 
 use ide_db::base_db::salsa::{self, ParallelDatabase};
-use lsp_types::NumberOrString;
+use lsp_types::{Hover, HoverContents, NumberOrString};
 use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
 use vfs::AbsPathBuf;
 
@@ -56,6 +56,38 @@ impl LsifManager {
     fn emit(&self, data: &str) {
         println!("{}", data);
     }
+
+    fn add_tokens(
+        &mut self,
+        line_index: &LineIndex,
+        doc_id: Id,
+        tokens: Vec<TokenStaticData>,
+    ) {
+        let tokens_id = tokens
+            .into_iter()
+            .map(|token| {
+                let token_id = self
+                    .add(Element::Vertex(Vertex::Range(to_proto::range(line_index, token.range))));
+                if let Some(hover) = token.hover {
+                    let hover_id = self.add(Element::Vertex(Vertex::HoverResult {
+                        result: Hover {
+                            contents: HoverContents::Markup(to_proto::markup_content(hover.markup)),
+                            range: None,
+                        },
+                    }));
+                    self.add(Element::Edge(Edge::Hover(EdgeData {
+                        in_v: hover_id.into(),
+                        out_v: token_id.into(),
+                    })));
+                }
+                token_id.into()
+            })
+            .collect();
+        self.add(Element::Edge(Edge::Contains(EdgeDataMultiIn {
+            in_vs: tokens_id,
+            out_v: doc_id.into(),
+        })));
+    }
 }
 
@@ -85,7 +117,7 @@ impl flags::Lsif {
             position_encoding: Encoding::Utf16,
             tool_info: None,
         }));
-        for StaticIndexedFile { file_id, folds } in si.files {
+        for StaticIndexedFile { file_id, folds, tokens } in si.files {
             let path = vfs.file_path(file_id);
             let path = path.as_path().unwrap();
             let doc_id = lsif.add(Element::Vertex(Vertex::Document(Document {
@@ -94,26 +126,21 @@ impl flags::Lsif {
             })));
             let text = analysis.file_text(file_id)?;
             let line_index = db.line_index(file_id);
+            let line_index = LineIndex {
+                index: line_index.clone(),
+                encoding: OffsetEncoding::Utf16,
+                endings: LineEndings::Unix,
+            };
             let result = folds
                 .into_iter()
-                .map(|it| {
-                    to_proto::folding_range(
-                        &*text,
-                        &LineIndex {
-                            index: line_index.clone(),
-                            encoding: OffsetEncoding::Utf16,
-                            endings: LineEndings::Unix,
-                        },
-                        false,
-                        it,
-                    )
-                })
+                .map(|it| to_proto::folding_range(&*text, &line_index, false, it))
                 .collect();
             let folding_id = lsif.add(Element::Vertex(Vertex::FoldingRangeResult { result }));
             lsif.add(Element::Edge(Edge::FoldingRange(EdgeData {
                 in_v: folding_id.into(),
                 out_v: doc_id.into(),
             })));
+            lsif.add_tokens(&line_index, doc_id, tokens);
         }
         Ok(())
     }
diff --git a/crates/rust-analyzer/src/cli/lsif/lsif_types.rs b/crates/rust-analyzer/src/cli/lsif/lsif_types.rs
index 1681840f29..7bb59f6429 100644
--- a/crates/rust-analyzer/src/cli/lsif/lsif_types.rs
+++ b/crates/rust-analyzer/src/cli/lsif/lsif_types.rs
@@ -1,7 +1,7 @@
 //! This module provides LSIF types. This module is a temporary solution
 //! and it will go to its own repository in the future.
 
-use lsp_types::FoldingRange;
+use lsp_types::{FoldingRange, Hover};
 use serde::{Deserialize, Serialize};
 
 pub(crate) type RangeId = lsp_types::NumberOrString;
@@ -82,13 +82,16 @@ pub(crate) enum Vertex {
     FoldingRangeResult {
         result: Vec<FoldingRange>,
     },
+    HoverResult {
+        result: Hover,
+    }
 }
 
 #[derive(Debug, PartialEq, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 #[serde(tag = "label")]
 pub(crate) enum Edge {
-    Contains(EdgeData),
+    Contains(EdgeDataMultiIn),
     RefersTo(EdgeData),
     Item(Item),
 
@@ -122,6 +125,15 @@ pub(crate) struct EdgeData {
     pub(crate) out_v: lsp_types::NumberOrString,
 }
 
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct EdgeDataMultiIn {
+    pub(crate) in_vs: Vec<lsp_types::NumberOrString>,
+    pub(crate) out_v: lsp_types::NumberOrString,
+}
+
+
+
 #[derive(Debug, PartialEq, Serialize, Deserialize)]
 #[serde(untagged)]
 pub(crate) enum DefinitionResultType {
@@ -232,25 +244,6 @@ mod tests {
         assert_eq!(serde_json::to_string(&data).unwrap(), text);
         assert_eq!(serde_json::from_str::<Entry>(&text).unwrap(), data);
     }
 
-    #[test]
-    fn contains() {
-        let data = Entry {
-            id: lsp_types::NumberOrString::Number(5),
-            data: Element::Edge(Edge::Contains(EdgeData {
-                in_v: lsp_types::NumberOrString::Number(4),
-                out_v: lsp_types::NumberOrString::Number(1),
-            })),
-        };
-
-        let text = r#"{ "id": 5, "type": "edge", "label": "contains", "outV": 1, "inV": 4}"#
-            .replace(' ', "");
-
-        assert_eq!(
-            serde_json::from_str::<serde_json::Value>(&text).unwrap(),
-            serde_json::to_value(&data).unwrap()
-        );
-    }
-
     #[test]
     fn refers_to() {
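
With this patch, every indexed token yields a range vertex, a hoverResult vertex, and a `textDocument/hover` edge between them, and all of a file's ranges hang off its document vertex through a single multi-in `contains` edge. Roughly, in the serialization style of the tests above (illustrative ids and hover text, not output captured from the tool):

```rust
// Sketch of the three entries emitted for one hovered token; the exact field
// order and ids are made up for illustration.
fn main() {
    let entries = [
        r#"{"id":4,"type":"vertex","label":"range","start":{"line":0,"character":9},"end":{"line":0,"character":12}}"#,
        r#"{"id":5,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"fn main()"}}}"#,
        r#"{"id":6,"type":"edge","label":"textDocument/hover","inV":5,"outV":4}"#,
    ];
    for entry in entries {
        assert!(serde_json::from_str::<serde_json::Value>(entry).is_ok());
        println!("{}", entry);
    }
}
```
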
From 70061d2b7e7e2ec9d7b0704f18c488985dcd7417 Mon Sep 17 00:00:00 2001
From: hamidreza kalbasi
Date: Sat, 18 Sep 2021 19:19:24 +0430
Subject: [PATCH 05/10] move lsif types to lsp types crate

---
 Cargo.lock                                    |   3 +-
 crates/rust-analyzer/Cargo.toml               |   2 +-
 crates/rust-analyzer/src/cli/lsif.rs          |  29 +-
 .../rust-analyzer/src/cli/lsif/lsif_types.rs  | 352 ------------------
 4 files changed, 15 insertions(+), 371 deletions(-)
 delete mode 100644 crates/rust-analyzer/src/cli/lsif/lsif_types.rs

diff --git a/Cargo.lock b/Cargo.lock
index 9170ec3a66..2055512a9d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -863,8 +863,7 @@ dependencies = [
 [[package]]
 name = "lsp-types"
 version = "0.89.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "852e0dedfd52cc32325598b2631e0eba31b7b708959676a9f837042f276b09a2"
+source = "git+https://github.com/gluon-lang/lsp-types#6b43b1f7184ab379f0a6f89673bb10a8de11d12f"
 dependencies = [
  "bitflags",
  "serde",
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 1dfa23414d..f501679510 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -22,7 +22,7 @@ crossbeam-channel = "0.5.0"
 dissimilar = "1.0.2"
 itertools = "0.10.0"
 jod-thread = "0.1.0"
-lsp-types = { version = "0.89.0", features = ["proposed"] }
+lsp-types = { git = "https://github.com/gluon-lang/lsp-types", features = ["proposed"] }
 parking_lot = "0.11.0"
 xflags = "0.2.1"
 oorandom = "11.1.2"
diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs
index 12a9919e36..9d7d48f0bb 100644
--- a/crates/rust-analyzer/src/cli/lsif.rs
+++ b/crates/rust-analyzer/src/cli/lsif.rs
@@ -1,16 +1,16 @@
 //! Lsif generator
 
 use std::env;
+use std::time::Instant;
 
 use ide::{StaticIndex, StaticIndexedFile, TokenStaticData};
 use ide_db::LineIndexDatabase;
 
 use ide_db::base_db::salsa::{self, ParallelDatabase};
-use lsp_types::{Hover, HoverContents, NumberOrString};
+use lsp_types::{lsif::*, Hover, HoverContents, NumberOrString};
 use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
 use vfs::AbsPathBuf;
 
-use crate::cli::lsif::lsif_types::{Document, Vertex};
 use crate::cli::{
     flags,
     load_cargo::{load_workspace, LoadCargoConfig},
     Result,
 };
@@ -27,9 +27,6 @@ impl Clone for Snap<salsa::Snapshot<DB>> {
     }
 }
 
-mod lsif_types;
-use lsif_types::*;
-
 #[derive(Default)]
 struct LsifManager {
     count: i32,
@@ -54,17 +51,14 @@ impl LsifManager {
         println!("{}", data);
     }
 
-    fn add_tokens(
-        &mut self,
-        line_index: &LineIndex,
-        doc_id: Id,
-        tokens: Vec<TokenStaticData>,
-    ) {
+    fn add_tokens(&mut self, line_index: &LineIndex, doc_id: Id, tokens: Vec<TokenStaticData>) {
         let tokens_id = tokens
             .into_iter()
             .map(|token| {
-                let token_id = self
-                    .add(Element::Vertex(Vertex::Range(to_proto::range(line_index, token.range))));
+                let token_id = self.add(Element::Vertex(Vertex::Range {
+                    range: to_proto::range(line_index, token.range),
+                    tag: None,
+                }));
                 if let Some(hover) = token.hover {
                     let hover_id = self.add(Element::Vertex(Vertex::HoverResult {
                         result: Hover {
@@ -83,6 +77,8 @@ impl LsifManager {
 
 impl flags::Lsif {
     pub fn run(self) -> Result<()> {
+        eprintln!("Generating LSIF started...");
+        let now = Instant::now();
         let cargo_config = CargoConfig::default();
         let no_progress = &|_| ();
         let load_cargo_config = LoadCargoConfig {
@@ -106,17 +102,17 @@ impl flags::Lsif {
         let si = StaticIndex::compute(db, &analysis)?;
 
         let mut lsif = LsifManager::default();
-        lsif.add(Element::Vertex(Vertex::MetaData {
+        lsif.add(Element::Vertex(Vertex::MetaData(MetaData {
            version: String::from("0.5.0"),
            project_root: lsp_types::Url::from_file_path(path).unwrap(),
            position_encoding: Encoding::Utf16,
            tool_info: None,
-        }));
+        })));
         for StaticIndexedFile { file_id, folds, tokens } in si.files {
             let path = vfs.file_path(file_id);
             let path = path.as_path().unwrap();
             let doc_id = lsif.add(Element::Vertex(Vertex::Document(Document {
-                language_id: Language::Rust,
+                language_id: "rust".to_string(),
                 uri: lsp_types::Url::from_file_path(path).unwrap(),
             })));
@@ -138,6 +134,7 @@ impl flags::Lsif {
             })));
             lsif.add_tokens(&line_index, doc_id, tokens);
         }
+        eprintln!("Generating LSIF finished in {:?}", now.elapsed());
         Ok(())
     }
 }
diff --git a/crates/rust-analyzer/src/cli/lsif/lsif_types.rs b/crates/rust-analyzer/src/cli/lsif/lsif_types.rs
deleted file mode 100644
index 7bb59f6429..0000000000
--- a/crates/rust-analyzer/src/cli/lsif/lsif_types.rs
+++ /dev/null
@@ -1,352 +0,0 @@
-//! This module provides LSIF types. This module is a temporary solution
-//! and it will go to its own repository in the future.
-
-use lsp_types::{FoldingRange, Hover};
-use serde::{Deserialize, Serialize};
-
-pub(crate) type RangeId = lsp_types::NumberOrString;
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(untagged)]
-pub(crate) enum LocationOrRangeId {
-    Location(lsp_types::Location),
-    RangeId(RangeId),
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub(crate) struct Entry {
-    pub(crate) id: lsp_types::NumberOrString,
-    #[serde(flatten)]
-    pub(crate) data: Element,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-#[serde(tag = "type")]
-pub(crate) enum Element {
-    Vertex(Vertex),
-    Edge(Edge),
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-pub(crate) struct ToolInfo {
-    name: String,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    args: Option<Vec<String>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    version: Option<String>,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-pub(crate) enum Encoding {
-    /// Currently only 'utf-16' is supported due to the limitations in LSP.
-    #[serde(rename = "utf-16")]
-    Utf16,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-#[serde(tag = "label")]
-pub(crate) enum Vertex {
-    #[serde(rename_all = "camelCase")]
-    MetaData {
-        /// The version of the LSIF format using semver notation. See https://semver.org/. Please note
-        /// the version numbers starting with 0 don't adhere to semver and adopters have to assume
-        /// that each new version is breaking.
-        version: String,
-
-        /// The project root (in form of an URI) used to compute this dump.
-        project_root: lsp_types::Url,
-
-        /// The string encoding used to compute line and character values in
-        /// positions and ranges.
-        position_encoding: Encoding,
-
-        /// Information about the tool that created the dump.
-        #[serde(skip_serializing_if = "Option::is_none")]
-        tool_info: Option<ToolInfo>,
-    },
-    /// https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#the-project-vertex
-    Project(Project),
-    Document(Document),
-    /// https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#ranges
-    Range(lsp_types::Range),
-    /// https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#result-set
-    ResultSet(ResultSet),
-
-    // FIXME: support all kinds of results
-    DefinitionResult {
-        result: DefinitionResultType,
-    },
-    FoldingRangeResult {
-        result: Vec<FoldingRange>,
-    },
-    HoverResult {
-        result: Hover,
-    }
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-#[serde(tag = "label")]
-pub(crate) enum Edge {
-    Contains(EdgeDataMultiIn),
-    RefersTo(EdgeData),
-    Item(Item),
-
-    // Methods
-    #[serde(rename = "textDocument/definition")]
-    Definition(EdgeData),
-    #[serde(rename = "textDocument/declaration")]
-    Declaration(EdgeData),
-    #[serde(rename = "textDocument/hover")]
-    Hover(EdgeData),
-    #[serde(rename = "textDocument/references")]
-    References(EdgeData),
-    #[serde(rename = "textDocument/implementation")]
-    Implementation(EdgeData),
-    #[serde(rename = "textDocument/typeDefinition")]
-    TypeDefinition(EdgeData),
-    #[serde(rename = "textDocument/foldingRange")]
-    FoldingRange(EdgeData),
-    #[serde(rename = "textDocument/documentLink")]
-    DocumentLink(EdgeData),
-    #[serde(rename = "textDocument/documentSymbol")]
-    DocumentSymbol(EdgeData),
-    #[serde(rename = "textDocument/diagnostic")]
-    Diagnostic(EdgeData),
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub(crate) struct EdgeData {
-    pub(crate) in_v: lsp_types::NumberOrString,
-    pub(crate) out_v: lsp_types::NumberOrString,
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub(crate) struct EdgeDataMultiIn {
-    pub(crate) in_vs: Vec<lsp_types::NumberOrString>,
-    pub(crate) out_v: lsp_types::NumberOrString,
-}
-
-
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(untagged)]
-pub(crate) enum DefinitionResultType {
-    Scalar(LocationOrRangeId),
-    Array(LocationOrRangeId),
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-#[serde(tag = "property")]
-pub(crate) enum Item {
-    Definition(EdgeData),
-    Reference(EdgeData),
-}
-
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub(crate) struct Document {
-    pub(crate) uri: lsp_types::Url,
-    pub(crate) language_id: Language,
-}
-
-/// https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#result-set
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub(crate) struct ResultSet {
-    #[serde(skip_serializing_if = "Option::is_none")]
-    key: Option<String>,
-}
-
-/// https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#the-project-vertex
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub(crate) struct Project {
-    project_file: lsp_types::Url,
-    language_id: Language,
-}
-
-/// https://github.com/Microsoft/language-server-protocol/issues/213
-/// For examples, see: https://code.visualstudio.com/docs/languages/identifiers.
-#[derive(Debug, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "lowercase")]
-pub(crate) enum Language {
-    Rust,
-    TypeScript,
-    #[serde(other)]
-    Other,
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    #[cfg(unix)]
-    fn metadata() {
-        let data = Entry {
-            id: lsp_types::NumberOrString::Number(1),
-            data: Element::Vertex(Vertex::MetaData {
-                version: String::from("0.4.3"),
-                project_root: lsp_types::Url::from_file_path("/hello/world").unwrap(),
-                position_encoding: Encoding::Utf16,
-                tool_info: Some(ToolInfo {
-                    name: String::from("lsif-tsc"),
-                    args: Some(vec![String::from("-p"), String::from(".")]),
-                    version: Some(String::from("0.7.2")),
-                }),
-            }),
-        };
-        let text = r#"{"id":1,"type":"vertex","label":"metaData","version":"0.4.3","projectRoot":"file:///hello/world","positionEncoding":"utf-16","toolInfo":{"name":"lsif-tsc","args":["-p","."],"version":"0.7.2"}}"#
-            .replace(' ', "");
-        assert_eq!(serde_json::to_string(&data).unwrap(), text);
-        assert_eq!(serde_json::from_str::<Entry>(&text).unwrap(), data);
-    }
-
-    #[test]
-    #[cfg(unix)]
-    fn document() {
-        let data = Entry {
-            id: lsp_types::NumberOrString::Number(1),
-            data: Element::Vertex(Vertex::Document(Document {
-                uri: lsp_types::Url::from_file_path("/Users/dirkb/sample.ts").unwrap(),
-                language_id: Language::TypeScript,
-            })),
-        };
-
-        let text = r#"{ "id": 1, "type": "vertex", "label": "document", "uri": "file:///Users/dirkb/sample.ts", "languageId": "typescript" }"#
-            .replace(' ', "");
-
-        assert_eq!(serde_json::to_string(&data).unwrap(), text);
-        assert_eq!(serde_json::from_str::<Entry>(&text).unwrap(), data);
-    }
-
-    #[test]
-    fn range() {
-        let data = Entry {
-            id: lsp_types::NumberOrString::Number(4),
-            data: Element::Vertex(Vertex::Range(lsp_types::Range::new(
-                lsp_types::Position::new(0, 9),
-                lsp_types::Position::new(0, 12),
-            ))),
-        };
-
-        let text = r#"{ "id": 4, "type": "vertex", "label": "range", "start": { "line": 0, "character": 9}, "end": { "line": 0, "character": 12 } }"#
-            .replace(' ', "");
-
-        assert_eq!(serde_json::to_string(&data).unwrap(), text);
-        assert_eq!(serde_json::from_str::<Entry>(&text).unwrap(), data);
-    }
-
-    #[test]
-    fn refers_to() {
-        let data = Entry {
-            id: lsp_types::NumberOrString::Number(5),
-            data: Element::Edge(Edge::RefersTo(EdgeData {
-                in_v: lsp_types::NumberOrString::Number(2),
-                out_v: lsp_types::NumberOrString::Number(3),
-            })),
-        };
-
-        let text = r#"{ "id": 5, "type": "edge", "label": "refersTo", "outV": 3, "inV": 2}"#
-            .replace(' ', "");
-
-        assert_eq!(
-            serde_json::from_str::<serde_json::Value>(&text).unwrap(),
-            serde_json::to_value(&data).unwrap()
-        );
-    }
-
-    #[test]
-    fn result_set() {
-        let data = Entry {
-            id: lsp_types::NumberOrString::Number(2),
-            data: Element::Vertex(Vertex::ResultSet(ResultSet { key: None })),
-        };
-
-        let text = r#"{ "id": 2, "type": "vertex", "label": "resultSet" }"#.replace(' ', "");
-
-        assert_eq!(serde_json::to_string(&data).unwrap(), text);
-        assert_eq!(serde_json::from_str::<Entry>(&text).unwrap(), data);
-
-        let data = Entry {
-            id: lsp_types::NumberOrString::Number(4),
-            data: Element::Vertex(Vertex::ResultSet(ResultSet {
-                key: Some(String::from("hello")),
-            })),
-        };
-
-        let text = r#"{ "id": 4, "type": "vertex", "label": "resultSet", "key": "hello" }"#
-            .replace(' ', "");
-
-        assert_eq!(serde_json::to_string(&data).unwrap(), text);
-        assert_eq!(serde_json::from_str::<Entry>(&text).unwrap(), data);
-    }
-
-    #[test]
-    fn definition() {
-        let data = Entry {
-            id: lsp_types::NumberOrString::Number(21),
-            data: Element::Edge(Edge::Item(Item::Definition(EdgeData {
-                in_v: lsp_types::NumberOrString::Number(18),
-                out_v: lsp_types::NumberOrString::Number(16),
-            }))),
-        };
-
-        let text = r#"{ "id": 21, "type": "edge", "label": "item", "property": "definition", "outV": 16, "inV": 18}"#
-            .replace(' ', "");
-
-        assert_eq!(
-            serde_json::from_str::<serde_json::Value>(&text).unwrap(),
-            serde_json::to_value(&data).unwrap()
-        );
-    }
-
-    mod methods {
-        use super::*;
-
-        #[test]
-        fn references() {
-            let data = Entry {
-                id: lsp_types::NumberOrString::Number(17),
-                data: Element::Edge(Edge::References(EdgeData {
-                    in_v: lsp_types::NumberOrString::Number(16),
-                    out_v: lsp_types::NumberOrString::Number(15),
-                })),
-            };
-
-            let text = r#"{ "id": 17, "type": "edge", "label": "textDocument/references", "outV": 15, "inV": 16 }"#;
-
-            assert_eq!(
-                serde_json::from_str::<serde_json::Value>(&text).unwrap(),
-                serde_json::to_value(&data).unwrap()
-            );
-        }
-
-        #[test]
-        fn definition() {
-            let data = Entry {
-                id: lsp_types::NumberOrString::Number(13),
-                data: Element::Vertex(Vertex::DefinitionResult {
-                    result: DefinitionResultType::Scalar(LocationOrRangeId::RangeId(
-                        lsp_types::NumberOrString::Number(7),
-                    )),
-                }),
-            };
-
-            let text =
-                r#"{ "id": 13, "type": "vertex", "label": "definitionResult", "result": 7 }"#;
-
-            assert_eq!(
-                serde_json::from_str::<serde_json::Value>(&text).unwrap(),
-                serde_json::to_value(&data).unwrap()
-            );
-        }
-    }
-}
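
The next patch removes a duplication left by patch 04: two occurrences of the same identifier currently get two identical hoverResult vertices. LSIF's answer is the resultSet vertex: each range points, via a `next` edge, to a shared resultSet, and the hover (and later definition/reference) results hang off that set exactly once. The bookkeeping is a map from resolved definition to token id, as in this standalone sketch (stand-in types, not the patch's code):

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct Definition(u32); // stand-in for ide_db::defs::Definition

#[derive(Clone, Copy, Debug)]
struct TokenId(usize);

fn main() {
    let mut tokens: Vec<&str> = Vec::new(); // stand-in for TokenStore
    let mut def_map: HashMap<Definition, TokenId> = HashMap::new();
    // Three token occurrences, but only two distinct definitions:
    for def in [Definition(1), Definition(2), Definition(1)] {
        let id = *def_map.entry(def).or_insert_with(|| {
            tokens.push("hover computed once per definition");
            TokenId(tokens.len() - 1)
        });
        println!("occurrence -> shared token id {:?}", id);
    }
    assert_eq!(tokens.len(), 2); // Definition(1) reused its entry
}
```

The same map also makes the dump smaller: a heavily used symbol contributes one resultSet regardless of how many times it appears.
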
From f2775ac2e955218321e9dce2ca0580e3e6e505bb Mon Sep 17 00:00:00 2001
From: hamidreza kalbasi
Date: Sat, 18 Sep 2021 22:14:47 +0430
Subject: [PATCH 06/10] reuse hover results with resultset

---
 crates/ide/src/lib.rs                |   2 +-
 crates/ide/src/static_index.rs       | 142 ++++++++++++++++++---------
 crates/rust-analyzer/src/cli/lsif.rs | 123 ++++++++++++++---------
 3 files changed, 173 insertions(+), 94 deletions(-)

diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 5b2384a054..d50680ce14 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -87,7 +87,7 @@ pub use crate::{
     references::ReferenceSearchResult,
     rename::RenameError,
     runnables::{Runnable, RunnableKind, TestId},
-    static_index::{StaticIndex, StaticIndexedFile, TokenStaticData},
+    static_index::{StaticIndex, StaticIndexedFile, TokenStaticData, TokenId},
     syntax_highlighting::{
         tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
         HlRange,
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index ab7a829bca..bd71177990 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -1,31 +1,62 @@
 //! This module provides `StaticIndex` which is used for powering
 //! read-only code browsers and emitting LSIF.
 
+use std::collections::HashMap;
+
 use hir::{db::HirDatabase, Crate, Module};
-use ide_db::base_db::{FileId, FileRange, SourceDatabaseExt};
+use ide_db::base_db::{FileId, SourceDatabaseExt};
 use ide_db::RootDatabase;
+use ide_db::defs::Definition;
 use rustc_hash::FxHashSet;
 use syntax::TextRange;
 use syntax::{AstNode, SyntaxKind::*, T};
 
+use crate::hover::{get_definition_of_token, hover_for_definition};
 use crate::{Analysis, Cancellable, Fold, HoverConfig, HoverDocFormat, HoverResult};
 
 /// A static representation of fully analyzed source code.
 ///
 /// The intended use-case is powering read-only code browsers and emitting LSIF.
-pub struct StaticIndex {
+pub struct StaticIndex<'a> {
     pub files: Vec<StaticIndexedFile>,
+    pub tokens: TokenStore,
+    analysis: &'a Analysis,
+    db: &'a RootDatabase,
+    def_map: HashMap<Definition, TokenId>,
 }
 
 pub struct TokenStaticData {
-    pub range: TextRange,
     pub hover: Option<HoverResult>,
 }
 
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TokenId(usize);
+
+#[derive(Default)]
+pub struct TokenStore(Vec<TokenStaticData>);
+
+impl TokenStore {
+    pub fn insert(&mut self, data: TokenStaticData) -> TokenId {
+        let id = TokenId(self.0.len());
+        self.0.push(data);
+        id
+    }
+
+    pub fn get(&self, id: TokenId) -> Option<&TokenStaticData> {
+        self.0.get(id.0)
+    }
+
+    pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
+        self.0.into_iter().enumerate().map(|(i, x)| {
+            (TokenId(i), x)
+        })
+    }
+}
+
 pub struct StaticIndexedFile {
     pub file_id: FileId,
     pub folds: Vec<Fold>,
-    pub tokens: Vec<TokenStaticData>,
+    pub tokens: Vec<(TextRange, TokenId)>,
 }
 
 fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
@@ -41,62 +72,78 @@ fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
     modules
 }
 
-impl StaticIndex {
-    pub fn compute(db: &RootDatabase, analysis: &Analysis) -> Cancellable<StaticIndex> {
+impl StaticIndex<'_> {
+    fn add_file(&mut self, file_id: FileId) -> Cancellable<()> {
+        let folds = self.analysis.folding_ranges(file_id)?;
+        // hovers
+        let sema = hir::Semantics::new(self.db);
+        let tokens_or_nodes = sema.parse(file_id).syntax().clone();
+        let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
+            syntax::NodeOrToken::Node(_) => None,
+            syntax::NodeOrToken::Token(x) => Some(x),
+        });
+        let hover_config =
+            HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
+        let tokens = tokens
+            .filter(|token| match token.kind() {
+                IDENT
+                | INT_NUMBER
+                | LIFETIME_IDENT
+                | T![self]
+                | T![super]
+                | T![crate] => true,
+                _ => false,
+            });
+        let mut result = StaticIndexedFile {
+            file_id,
+            folds,
+            tokens: vec![],
+        };
+        for token in tokens {
+            let range = token.text_range();
+            let node = token.parent().unwrap();
+            let def = get_definition_of_token(self.db, &sema, &sema.descend_into_macros(token), file_id, range.start(), &mut None);
+            let def = if let Some(x) = def {
+                x
+            } else {
+                continue;
+            };
+            let id = if let Some(x) = self.def_map.get(&def) {
+                *x
+            } else {
+                let x = self.tokens.insert(TokenStaticData {
+                    hover: hover_for_definition(self.db, file_id, &sema, def, node, &hover_config),
+                });
+                self.def_map.insert(def, x);
+                x
+            };
+            result.tokens.push((range, id));
+        }
+        self.files.push(result);
+        Ok(())
+    }
+
+    pub fn compute<'a>(db: &'a RootDatabase, analysis: &'a Analysis) -> Cancellable<StaticIndex<'a>> {
         let work = all_modules(db).into_iter().filter(|module| {
             let file_id = module.definition_source(db).file_id.original_file(db);
             let source_root = db.file_source_root(file_id);
             let source_root = db.source_root(source_root);
             !source_root.is_library
         });
-
+        let mut this = StaticIndex {
+            files: vec![],
+            tokens: Default::default(),
+            analysis,
+            db,
+            def_map: Default::default(),
+        };
         let mut visited_files = FxHashSet::default();
-        let mut result_files = Vec::<StaticIndexedFile>::new();
         for module in work {
             let file_id = module.definition_source(db).file_id.original_file(db);
             if visited_files.contains(&file_id) {
                 continue;
             }
-            let folds = analysis.folding_ranges(file_id)?;
-            // hovers
-            let sema = hir::Semantics::new(db);
-            let tokens_or_nodes = sema.parse(file_id).syntax().clone();
-            let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
-                syntax::NodeOrToken::Node(_) => None,
-                syntax::NodeOrToken::Token(x) => Some(x),
-            });
-            let hover_config =
-                HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
-            let tokens = tokens
-                .filter(|token| match token.kind() {
-                    IDENT
-                    | INT_NUMBER
-                    | LIFETIME_IDENT
-                    | T![self]
-                    | T![super]
-                    | T![crate]
-                    | T!['(']
-                    | T![')'] => true,
-                    _ => false,
-                })
-                .map(|token| {
-                    let range = token.text_range();
-                    let hover = analysis
-                        .hover(
-                            &hover_config,
-                            FileRange {
-                                file_id,
-                                range: TextRange::new(range.start(), range.start()),
-                            },
-                        )?
-                        .map(|x| x.info);
-                    Ok(TokenStaticData { range, hover })
-                })
-                .collect::<Result<Vec<_>, _>>()?;
-            result_files.push(StaticIndexedFile { file_id, folds, tokens });
+            this.add_file(file_id)?;
             // mark the file
             visited_files.insert(file_id);
         }
-        Ok(StaticIndex { files: result_files })
+        //eprintln!("{:#?}", token_map);
+        Ok(this)
     }
 }
diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs
index 9d7d48f0bb..509842516a 100644
--- a/crates/rust-analyzer/src/cli/lsif.rs
+++ b/crates/rust-analyzer/src/cli/lsif.rs
@@ -1,16 +1,17 @@
 //! Lsif generator
 
+use std::collections::HashMap;
 use std::env;
 use std::time::Instant;
 
-use ide::{StaticIndex, StaticIndexedFile, TokenStaticData};
+use ide::{Analysis, Cancellable, RootDatabase, StaticIndex, StaticIndexedFile, TokenId, TokenStaticData};
 use ide_db::LineIndexDatabase;
 
 use ide_db::base_db::salsa::{self, ParallelDatabase};
 use lsp_types::{lsif::*, Hover, HoverContents, NumberOrString};
 use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
-use vfs::AbsPathBuf;
+use vfs::{AbsPathBuf, Vfs};
 
@@ -28,9 +29,12 @@ impl Clone for Snap<salsa::Snapshot<DB>> {
     }
 }
 
-#[derive(Default)]
-struct LsifManager {
+struct LsifManager<'a> {
     count: i32,
+    token_map: HashMap<TokenId, Id>,
+    analysis: &'a Analysis,
+    db: &'a RootDatabase,
+    vfs: &'a Vfs,
 }
 
 #[derive(Clone, Copy)]
@@ -41,7 +45,17 @@ impl From<Id> for NumberOrString {
     }
 }
 
-impl LsifManager {
+impl LsifManager<'_> {
+    fn new<'a>(analysis: &'a Analysis, db: &'a RootDatabase, vfs: &'a Vfs) -> LsifManager<'a> {
+        LsifManager {
+            count: 0,
+            token_map: HashMap::default(),
+            analysis,
+            db,
+            vfs,
+        }
+    }
+
     fn add(&mut self, data: Element) -> Id {
         let id = Id(self.count);
         self.emit(&serde_json::to_string(&Entry { id: id.into(), data }).unwrap());
@@ -56,31 +70,67 @@ impl LsifManager<'_> {
         println!("{}", data);
     }
 
-    fn add_tokens(&mut self, line_index: &LineIndex, doc_id: Id, tokens: Vec<TokenStaticData>) {
-        let tokens_id = tokens
-            .into_iter()
-            .map(|token| {
-                let token_id = self.add(Element::Vertex(Vertex::Range {
-                    range: to_proto::range(line_index, token.range),
-                    tag: None,
-                }));
-                if let Some(hover) = token.hover {
-                    let hover_id = self.add(Element::Vertex(Vertex::HoverResult {
-                        result: Hover {
-                            contents: HoverContents::Markup(to_proto::markup_content(hover.markup)),
-                            range: None,
-                        },
-                    }));
-                    self.add(Element::Edge(Edge::Hover(EdgeData {
-                        in_v: hover_id.into(),
-                        out_v: token_id.into(),
-                    })));
-                }
-                token_id.into()
-            })
-            .collect();
-        self.add(Element::Edge(Edge::Contains(EdgeDataMultiIn {
-            in_vs: tokens_id,
-            out_v: doc_id.into(),
-        })));
+    fn add_token(&mut self, id: TokenId, token: TokenStaticData) {
+        let result_set_id = self.add(Element::Vertex(Vertex::ResultSet(ResultSet { key: None })));
+        self.token_map.insert(id, result_set_id);
+        if let Some(hover) = token.hover {
+            let hover_id = self.add(Element::Vertex(Vertex::HoverResult {
+                result: Hover {
+                    contents: HoverContents::Markup(to_proto::markup_content(hover.markup)),
+                    range: None,
+                },
+            }));
+            self.add(Element::Edge(Edge::Hover(EdgeData {
+                in_v: hover_id.into(),
+                out_v: result_set_id.into(),
+            })));
+        }
+    }
+
+    fn add_file(&mut self, file: StaticIndexedFile) -> Cancellable<()> {
+        let StaticIndexedFile { file_id, tokens, folds} = file;
+        let path = self.vfs.file_path(file_id);
+        let path = path.as_path().unwrap();
+        let doc_id = self.add(Element::Vertex(Vertex::Document(Document {
+            language_id: "rust".to_string(),
+            uri: lsp_types::Url::from_file_path(path).unwrap(),
+        })));
+        let text = self.analysis.file_text(file_id)?;
+        let line_index = self.db.line_index(file_id);
+        let line_index = LineIndex {
+            index: line_index.clone(),
+            encoding: OffsetEncoding::Utf16,
+            endings: LineEndings::Unix,
+        };
+        let result = folds
+            .into_iter()
+            .map(|it| to_proto::folding_range(&*text, &line_index, false, it))
+            .collect();
+        let folding_id = self.add(Element::Vertex(Vertex::FoldingRangeResult { result }));
+        self.add(Element::Edge(Edge::FoldingRange(EdgeData {
+            in_v: folding_id.into(),
+            out_v: doc_id.into(),
+        })));
+        let tokens_id = tokens
+            .into_iter()
+            .map(|(range, id)| {
+                let range_id = self.add(Element::Vertex(Vertex::Range {
+                    range: to_proto::range(&line_index, range),
+                    tag: None,
+                }));
+                let result_set_id = *self.token_map.get(&id).expect("token map doesn't contain id");
+                self.add(Element::Edge(Edge::Next(EdgeData {
+                    in_v: result_set_id.into(),
+                    out_v: range_id.into(),
+                })));
+                range_id.into()
+            })
+            .collect();
+        self.add(Element::Edge(Edge::Contains(EdgeDataMultiIn {
+            in_vs: tokens_id,
+            out_v: doc_id.into(),
+        })));
+        Ok(())
+    }
 }
 
@@ -106,37 +156,18 @@ impl flags::Lsif {
 
         let si = StaticIndex::compute(db, &analysis)?;
 
-        let mut lsif = LsifManager::default();
+        let mut lsif = LsifManager::new(&analysis, db, &vfs);
         lsif.add(Element::Vertex(Vertex::MetaData(MetaData {
             version: String::from("0.5.0"),
             project_root: lsp_types::Url::from_file_path(path).unwrap(),
             position_encoding: Encoding::Utf16,
             tool_info: None,
         })));
-        for StaticIndexedFile { file_id, folds, tokens } in si.files {
-            let path = vfs.file_path(file_id);
-            let path = path.as_path().unwrap();
-            let doc_id = lsif.add(Element::Vertex(Vertex::Document(Document {
-                language_id: "rust".to_string(),
-                uri: lsp_types::Url::from_file_path(path).unwrap(),
-            })));
-            let text = analysis.file_text(file_id)?;
-            let line_index = db.line_index(file_id);
-            let line_index = LineIndex {
-                index: line_index.clone(),
-                encoding: OffsetEncoding::Utf16,
-                endings: LineEndings::Unix,
-            };
-            let result = folds
-                .into_iter()
-                .map(|it| to_proto::folding_range(&*text, &line_index, false, it))
-                .collect();
-            let folding_id = lsif.add(Element::Vertex(Vertex::FoldingRangeResult { result }));
-            lsif.add(Element::Edge(Edge::FoldingRange(EdgeData {
-                in_v: folding_id.into(),
-                out_v: doc_id.into(),
-            })));
-            lsif.add_tokens(&line_index, doc_id, tokens);
+        for (id, token) in si.tokens.iter() {
+            lsif.add_token(id, token);
+        }
+        for file in si.files {
+            lsif.add_file(file)?;
         }
         eprintln!("Generating LSIF finished in {:?}", now.elapsed());
         Ok(())
     }
 }
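
The last patch in this excerpt wires up go-to-definition and find-references: each token's static data gains its definition site and reference sites, which LSIF represents as definitionResult/referenceResult vertices reached from the shared resultSet, with `item` edges listing the concrete ranges. In the shapes this series defined earlier (illustrative ids; a sketch only, since the dump logic is truncated below):

```rust
// Sketch of how a definition hangs off a resultSet: resultSet -> definitionResult
// via textDocument/definition, then an item edge points at the defining range (id 4).
fn main() {
    let entries = [
        r#"{"id":10,"type":"vertex","label":"resultSet"}"#,
        r#"{"id":11,"type":"vertex","label":"definitionResult"}"#,
        r#"{"id":12,"type":"edge","label":"textDocument/definition","inV":11,"outV":10}"#,
        r#"{"id":13,"type":"edge","label":"item","property":"definition","inV":4,"outV":11}"#,
    ];
    for entry in entries {
        assert!(serde_json::from_str::<serde_json::Value>(entry).is_ok());
    }
}
```
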
index d50680ce14..dbfa99bdf2 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -87,7 +87,7 @@ pub use crate::{
     references::ReferenceSearchResult,
     rename::RenameError,
     runnables::{Runnable, RunnableKind, TestId},
-    static_index::{StaticIndex, StaticIndexedFile, TokenStaticData, TokenId},
+    static_index::{StaticIndex, StaticIndexedFile, TokenId, TokenStaticData},
     syntax_highlighting::{
         tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
         HlRange,
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index bd71177990..55a6710fcf 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -3,15 +3,17 @@
 
 use std::collections::HashMap;
 
+use hir::Semantics;
 use hir::{db::HirDatabase, Crate, Module};
-use ide_db::base_db::{FileId, SourceDatabaseExt};
-use ide_db::RootDatabase;
+use ide_db::base_db::{FileId, FileRange, SourceDatabaseExt};
 use ide_db::defs::Definition;
+use ide_db::RootDatabase;
 use rustc_hash::FxHashSet;
-use syntax::TextRange;
 use syntax::{AstNode, SyntaxKind::*, T};
+use syntax::{SyntaxToken, TextRange};
 
-use crate::hover::{get_definition_of_token, hover_for_definition};
+use crate::display::TryToNav;
+use crate::hover::hover_for_definition;
 use crate::{Analysis, Cancellable, Fold, HoverConfig, HoverDocFormat, HoverResult};
 
 /// A static representation of fully analyzed source code.
@@ -25,8 +27,15 @@ pub struct StaticIndex<'a> {
     def_map: HashMap<Definition, TokenId>,
 }
 
+pub struct ReferenceData {
+    pub range: FileRange,
+    pub is_definition: bool,
+}
+
 pub struct TokenStaticData {
     pub hover: Option<HoverResult>,
+    pub definition: Option<FileRange>,
+    pub references: Vec<ReferenceData>,
 }
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash)]
@@ -42,14 +51,16 @@ impl TokenStore {
         id
     }
 
+    pub fn get_mut(&mut self, id: TokenId) -> Option<&mut TokenStaticData> {
+        self.0.get_mut(id.0)
+    }
+
     pub fn get(&self, id: TokenId) -> Option<&TokenStaticData> {
         self.0.get(id.0)
     }
-
-    pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
-        self.0.into_iter().enumerate().map(|(i, x)| {
-            (TokenId(i), x)
-        })
+
+    pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
+        self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x))
     }
 }
 
@@ -84,26 +95,15 @@ impl StaticIndex<'_> {
         });
         let hover_config =
             HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
-        let tokens = tokens
-            .filter(|token| match token.kind() {
-                IDENT
-                | INT_NUMBER
-                | LIFETIME_IDENT
-                | T![self]
-                | T![super]
-                | T![crate] => true,
-                _ => false,
-            });
-        let mut result = StaticIndexedFile {
-            file_id,
-            folds,
-            tokens: vec![],
-        };
+        let tokens = tokens.filter(|token| match token.kind() {
+            IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] => true,
+            _ => false,
+        });
+        let mut result = StaticIndexedFile { file_id, folds, tokens: vec![] };
         for token in tokens {
             let range = token.text_range();
             let node = token.parent().unwrap();
-            let def = get_definition_of_token(self.db, &sema, &sema.descend_into_macros(token), file_id, range.start(), &mut None);
-            let def = if let Some(x) = def {
+            let def = if let Some(x) = get_definition(&sema, token.clone()) {
                 x
             } else {
                 continue;
@@ -112,18 +112,34 @@ impl StaticIndex<'_> {
                 *x
             } else {
                 let x = self.tokens.insert(TokenStaticData {
-                    hover: hover_for_definition(self.db, file_id, &sema, def, node, &hover_config),
+                    hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
+                    definition: def
+                        .try_to_nav(self.db)
+                        .map(|x| FileRange { file_id: x.file_id, range: x.focus_or_full_range() }),
+                    references: vec![],
                 });
                 self.def_map.insert(def, x);
                 x
             };
+            let token = self.tokens.get_mut(id).unwrap();
+            token.references.push(ReferenceData {
+                range: FileRange { range, file_id },
+                is_definition: if let Some(x) = def.try_to_nav(self.db) {
+                    x.file_id == file_id && x.focus_or_full_range() == range
+                } else {
+                    false
+                },
+            });
             result.tokens.push((range, id));
         }
         self.files.push(result);
         Ok(())
     }
-
-    pub fn compute<'a>(db: &'a RootDatabase, analysis: &'a Analysis) -> Cancellable<StaticIndex<'a>> {
+
+    pub fn compute<'a>(
+        db: &'a RootDatabase,
+        analysis: &'a Analysis,
+    ) -> Cancellable<StaticIndex<'a>> {
         let work = all_modules(db).into_iter().filter(|module| {
             let file_id = module.definition_source(db).file_id.original_file(db);
             let source_root = db.file_source_root(file_id);
@@ -133,7 +149,8 @@ impl StaticIndex<'_> {
         let mut this = StaticIndex {
             files: vec![],
             tokens: Default::default(),
-            analysis, db,
+            analysis,
+            db,
             def_map: Default::default(),
         };
         let mut visited_files = FxHashSet::default();
@@ -150,3 +167,15 @@ impl StaticIndex<'_> {
         Ok(this)
     }
 }
+
+fn get_definition(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Definition> {
+    for token in sema.descend_into_macros_many(token) {
+        let def = Definition::from_token(&sema, &token);
+        if let [x] = def.as_slice() {
+            return Some(*x);
+        } else {
+            continue;
+        };
+    }
+    None
+}
diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs
index 509842516a..f7be8374ca 100644
--- a/crates/rust-analyzer/src/cli/lsif.rs
+++ b/crates/rust-analyzer/src/cli/lsif.rs
@@ -4,7 +4,10 @@ use std::collections::HashMap;
 use std::env;
 use std::time::Instant;
 
-use ide::{Analysis, Cancellable, RootDatabase, StaticIndex, StaticIndexedFile, TokenId, TokenStaticData};
+use ide::{
+    Analysis, Cancellable, FileId, FileRange, RootDatabase, StaticIndex, StaticIndexedFile,
+    TokenId, TokenStaticData,
+};
 use ide_db::LineIndexDatabase;
 
 use ide_db::base_db::salsa::{self, ParallelDatabase};
@@ -31,6 +34,8 @@ impl Clone for Snap<salsa::Snapshot<RootDatabase>> {
 struct LsifManager<'a> {
     count: i32,
     token_map: HashMap<TokenId, Id>,
+    range_map: HashMap<FileRange, Id>,
+    file_map: HashMap<FileId, Id>,
     analysis: &'a Analysis,
     db: &'a RootDatabase,
     vfs: &'a Vfs,
@@ -50,12 +55,14 @@ impl LsifManager<'_> {
         LsifManager {
             count: 0,
             token_map: HashMap::default(),
+            range_map: HashMap::default(),
+            file_map: HashMap::default(),
             analysis,
             db,
             vfs,
         }
     }
-
+
     fn add(&mut self, data: Element) -> Id {
         let id = Id(self.count);
         self.emit(&serde_json::to_string(&Entry { id: id.into(), data }).unwrap());
@@ -68,9 +75,54 @@ impl LsifManager<'_> {
         println!("{}", data);
     }
 
-    fn add_token(&mut self, id: TokenId, token: TokenStaticData) {
+    fn get_token_id(&mut self, id: TokenId) -> Id {
+        if let Some(x) = self.token_map.get(&id) {
+            return *x;
+        }
         let result_set_id = self.add(Element::Vertex(Vertex::ResultSet(ResultSet { key: None })));
         self.token_map.insert(id, result_set_id);
+        result_set_id
+    }
+
+    fn get_range_id(&mut self, id: FileRange) -> Cancellable<Id> {
+        if let Some(x) = self.range_map.get(&id) {
+            return Ok(*x);
+        }
+        let file_id = id.file_id;
+        let doc_id = self.get_file_id(file_id);
+        let line_index = self.db.line_index(file_id);
+        let line_index = LineIndex {
+            index: line_index.clone(),
+            encoding: OffsetEncoding::Utf16,
+            endings: LineEndings::Unix,
+        };
+        let range_id = self.add(Element::Vertex(Vertex::Range {
+            range: to_proto::range(&line_index, id.range),
+            tag: None,
+        }));
+        self.add(Element::Edge(Edge::Contains(EdgeDataMultiIn {
+            in_vs: vec![range_id.into()],
+            out_v: doc_id.into(),
+        })));
+        Ok(range_id)
+    }
+
+    fn get_file_id(&mut self, id: FileId) -> Id {
+        if let Some(x) = self.file_map.get(&id) {
+            return *x;
+        }
+        let path = self.vfs.file_path(id);
+        let path = path.as_path().unwrap();
+        let doc_id = self.add(Element::Vertex(Vertex::Document(Document {
+            language_id: "rust".to_string(),
+            uri: lsp_types::Url::from_file_path(path).unwrap(),
+        })));
+        self.file_map.insert(id, doc_id);
+        doc_id
+    }
+
+    fn add_token(&mut self, id: TokenId, token: TokenStaticData) -> Cancellable<()> {
+        let result_set_id = self.get_token_id(id);
         if let Some(hover) = token.hover {
             let hover_id = self.add(Element::Vertex(Vertex::HoverResult {
                 result: Hover {
@@ -83,16 +135,50 @@ impl LsifManager<'_> {
                 out_v: result_set_id.into(),
             })));
         }
+        if let Some(def) = token.definition {
+            let result_id = self.add(Element::Vertex(Vertex::DefinitionResult));
+            let def_vertex = self.get_range_id(def)?;
+            self.add(Element::Edge(Edge::Item(Item {
+                document: (*self.file_map.get(&def.file_id).unwrap()).into(),
+                property: None,
+                edge_data: EdgeDataMultiIn {
+                    in_vs: vec![def_vertex.into()],
+                    out_v: result_id.into(),
+                },
+            })));
+            self.add(Element::Edge(Edge::Definition(EdgeData {
+                in_v: result_id.into(),
+                out_v: result_set_id.into(),
+            })));
+        }
+        if !token.references.is_empty() {
+            let result_id = self.add(Element::Vertex(Vertex::ReferenceResult));
+            self.add(Element::Edge(Edge::References(EdgeData {
+                in_v: result_id.into(),
+                out_v: result_set_id.into(),
+            })));
+            for x in token.references {
+                let vertex = *self.range_map.get(&x.range).unwrap();
+                self.add(Element::Edge(Edge::Item(Item {
+                    document: (*self.file_map.get(&x.range.file_id).unwrap()).into(),
+                    property: Some(if x.is_definition {
+                        ItemKind::Definitions
+                    } else {
+                        ItemKind::References
+                    }),
+                    edge_data: EdgeDataMultiIn {
+                        in_vs: vec![vertex.into()],
+                        out_v: result_id.into(),
+                    },
+                })));
+            }
+        }
+        Ok(())
     }
 
     fn add_file(&mut self, file: StaticIndexedFile) -> Cancellable<()> {
-        let StaticIndexedFile { file_id, tokens, folds} = file;
-        let path = self.vfs.file_path(file_id);
-        let path = path.as_path().unwrap();
-        let doc_id = self.add(Element::Vertex(Vertex::Document(Document {
-            language_id: "rust".to_string(),
-            uri: lsp_types::Url::from_file_path(path).unwrap(),
-        })));
+        let StaticIndexedFile { file_id, tokens, folds } = file;
+        let doc_id = self.get_file_id(file_id);
         let text = self.analysis.file_text(file_id)?;
         let line_index = self.db.line_index(file_id);
         let line_index = LineIndex {
             index: line_index.clone(),
@@ -116,7 +202,8 @@ impl LsifManager<'_> {
                     range: to_proto::range(&line_index, range),
                     tag: None,
                 }));
-                let result_set_id = *self.token_map.get(&id).expect("token map doesn't contain id");
+                self.range_map.insert(FileRange { file_id, range }, range_id);
+                let result_set_id = self.get_token_id(id);
                 self.add(Element::Edge(Edge::Next(EdgeData {
                     in_v: result_set_id.into(),
                     out_v: range_id.into(),
                 })));
@@ -161,12 +248,12 @@ impl flags::Lsif {
             position_encoding: Encoding::Utf16,
             tool_info: None,
         })));
-        for (id, token) in si.tokens.iter() {
-            lsif.add_token(id, token);
-        }
         for file in si.files {
             lsif.add_file(file)?;
         }
+        for (id, token) in si.tokens.iter() {
+            lsif.add_token(id, token)?;
+        }
         eprintln!("Generating LSIF finished in {:?}", now.elapsed());
         Ok(())
     }

From 13d36e96c2e6ff0bb2b45b470f15fc96fcc67bbb Mon Sep 17 00:00:00 2001
From: hamidreza kalbasi
Date: Sun, 26 Sep 2021 10:39:23 +0330
Subject: [PATCH 08/10] use crates.io version of lsp-types

---
 Cargo.lock                      | 5 +++--
 crates/rust-analyzer/Cargo.toml | 2 +-
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 2055512a9d..a2a21f02f1 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -862,8 +862,9 @@ dependencies = [
 
 [[package]]
 name = "lsp-types"
-version = "0.89.2"
-source = "git+https://github.com/gluon-lang/lsp-types#6b43b1f7184ab379f0a6f89673bb10a8de11d12f"
+version = "0.90.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7404037aab080771c90b0a499836d9d8a10336ecd07badf969567b65c6d51a1"
 dependencies = [
  "bitflags",
  "serde",
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index f501679510..29a3fbb67e 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -22,7 +22,7 @@ crossbeam-channel = "0.5.0"
 dissimilar = "1.0.2"
 itertools = "0.10.0"
 jod-thread = "0.1.0"
-lsp-types = { git = "https://github.com/gluon-lang/lsp-types", features = ["proposed"] }
+lsp-types = { version = "0.90.0", features = ["proposed"] }
 parking_lot = "0.11.0"
 xflags = "0.2.1"
 oorandom = "11.1.2"

From 7377120fee84743ad980ed87458bb08dd97f3101 Mon Sep 17 00:00:00 2001
From: hamidreza kalbasi
Date: Sun, 26 Sep 2021 12:47:57 +0330
Subject: [PATCH 09/10] add some tests

---
 crates/ide/src/fixture.rs      | 19 ++++++++
 crates/ide/src/static_index.rs | 80 ++++++++++++++++++++++++++++++++++
 2 files changed, 99 insertions(+)

diff --git a/crates/ide/src/fixture.rs b/crates/ide/src/fixture.rs
index 700f4dc955..2ea6f6a9ab 100644
--- a/crates/ide/src/fixture.rs
+++ b/crates/ide/src/fixture.rs
@@ -66,3 +66,22 @@ pub(crate) fn annotations(ra_fixture: &str) -> (Analysis, FilePosition, Vec<(Fil
         .collect();
     (host.analysis(), FilePosition { file_id, offset }, annotations)
 }
+
+/// Creates analysis from a multi-file fixture with annotations, without the $0 marker
+pub(crate) fn annotations_without_marker(ra_fixture: &str) -> (Analysis, Vec<(FileRange, String)>) {
+    let mut host = AnalysisHost::default();
+    let change_fixture = ChangeFixture::parse(ra_fixture);
+    host.db.set_enable_proc_attr_macros(true);
+    host.db.apply_change(change_fixture.change);
+
+    let annotations = change_fixture
+        .files
+        .iter()
+        .flat_map(|&file_id| {
+            let file_text = host.analysis().file_text(file_id).unwrap();
+            let annotations = extract_annotations(&file_text);
+            annotations.into_iter().map(move |(range, data)| (FileRange { file_id, range }, data))
+        })
+        .collect();
+    (host.analysis(), annotations)
+}
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index 55a6710fcf..d467e794ce 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -179,3 +179,83 @@ fn get_definition(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<
     }
     None
 }
+
+#[cfg(test)]
+mod tests {
+    use crate::{fixture, StaticIndex};
+    use ide_db::base_db::FileRange;
+    use std::collections::HashSet;
+
+    fn check_all_ranges(ra_fixture: &str) {
+        let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
+        let s = StaticIndex::compute(&*analysis.db, &analysis).unwrap();
+        let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
+        for f in s.files {
+            for (range, _) in f.tokens {
+                let x = FileRange { file_id: f.file_id, range };
+                if !range_set.contains(&x) {
+                    panic!("additional range {:?}", x);
+                }
+                range_set.remove(&x);
+            }
+        }
+        if !range_set.is_empty() {
+            panic!("missing ranges {:?}", range_set);
+        }
+    }
+
+    fn check_definitions(ra_fixture: &str) {
+        let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
+        let s = StaticIndex::compute(&*analysis.db, &analysis).unwrap();
+        let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
+        for (_, t) in s.tokens.iter() {
+            if let Some(x) = t.definition {
+                if !range_set.contains(&x) {
+                    panic!("additional definition {:?}", x);
+                }
+                range_set.remove(&x);
+            }
+        }
+        if !range_set.is_empty() {
+            panic!("missing definitions {:?}", range_set);
+        }
+    }
+
+    #[test]
+    fn struct_and_enum() {
+        check_all_ranges(
+            r#"
+struct Foo;
+     //^^^
+enum E { X(Foo) }
+   //^   ^ ^^^
+"#,
+        );
+        check_definitions(
+            r#"
+struct Foo;
+     //^^^
+enum E { X(Foo) }
+   //^   ^
+"#,
+        );
+    }
+
+    #[test]
+    fn derives() {
+        check_all_ranges(
+            r#"
+#[rustc_builtin_macro]
+pub macro Copy {}
+        //^^^^
+#[rustc_builtin_macro]
+pub macro derive {}
+        //^^^^^^
+#[derive(Copy)]
+//^^^^^^ ^^^^
+struct Hello(i32);
+     //^^^^^ ^^^
+"#,
+        );
+    }
+}

From 5bd0f50111cce297b397c6549427e78c7c1da5c0 Mon Sep 17 00:00:00 2001
From: hamidreza kalbasi
Date: Wed, 29 Sep 2021 16:11:58 +0330
Subject: [PATCH 10/10] remove glob import and cancellables

---
 crates/ide/src/static_index.rs       |  21 ++---
 crates/rust-analyzer/src/cli/lsif.rs | 126 ++++++++++++++-------------
 2 files changed, 75 insertions(+), 72 deletions(-)

diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index d467e794ce..aa62e2eae5 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -14,7 +14,7 @@ use syntax::{SyntaxToken, TextRange};
 
 use crate::display::TryToNav;
 use crate::hover::hover_for_definition;
-use crate::{Analysis, Cancellable, Fold, HoverConfig, HoverDocFormat, HoverResult};
+use crate::{Analysis, Fold, HoverConfig, HoverDocFormat, HoverResult};
 
 /// A static representation of fully analyzed source code.
@@ -84,8 +84,8 @@ fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
 }
 
 impl StaticIndex<'_> {
-    fn add_file(&mut self, file_id: FileId) -> Cancellable<()> {
-        let folds = self.analysis.folding_ranges(file_id)?;
+    fn add_file(&mut self, file_id: FileId) {
+        let folds = self.analysis.folding_ranges(file_id).unwrap();
         // hovers
         let sema = hir::Semantics::new(self.db);
         let tokens_or_nodes = sema.parse(file_id).syntax().clone();
@@ -133,13 +133,9 @@ impl StaticIndex<'_> {
             result.tokens.push((range, id));
         }
         self.files.push(result);
-        Ok(())
     }
 
-    pub fn compute<'a>(
-        db: &'a RootDatabase,
-        analysis: &'a Analysis,
-    ) -> Cancellable<StaticIndex<'a>> {
+    pub fn compute<'a>(db: &'a RootDatabase, analysis: &'a Analysis) -> StaticIndex<'a> {
         let work = all_modules(db).into_iter().filter(|module| {
             let file_id = module.definition_source(db).file_id.original_file(db);
             let source_root = db.file_source_root(file_id);
@@ -159,12 +155,11 @@ impl StaticIndex<'_> {
             if visited_files.contains(&file_id) {
                 continue;
             }
-            this.add_file(file_id)?;
+            this.add_file(file_id);
             // mark the file
             visited_files.insert(file_id);
         }
-        //eprintln!("{:#?}", token_map);
-        Ok(this)
+        this
     }
 }
 
@@ -183,7 +179,7 @@ mod tests {
 
     fn check_all_ranges(ra_fixture: &str) {
         let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
-        let s = StaticIndex::compute(&*analysis.db, &analysis).unwrap();
+        let s = StaticIndex::compute(&*analysis.db, &analysis);
         let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
         for f in s.files {
             for (range, _) in f.tokens {
@@ -201,7 +197,7 @@ mod tests {
 
     fn check_definitions(ra_fixture: &str) {
         let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
-        let s = StaticIndex::compute(&*analysis.db, &analysis).unwrap();
+        let s = StaticIndex::compute(&*analysis.db, &analysis);
         let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
         for (_, t) in s.tokens.iter() {
             if let Some(x) = t.definition {
diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs
index f7be8374ca..a235443146 100644
--- a/crates/rust-analyzer/src/cli/lsif.rs
+++ b/crates/rust-analyzer/src/cli/lsif.rs
@@ -1,17 +1,17 @@
-//! Lsif generator
+//! LSIF (language server index format) generator
 
 use std::collections::HashMap;
 use std::env;
 use std::time::Instant;
 
 use ide::{
-    Analysis, Cancellable, FileId, FileRange, RootDatabase, StaticIndex, StaticIndexedFile,
-    TokenId, TokenStaticData,
+    Analysis, FileId, FileRange, RootDatabase, StaticIndex, StaticIndexedFile, TokenId,
+    TokenStaticData,
 };
 use ide_db::LineIndexDatabase;
 
 use ide_db::base_db::salsa::{self, ParallelDatabase};
-use lsp_types::{lsif::*, Hover, HoverContents, NumberOrString};
+use lsp_types::{self, lsif};
 use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
 use vfs::{AbsPathBuf, Vfs};
 
@@ -44,9 +44,9 @@ struct LsifManager<'a> {
 #[derive(Clone, Copy)]
 struct Id(i32);
 
-impl From<Id> for NumberOrString {
+impl From<Id> for lsp_types::NumberOrString {
     fn from(Id(x): Id) -> Self {
-        NumberOrString::Number(x)
+        lsp_types::NumberOrString::Number(x)
     }
 }
 
@@ -63,13 +63,21 @@ impl LsifManager<'_> {
         }
     }
 
-    fn add(&mut self, data: Element) -> Id {
+    fn add(&mut self, data: lsif::Element) -> Id {
         let id = Id(self.count);
-        self.emit(&serde_json::to_string(&Entry { id: id.into(), data }).unwrap());
+        self.emit(&serde_json::to_string(&lsif::Entry { id: id.into(), data }).unwrap());
         self.count += 1;
         id
     }
 
+    fn add_vertex(&mut self, vertex: lsif::Vertex) -> Id {
+        self.add(lsif::Element::Vertex(vertex))
+    }
+
+    fn add_edge(&mut self, edge: lsif::Edge) -> Id {
+        self.add(lsif::Element::Edge(edge))
+    }
+
     // FIXME: support file in addition to stdout here
     fn emit(&self, data: &str) {
         println!("{}", data);
@@ -79,14 +87,14 @@ impl LsifManager<'_> {
         if let Some(x) = self.token_map.get(&id) {
             return *x;
         }
-        let result_set_id = self.add(Element::Vertex(Vertex::ResultSet(ResultSet { key: None })));
+        let result_set_id = self.add_vertex(lsif::Vertex::ResultSet(lsif::ResultSet { key: None }));
         self.token_map.insert(id, result_set_id);
         result_set_id
     }
 
-    fn get_range_id(&mut self, id: FileRange) -> Cancellable<Id> {
+    fn get_range_id(&mut self, id: FileRange) -> Id {
         if let Some(x) = self.range_map.get(&id) {
-            return Ok(*x);
+            return *x;
         }
         let file_id = id.file_id;
         let doc_id = self.get_file_id(file_id);
@@ -96,15 +104,15 @@ impl LsifManager<'_> {
             encoding: OffsetEncoding::Utf16,
             endings: LineEndings::Unix,
         };
-        let range_id = self.add(Element::Vertex(Vertex::Range {
+        let range_id = self.add_vertex(lsif::Vertex::Range {
             range: to_proto::range(&line_index, id.range),
             tag: None,
-        }));
+        });
-        self.add(Element::Edge(Edge::Contains(EdgeDataMultiIn {
+        self.add_edge(lsif::Edge::Contains(lsif::EdgeDataMultiIn {
             in_vs: vec![range_id.into()],
             out_v: doc_id.into(),
-        })));
+        }));
-        Ok(range_id)
+        range_id
     }
 
@@ -113,73 +121,74 @@ impl LsifManager<'_> {
     fn get_file_id(&mut self, id: FileId) -> Id {
         }
         let path = self.vfs.file_path(id);
         let path = path.as_path().unwrap();
-        let doc_id = self.add(Element::Vertex(Vertex::Document(Document {
+        let doc_id = self.add_vertex(lsif::Vertex::Document(lsif::Document {
             language_id: "rust".to_string(),
             uri: lsp_types::Url::from_file_path(path).unwrap(),
-        })));
+        }));
         self.file_map.insert(id, doc_id);
         doc_id
     }
 
-    fn add_token(&mut self, id: TokenId, token: TokenStaticData) -> Cancellable<()> {
+    fn add_token(&mut self, id: TokenId, token: TokenStaticData) {
        let result_set_id = self.get_token_id(id);
         if let Some(hover) = token.hover {
-            let hover_id = self.add(Element::Vertex(Vertex::HoverResult {
-                result: Hover {
-                    contents: HoverContents::Markup(to_proto::markup_content(hover.markup)),
+            let hover_id = self.add_vertex(lsif::Vertex::HoverResult {
+                result: lsp_types::Hover {
+                    contents: lsp_types::HoverContents::Markup(to_proto::markup_content(
+                        hover.markup,
+                    )),
                     range: None,
                 },
-            }));
-            self.add(Element::Edge(Edge::Hover(EdgeData {
+            });
+            self.add_edge(lsif::Edge::Hover(lsif::EdgeData {
                 in_v: hover_id.into(),
                 out_v: result_set_id.into(),
-            })));
+            }));
         }
         if let Some(def) = token.definition {
-            let result_id = self.add(Element::Vertex(Vertex::DefinitionResult));
-            let def_vertex = self.get_range_id(def)?;
-            self.add(Element::Edge(Edge::Item(Item {
+            let result_id = self.add_vertex(lsif::Vertex::DefinitionResult);
+            let def_vertex = self.get_range_id(def);
+            self.add_edge(lsif::Edge::Item(lsif::Item {
                 document: (*self.file_map.get(&def.file_id).unwrap()).into(),
                 property: None,
-                edge_data: EdgeDataMultiIn {
+                edge_data: lsif::EdgeDataMultiIn {
                     in_vs: vec![def_vertex.into()],
                     out_v: result_id.into(),
                 },
-            })));
-            self.add(Element::Edge(Edge::Definition(EdgeData {
+            }));
+            self.add_edge(lsif::Edge::Definition(lsif::EdgeData {
                 in_v: result_id.into(),
                 out_v: result_set_id.into(),
-            })));
+            }));
         }
         if !token.references.is_empty() {
-            let result_id = self.add(Element::Vertex(Vertex::ReferenceResult));
-            self.add(Element::Edge(Edge::References(EdgeData {
+            let result_id = self.add_vertex(lsif::Vertex::ReferenceResult);
+            self.add_edge(lsif::Edge::References(lsif::EdgeData {
                 in_v: result_id.into(),
                 out_v: result_set_id.into(),
-            })));
+            }));
             for x in token.references {
                 let vertex = *self.range_map.get(&x.range).unwrap();
-                self.add(Element::Edge(Edge::Item(Item {
+                self.add_edge(lsif::Edge::Item(lsif::Item {
                     document: (*self.file_map.get(&x.range.file_id).unwrap()).into(),
                     property: Some(if x.is_definition {
-                        ItemKind::Definitions
+                        lsif::ItemKind::Definitions
                     } else {
-                        ItemKind::References
+                        lsif::ItemKind::References
                     }),
-                    edge_data: EdgeDataMultiIn {
+                    edge_data: lsif::EdgeDataMultiIn {
                         in_vs: vec![vertex.into()],
                         out_v: result_id.into(),
                     },
-                })));
+                }));
             }
         }
-        Ok(())
     }
 
-    fn add_file(&mut self, file: StaticIndexedFile) -> Cancellable<()> {
+    fn add_file(&mut self, file: StaticIndexedFile) {
         let StaticIndexedFile { file_id, tokens, folds } = file;
         let doc_id = self.get_file_id(file_id);
-        let text = self.analysis.file_text(file_id)?;
+        let text = self.analysis.file_text(file_id).unwrap();
         let line_index = self.db.line_index(file_id);
         let line_index = LineIndex {
             index: line_index.clone(),
@@ -190,32 +199,31 @@ impl LsifManager<'_> {
             .into_iter()
             .map(|it| to_proto::folding_range(&*text, &line_index, false, it))
             .collect();
-        let folding_id = self.add(Element::Vertex(Vertex::FoldingRangeResult { result }));
-        self.add(Element::Edge(Edge::FoldingRange(EdgeData {
+        let folding_id = self.add_vertex(lsif::Vertex::FoldingRangeResult { result });
+        self.add_edge(lsif::Edge::FoldingRange(lsif::EdgeData {
             in_v: folding_id.into(),
             out_v: doc_id.into(),
-        })));
+        }));
         let tokens_id = tokens
             .into_iter()
             .map(|(range, id)| {
-                let range_id = self.add(Element::Vertex(Vertex::Range {
+                let range_id = self.add_vertex(lsif::Vertex::Range {
                     range: to_proto::range(&line_index, range),
                     tag: None,
-                }));
+                });
                 self.range_map.insert(FileRange { file_id, range }, range_id);
                 let result_set_id = self.get_token_id(id);
-                self.add(Element::Edge(Edge::Next(EdgeData {
+                self.add_edge(lsif::Edge::Next(lsif::EdgeData {
                     in_v: result_set_id.into(),
                     out_v: range_id.into(),
-                })));
+                }));
                 range_id.into()
             })
             .collect();
-        self.add(Element::Edge(Edge::Contains(EdgeDataMultiIn {
+        self.add_edge(lsif::Edge::Contains(lsif::EdgeDataMultiIn {
             in_vs: tokens_id,
             out_v: doc_id.into(),
-        })));
-        Ok(())
+        }));
     }
 }
 
@@ -239,20 +247,20 @@ impl flags::Lsif {
         let db = host.raw_database();
         let analysis = host.analysis();
 
-        let si = StaticIndex::compute(db, &analysis)?;
+        let si = StaticIndex::compute(db, &analysis);
 
         let mut lsif = LsifManager::new(&analysis, db, &vfs);
-        lsif.add(Element::Vertex(Vertex::MetaData(MetaData {
+        lsif.add_vertex(lsif::Vertex::MetaData(lsif::MetaData {
             version: String::from("0.5.0"),
             project_root: lsp_types::Url::from_file_path(path).unwrap(),
-            position_encoding: Encoding::Utf16,
+            position_encoding: lsif::Encoding::Utf16,
             tool_info: None,
-        })));
+        }));
         for file in si.files {
-            lsif.add_file(file)?;
+            lsif.add_file(file);
        }
         for (id, token) in si.tokens.iter() {
-            lsif.add_token(id, token)?;
+            lsif.add_token(id, token);
         }
         eprintln!("Generating LSIF finished in {:?}", now.elapsed());
         Ok(())
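
For reference, a minimal end-to-end run of the generator this series adds (a sketch, assuming the `lsif` subcommand takes a project path and, per the FIXME in `emit`, still writes every entry to stdout; redirecting into `dump.lsif` merely mirrors the new `.gitignore` entry):

    $ rust-analyzer lsif . > dump.lsif

Each output line is one `lsif::Entry` serialized by serde_json. Abbreviated, with hypothetical ids and paths, and with the folding-range entries omitted, the head of a dump would look roughly like:

    {"id":0,"type":"vertex","label":"metaData","version":"0.5.0","projectRoot":"file:///project","positionEncoding":"utf-16"}
    {"id":1,"type":"vertex","label":"document","uri":"file:///project/src/lib.rs","languageId":"rust"}
    {"id":2,"type":"vertex","label":"resultSet"}
    {"id":3,"type":"vertex","label":"range","start":{"line":0,"character":7},"end":{"line":0,"character":10}}
    {"id":4,"type":"edge","label":"next","outV":3,"inV":2}

The `metaData` vertex is emitted by the run function, the `document` vertex by `get_file_id`, the `resultSet` vertex by `get_token_id`, and the `next` edge is what ties a range inside a document back to the result set carrying its hover, definition, and reference results.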