Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-02 06:41:48 +00:00)

Commit: b2f6fd4f96 (parent: 15b867b5db)
Commit message: ⬆️ rust-analyzer

217 changed files with 12639 additions and 3059 deletions
Binary file not shown.

@@ -12,7 +12,7 @@ use hir::{
 };
 use hir_def::{
     body::{BodySourceMap, SyntheticSyntax},
-    expr::ExprId,
+    expr::{ExprId, PatId},
     FunctionId,
 };
 use hir_ty::{Interner, TyExt, TypeFlags};
@@ -222,7 +222,11 @@ impl flags::AnalysisStats {
         let mut num_exprs = 0;
         let mut num_exprs_unknown = 0;
         let mut num_exprs_partially_unknown = 0;
-        let mut num_type_mismatches = 0;
+        let mut num_expr_type_mismatches = 0;
+        let mut num_pats = 0;
+        let mut num_pats_unknown = 0;
+        let mut num_pats_partially_unknown = 0;
+        let mut num_pat_type_mismatches = 0;
         let analysis = host.analysis();
         for f in funcs.iter().copied() {
             let name = f.name(db);
@@ -255,6 +259,8 @@ impl flags::AnalysisStats {
             let f_id = FunctionId::from(f);
+            let (body, sm) = db.body_with_source_map(f_id.into());
             let inference_result = db.infer(f_id.into());

+            // region:expressions
             let (previous_exprs, previous_unknown, previous_partially_unknown) =
                 (num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
             for (expr_id, _) in body.exprs.iter() {
@@ -307,12 +313,12 @@ impl flags::AnalysisStats {
                 if unknown_or_partial && self.output == Some(OutputFormat::Csv) {
                     println!(
                         r#"{},type,"{}""#,
-                        location_csv(db, &analysis, vfs, &sm, expr_id),
+                        location_csv_expr(db, &analysis, vfs, &sm, expr_id),
                         ty.display(db)
                     );
                 }
                 if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) {
-                    num_type_mismatches += 1;
+                    num_expr_type_mismatches += 1;
                     if verbosity.is_verbose() {
                         if let Some((path, start, end)) =
                             expr_syntax_range(db, &analysis, vfs, &sm, expr_id)
@@ -339,7 +345,7 @@ impl flags::AnalysisStats {
                     if self.output == Some(OutputFormat::Csv) {
                         println!(
                             r#"{},mismatch,"{}","{}""#,
-                            location_csv(db, &analysis, vfs, &sm, expr_id),
+                            location_csv_expr(db, &analysis, vfs, &sm, expr_id),
                             mismatch.expected.display(db),
                             mismatch.actual.display(db)
                         );
@@ -355,6 +361,109 @@ impl flags::AnalysisStats {
                     num_exprs_partially_unknown - previous_partially_unknown
                 ));
             }
+            // endregion:expressions
+
+            // region:patterns
+            let (previous_pats, previous_unknown, previous_partially_unknown) =
+                (num_pats, num_pats_unknown, num_pats_partially_unknown);
+            for (pat_id, _) in body.pats.iter() {
+                let ty = &inference_result[pat_id];
+                num_pats += 1;
+                let unknown_or_partial = if ty.is_unknown() {
+                    num_pats_unknown += 1;
+                    if verbosity.is_spammy() {
+                        if let Some((path, start, end)) =
+                            pat_syntax_range(db, &analysis, vfs, &sm, pat_id)
+                        {
+                            bar.println(format!(
+                                "{} {}:{}-{}:{}: Unknown type",
+                                path,
+                                start.line + 1,
+                                start.col,
+                                end.line + 1,
+                                end.col,
+                            ));
+                        } else {
+                            bar.println(format!("{name}: Unknown type",));
+                        }
+                    }
+                    true
+                } else {
+                    let is_partially_unknown =
+                        ty.data(Interner).flags.contains(TypeFlags::HAS_ERROR);
+                    if is_partially_unknown {
+                        num_pats_partially_unknown += 1;
+                    }
+                    is_partially_unknown
+                };
+                if self.only.is_some() && verbosity.is_spammy() {
+                    // in super-verbose mode for just one function, we print every single pattern
+                    if let Some((_, start, end)) = pat_syntax_range(db, &analysis, vfs, &sm, pat_id)
+                    {
+                        bar.println(format!(
+                            "{}:{}-{}:{}: {}",
+                            start.line + 1,
+                            start.col,
+                            end.line + 1,
+                            end.col,
+                            ty.display(db)
+                        ));
+                    } else {
+                        bar.println(format!("unknown location: {}", ty.display(db)));
+                    }
+                }
+                if unknown_or_partial && self.output == Some(OutputFormat::Csv) {
+                    println!(
+                        r#"{},type,"{}""#,
+                        location_csv_pat(db, &analysis, vfs, &sm, pat_id),
+                        ty.display(db)
+                    );
+                }
+                if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat_id) {
+                    num_pat_type_mismatches += 1;
+                    if verbosity.is_verbose() {
+                        if let Some((path, start, end)) =
+                            pat_syntax_range(db, &analysis, vfs, &sm, pat_id)
+                        {
+                            bar.println(format!(
+                                "{} {}:{}-{}:{}: Expected {}, got {}",
+                                path,
+                                start.line + 1,
+                                start.col,
+                                end.line + 1,
+                                end.col,
+                                mismatch.expected.display(db),
+                                mismatch.actual.display(db)
+                            ));
+                        } else {
+                            bar.println(format!(
+                                "{}: Expected {}, got {}",
+                                name,
+                                mismatch.expected.display(db),
+                                mismatch.actual.display(db)
+                            ));
+                        }
+                    }
+                    if self.output == Some(OutputFormat::Csv) {
+                        println!(
+                            r#"{},mismatch,"{}","{}""#,
+                            location_csv_pat(db, &analysis, vfs, &sm, pat_id),
+                            mismatch.expected.display(db),
+                            mismatch.actual.display(db)
+                        );
+                    }
+                }
+            }
+            if verbosity.is_spammy() {
+                bar.println(format!(
+                    "In {}: {} pats, {} unknown, {} partial",
+                    full_name,
+                    num_pats - previous_pats,
+                    num_pats_unknown - previous_unknown,
+                    num_pats_partially_unknown - previous_partially_unknown
+                ));
+            }
+            // endregion:patterns
             bar.inc(1);
         }

@@ -366,10 +475,21 @@ impl flags::AnalysisStats {
             percentage(num_exprs_unknown, num_exprs),
             num_exprs_partially_unknown,
             percentage(num_exprs_partially_unknown, num_exprs),
-            num_type_mismatches
+            num_expr_type_mismatches
         );
+        eprintln!(
+            "  pats: {}, ??ty: {} ({}%), ?ty: {} ({}%), !ty: {}",
+            num_pats,
+            num_pats_unknown,
+            percentage(num_pats_unknown, num_pats),
+            num_pats_partially_unknown,
+            percentage(num_pats_partially_unknown, num_pats),
+            num_pat_type_mismatches
+        );
         report_metric("unknown type", num_exprs_unknown, "#");
-        report_metric("type mismatches", num_type_mismatches, "#");
+        report_metric("type mismatches", num_expr_type_mismatches, "#");
+        report_metric("pattern unknown type", num_pats_unknown, "#");
+        report_metric("pattern type mismatches", num_pat_type_mismatches, "#");

        eprintln!("{:<20} {}", "Inference:", inference_sw.elapsed());
     }
@@ -379,7 +499,7 @@ impl flags::AnalysisStats {
     }
 }

-fn location_csv(
+fn location_csv_expr(
     db: &RootDatabase,
     analysis: &Analysis,
     vfs: &Vfs,
@@ -401,6 +521,30 @@ fn location_csv(
     format!("{path},{}:{},{}:{}", start.line + 1, start.col, end.line + 1, end.col)
 }

+fn location_csv_pat(
+    db: &RootDatabase,
+    analysis: &Analysis,
+    vfs: &Vfs,
+    sm: &BodySourceMap,
+    pat_id: PatId,
+) -> String {
+    let src = match sm.pat_syntax(pat_id) {
+        Ok(s) => s,
+        Err(SyntheticSyntax) => return "synthetic,,".to_string(),
+    };
+    let root = db.parse_or_expand(src.file_id).unwrap();
+    let node = src.map(|e| {
+        e.either(|it| it.to_node(&root).syntax().clone(), |it| it.to_node(&root).syntax().clone())
+    });
+    let original_range = node.as_ref().original_file_range(db);
+    let path = vfs.file_path(original_range.file_id);
+    let line_index = analysis.file_line_index(original_range.file_id).unwrap();
+    let text_range = original_range.range;
+    let (start, end) =
+        (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
+    format!("{path},{}:{},{}:{}", start.line + 1, start.col, end.line + 1, end.col)
+}
+
 fn expr_syntax_range(
     db: &RootDatabase,
     analysis: &Analysis,
@@ -423,6 +567,33 @@ fn expr_syntax_range(
         None
     }
 }
+fn pat_syntax_range(
+    db: &RootDatabase,
+    analysis: &Analysis,
+    vfs: &Vfs,
+    sm: &BodySourceMap,
+    pat_id: PatId,
+) -> Option<(VfsPath, LineCol, LineCol)> {
+    let src = sm.pat_syntax(pat_id);
+    if let Ok(src) = src {
+        let root = db.parse_or_expand(src.file_id).unwrap();
+        let node = src.map(|e| {
+            e.either(
+                |it| it.to_node(&root).syntax().clone(),
+                |it| it.to_node(&root).syntax().clone(),
+            )
+        });
+        let original_range = node.as_ref().original_file_range(db);
+        let path = vfs.file_path(original_range.file_id);
+        let line_index = analysis.file_line_index(original_range.file_id).unwrap();
+        let text_range = original_range.range;
+        let (start, end) =
+            (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
+        Some((path, start, end))
+    } else {
+        None
+    }
+}

 fn shuffle<T>(rng: &mut Rand32, slice: &mut [T]) {
     for i in 0..slice.len() {

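Note: in the summary lines this file prints, `??ty` counts fully-unknown types and `?ty` partially-unknown ones (types whose flags contain `TypeFlags::HAS_ERROR`). A standalone sketch of the tally the pattern hunks add, using a mock `Ty` instead of the real `hir_ty` types:

// Mock tally mirroring the unknown/partially-unknown bookkeeping above.
struct Ty { is_unknown: bool, has_error: bool }

fn percentage(n: usize, total: usize) -> usize {
    if total == 0 { 0 } else { n * 100 / total }
}

fn main() {
    let pats = [
        Ty { is_unknown: false, has_error: false },
        Ty { is_unknown: true, has_error: true },
        Ty { is_unknown: false, has_error: true },
    ];
    let (mut num_pats, mut unknown, mut partially_unknown) = (0, 0, 0);
    for ty in &pats {
        num_pats += 1;
        if ty.is_unknown {
            unknown += 1; // reported as ??ty
        } else if ty.has_error {
            partially_unknown += 1; // reported as ?ty
        }
    }
    eprintln!(
        "  pats: {}, ??ty: {} ({}%), ?ty: {} ({}%)",
        num_pats, unknown, percentage(unknown, num_pats),
        partially_unknown, percentage(partially_unknown, num_pats),
    );
}
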
@@ -1,6 +1,7 @@
 //! Analyze all modules in a project for diagnostics. Exits with a non-zero
 //! status code if any errors are found.

+use project_model::{CargoConfig, RustcSource};
 use rustc_hash::FxHashSet;

 use hir::{db::HirDatabase, Crate, Module};
@@ -14,7 +15,8 @@ use crate::cli::{

 impl flags::Diagnostics {
     pub fn run(self) -> anyhow::Result<()> {
-        let cargo_config = Default::default();
+        let mut cargo_config = CargoConfig::default();
+        cargo_config.sysroot = Some(RustcSource::Discover);
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: !self.disable_build_scripts,
             with_proc_macro_server: ProcMacroServerChoice::Sysroot,

@@ -13,7 +13,7 @@ use ide_db::LineIndexDatabase;
 use ide_db::base_db::salsa::{self, ParallelDatabase};
 use ide_db::line_index::WideEncoding;
 use lsp_types::{self, lsif};
-use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
+use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustcSource};
 use vfs::{AbsPathBuf, Vfs};

 use crate::cli::load_cargo::ProcMacroServerChoice;
@@ -289,7 +289,8 @@ impl flags::Lsif {
     pub fn run(self) -> Result<()> {
         eprintln!("Generating LSIF started...");
         let now = Instant::now();
-        let cargo_config = CargoConfig::default();
+        let mut cargo_config = CargoConfig::default();
+        cargo_config.sysroot = Some(RustcSource::Discover);
         let no_progress = &|_| ();
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: true,

@@ -15,7 +15,7 @@ use ide::{
     TokenStaticData,
 };
 use ide_db::LineIndexDatabase;
-use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
+use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustcSource};
 use scip::types as scip_types;
 use std::env;

@@ -29,7 +29,8 @@ impl flags::Scip {
     pub fn run(self) -> Result<()> {
         eprintln!("Generating SCIP start...");
         let now = Instant::now();
-        let cargo_config = CargoConfig::default();
+        let mut cargo_config = CargoConfig::default();
+        cargo_config.sysroot = Some(RustcSource::Discover);

         let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}"));
         let load_cargo_config = LoadCargoConfig {

@@ -1,7 +1,7 @@
 //! Applies structured search replace rules from the command line.

 use ide_ssr::MatchFinder;
-use project_model::CargoConfig;
+use project_model::{CargoConfig, RustcSource};

 use crate::cli::{
     flags,
@@ -12,7 +12,8 @@ use crate::cli::{
 impl flags::Ssr {
     pub fn run(self) -> Result<()> {
         use ide_db::base_db::SourceDatabaseExt;
-        let cargo_config = CargoConfig::default();
+        let mut cargo_config = CargoConfig::default();
+        cargo_config.sysroot = Some(RustcSource::Discover);
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: true,
             with_proc_macro_server: ProcMacroServerChoice::Sysroot,

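The same two-line change recurs in all four CLI entry points above (flags::Diagnostics, flags::Lsif, flags::Scip, flags::Ssr): the config is made mutable and sysroot discovery is switched on, so these batch commands also load sysroot crates such as std. A minimal sketch of the pattern with stand-in types (the real ones live in the project_model crate):

// Stand-ins for project_model::{CargoConfig, RustcSource}.
enum RustcSource { Discover }

#[derive(Default)]
struct CargoConfig { sysroot: Option<RustcSource> }

fn main() {
    // Start from the defaults, then opt in to discovering the sysroot.
    let mut cargo_config = CargoConfig::default();
    cargo_config.sysroot = Some(RustcSource::Discover);
    assert!(matches!(cargo_config.sysroot, Some(RustcSource::Discover)));
}
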
@@ -101,6 +101,8 @@ config_data! {
         /// Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
         /// avoid checking unnecessary things.
         cargo_buildScripts_useRustcWrapper: bool = "true",
+        /// Extra arguments that are passed to every cargo invocation.
+        cargo_extraArgs: Vec<String> = "[]",
         /// Extra environment variables that will be set when running cargo, rustc
         /// or other commands within the workspace. Useful for setting RUSTFLAGS.
         cargo_extraEnv: FxHashMap<String, String> = "{}",
@@ -366,6 +368,8 @@ config_data! {
         inlayHints_typeHints_hideClosureInitialization: bool = "false",
         /// Whether to hide inlay type hints for constructors.
         inlayHints_typeHints_hideNamedConstructor: bool = "false",
+        /// Enables the experimental support for interpreting tests.
+        interpret_tests: bool = "false",

         /// Join lines merges consecutive declaration and initialization of an assignment.
         joinLines_joinAssignments: bool = "true",
@@ -456,7 +460,10 @@ config_data! {
         /// Additional arguments to `rustfmt`.
         rustfmt_extraArgs: Vec<String> = "[]",
         /// Advanced option, fully override the command rust-analyzer uses for
-        /// formatting.
+        /// formatting. This should be the equivalent of `rustfmt` here, and
+        /// not that of `cargo fmt`. The file contents will be passed on the
+        /// standard input and the formatted result will be read from the
+        /// standard output.
         rustfmt_overrideCommand: Option<Vec<String>> = "null",
         /// Enables the use of rustfmt's unstable range formatting command for the
         /// `textDocument/rangeFormatting` request. The rustfmt option is unstable and only
@@ -849,27 +856,27 @@ impl Config {
     }
     pub fn linked_projects(&self) -> Vec<LinkedProject> {
         match self.data.linkedProjects.as_slice() {
-            [] => match self.discovered_projects.as_ref() {
-                Some(discovered_projects) => {
-                    let exclude_dirs: Vec<_> = self
-                        .data
-                        .files_excludeDirs
-                        .iter()
-                        .map(|p| self.root_path.join(p))
-                        .collect();
-                    discovered_projects
-                        .iter()
-                        .filter(|p| {
-                            let (ProjectManifest::ProjectJson(path)
-                            | ProjectManifest::CargoToml(path)) = p;
-                            !exclude_dirs.iter().any(|p| path.starts_with(p))
-                        })
-                        .cloned()
-                        .map(LinkedProject::from)
-                        .collect()
-                }
-                None => Vec::new(),
-            },
+            [] => {
+                match self.discovered_projects.as_ref() {
+                    Some(discovered_projects) => {
+                        let exclude_dirs: Vec<_> = self
+                            .data
+                            .files_excludeDirs
+                            .iter()
+                            .map(|p| self.root_path.join(p))
+                            .collect();
+                        discovered_projects
+                            .iter()
+                            .filter(|(ProjectManifest::ProjectJson(path) | ProjectManifest::CargoToml(path))| {
+                                !exclude_dirs.iter().any(|p| path.starts_with(p))
+                            })
+                            .cloned()
+                            .map(LinkedProject::from)
+                            .collect()
+                    }
+                    None => Vec::new(),
+                }
+            }
             linked_projects => linked_projects
                 .iter()
                 .filter_map(|linked_project| match linked_project {
@@ -1050,10 +1057,20 @@ impl Config {
         }
     }

+    pub fn extra_args(&self) -> &Vec<String> {
+        &self.data.cargo_extraArgs
+    }
+
     pub fn extra_env(&self) -> &FxHashMap<String, String> {
         &self.data.cargo_extraEnv
     }

+    pub fn check_extra_args(&self) -> Vec<String> {
+        let mut extra_args = self.extra_args().clone();
+        extra_args.extend_from_slice(&self.data.check_extraArgs);
+        extra_args
+    }
+
     pub fn check_extra_env(&self) -> FxHashMap<String, String> {
         let mut extra_env = self.data.cargo_extraEnv.clone();
         extra_env.extend(self.data.check_extraEnv.clone());
@@ -1152,6 +1169,7 @@ impl Config {
                 InvocationLocation::Workspace => project_model::InvocationLocation::Workspace,
             },
             run_build_script_command: self.data.cargo_buildScripts_overrideCommand.clone(),
+            extra_args: self.data.cargo_extraArgs.clone(),
             extra_env: self.data.cargo_extraEnv.clone(),
         }
     }
@@ -1222,7 +1240,7 @@ impl Config {
                 CargoFeaturesDef::All => vec![],
                 CargoFeaturesDef::Selected(it) => it,
             },
-            extra_args: self.data.check_extraArgs.clone(),
+            extra_args: self.check_extra_args(),
             extra_env: self.check_extra_env(),
             ansi_color_output: self.color_diagnostic_output(),
         },
@@ -1441,6 +1459,7 @@ impl Config {
             }
         },
         keywords: self.data.hover_documentation_keywords_enable,
+        interpret_tests: self.data.interpret_tests,
     }
 }

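With `cargo.extraArgs` now a separate setting, `check_extra_args()` builds the flycheck argument list by appending `check.extraArgs` after the cargo-wide arguments (and `check_extra_env()` does the analogous merge for environment variables). A runnable sketch of that merge over plain `Vec<String>`s, with made-up argument values:

// Mirrors Config::check_extra_args: cargo-wide args first, check-specific args after.
fn check_extra_args(cargo_extra_args: &[String], check_extra_args: &[String]) -> Vec<String> {
    let mut extra_args = cargo_extra_args.to_vec();
    extra_args.extend_from_slice(check_extra_args);
    extra_args
}

fn main() {
    let cargo = vec!["--offline".to_string()];
    let check = vec!["--timings".to_string()];
    assert_eq!(check_extra_args(&cargo, &check), ["--offline", "--timings"]);
}
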
@@ -134,6 +134,16 @@ pub(crate) fn handle_view_hir(
     Ok(res)
 }

+pub(crate) fn handle_view_mir(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::TextDocumentPositionParams,
+) -> Result<String> {
+    let _p = profile::span("handle_view_mir");
+    let position = from_proto::file_position(&snap, params)?;
+    let res = snap.analysis.view_mir(position)?;
+    Ok(res)
+}
+
 pub(crate) fn handle_view_file_text(
     snap: GlobalStateSnapshot,
     params: lsp_types::TextDocumentIdentifier,

@@ -74,6 +74,14 @@ impl Request for ViewHir {
     const METHOD: &'static str = "rust-analyzer/viewHir";
 }

+pub enum ViewMir {}
+
+impl Request for ViewMir {
+    type Params = lsp_types::TextDocumentPositionParams;
+    type Result = String;
+    const METHOD: &'static str = "rust-analyzer/viewMir";
+}
+
 pub enum ViewFileText {}

 impl Request for ViewFileText {

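ViewMir follows the same marker-type pattern as the existing ViewHir extension: an uninhabited enum that only carries the request's parameter type, result type, and method name. A self-contained sketch of the pattern (the trait below is simplified from lsp_types::request::Request, and the unit Params stands in for lsp_types::TextDocumentPositionParams):

// Simplified version of the lsp_types Request trait.
trait Request {
    type Params;
    type Result;
    const METHOD: &'static str;
}

// Uninhabited marker type: never constructed, only used for dispatch.
enum ViewMir {}

impl Request for ViewMir {
    type Params = ();
    type Result = String;
    const METHOD: &'static str = "rust-analyzer/viewMir";
}

fn main() {
    // A dispatcher can route on the METHOD constant without ever
    // constructing the marker type itself.
    println!("{}", <ViewMir as Request>::METHOD);
}
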
@@ -111,12 +111,7 @@ impl fmt::Debug for Event {

 impl GlobalState {
     fn run(mut self, inbox: Receiver<lsp_server::Message>) -> Result<()> {
-        if self.config.linked_projects().is_empty()
-            && self.config.detached_files().is_empty()
-            && self.config.notifications().cargo_toml_not_found
-        {
-            self.show_and_log_error("rust-analyzer failed to discover workspace".to_string(), None);
-        };
+        self.update_status_or_notify();

         if self.config.did_save_text_document_dynamic_registration() {
             let save_registration_options = lsp_types::TextDocumentSaveRegistrationOptions {
@@ -323,17 +318,6 @@ impl GlobalState {

         if let Some(diagnostic_changes) = self.diagnostics.take_changes() {
             for file_id in diagnostic_changes {
-                let db = self.analysis_host.raw_database();
-                let source_root = db.file_source_root(file_id);
-                if db.source_root(source_root).is_library {
-                    // Only publish diagnostics for files in the workspace, not from crates.io deps
-                    // or the sysroot.
-                    // While theoretically these should never have errors, we have quite a few false
-                    // positives particularly in the stdlib, and those diagnostics would stay around
-                    // forever if we emitted them here.
-                    continue;
-                }
-
                 let uri = file_id_to_url(&self.vfs.read().0, file_id);
                 let mut diagnostics =
                     self.diagnostics.diagnostics_for(file_id).cloned().collect::<Vec<_>>();
@@ -405,18 +389,7 @@ impl GlobalState {
             });
         }

-        let status = self.current_status();
-        if self.last_reported_status.as_ref() != Some(&status) {
-            self.last_reported_status = Some(status.clone());
-
-            if let (lsp_ext::Health::Error, Some(message)) = (status.health, &status.message) {
-                self.show_message(lsp_types::MessageType::ERROR, message.clone());
-            }
-
-            if self.config.server_status_notification() {
-                self.send_notification::<lsp_ext::ServerStatusNotification>(status);
-            }
-        }
+        self.update_status_or_notify();

         let loop_duration = loop_start.elapsed();
         if loop_duration > Duration::from_millis(100) && was_quiescent {
@@ -426,6 +399,20 @@ impl GlobalState {
         Ok(())
     }

+    fn update_status_or_notify(&mut self) {
+        let status = self.current_status();
+        if self.last_reported_status.as_ref() != Some(&status) {
+            self.last_reported_status = Some(status.clone());
+
+            if self.config.server_status_notification() {
+                self.send_notification::<lsp_ext::ServerStatusNotification>(status);
+            } else if let (lsp_ext::Health::Error, Some(message)) = (status.health, &status.message)
+            {
+                self.show_and_log_error(message.clone(), None);
+            }
+        }
+    }
+
     fn handle_task(&mut self, prime_caches_progress: &mut Vec<PrimeCachesProgress>, task: Task) {
         match task {
             Task::Response(response) => self.respond(response),
@@ -456,6 +443,9 @@ impl GlobalState {
                     ProjectWorkspaceProgress::Report(msg) => (Progress::Report, Some(msg)),
                     ProjectWorkspaceProgress::End(workspaces) => {
                         self.fetch_workspaces_queue.op_completed(Some(workspaces));
+                        if let Err(e) = self.fetch_workspace_error() {
+                            tracing::error!("FetchWorkspaceError:\n{e}");
+                        }

                         let old = Arc::clone(&self.workspaces);
                         self.switch_workspaces("fetched workspace".to_string());
@@ -477,6 +467,9 @@ impl GlobalState {
                     BuildDataProgress::Report(msg) => (Some(Progress::Report), Some(msg)),
                     BuildDataProgress::End(build_data_result) => {
                         self.fetch_build_data_queue.op_completed(build_data_result);
+                        if let Err(e) = self.fetch_build_data_error() {
+                            tracing::error!("FetchBuildDataError:\n{e}");
+                        }

                         self.switch_workspaces("fetched build data".to_string());

@@ -509,6 +502,7 @@ impl GlobalState {
                     self.vfs_progress_n_total = n_total;
                     self.vfs_progress_n_done = n_done;

+                    // if n_total != 0 {
                     let state = if n_done == 0 {
                         Progress::Begin
                     } else if n_done < n_total {
@@ -523,7 +517,8 @@ impl GlobalState {
                             Some(format!("{n_done}/{n_total}")),
                             Some(Progress::fraction(n_done, n_total)),
+                            None,
                         )
                     );
+                    // }
                 }
             }
         }
@@ -565,7 +560,10 @@ impl GlobalState {
                 flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)),
                 flycheck::Progress::DidCancel => (Progress::End, None),
                 flycheck::Progress::DidFailToRestart(err) => {
-                    self.show_and_log_error("cargo check failed".to_string(), Some(err));
+                    self.show_and_log_error(
+                        "cargo check failed to start".to_string(),
+                        Some(err),
+                    );
                     return;
                 }
                 flycheck::Progress::DidFinish(result) => {
@@ -634,6 +632,7 @@ impl GlobalState {
             .on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status)
             .on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree)
             .on::<lsp_ext::ViewHir>(handlers::handle_view_hir)
+            .on::<lsp_ext::ViewMir>(handlers::handle_view_mir)
             .on::<lsp_ext::ViewFileText>(handlers::handle_view_file_text)
             .on::<lsp_ext::ViewCrateGraph>(handlers::handle_view_crate_graph)
             .on::<lsp_ext::ViewItemTree>(handlers::handle_view_item_tree)
@@ -971,10 +970,20 @@ impl GlobalState {
     }

     fn update_diagnostics(&mut self) {
+        let db = self.analysis_host.raw_database();
         let subscriptions = self
             .mem_docs
             .iter()
             .map(|path| self.vfs.read().0.file_id(path).unwrap())
+            .filter(|&file_id| {
+                let source_root = db.file_source_root(file_id);
+                // Only publish diagnostics for files in the workspace, not from crates.io deps
+                // or the sysroot.
+                // While theoretically these should never have errors, we have quite a few false
+                // positives particularly in the stdlib, and those diagnostics would stay around
+                // forever if we emitted them here.
+                !db.source_root(source_root).is_library
+            })
             .collect::<Vec<_>>();

         tracing::trace!("updating notifications for {:?}", subscriptions);

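The refactor above centralizes status reporting: instead of duplicating the "compare against last_reported_status, then notify" logic at startup and in the main loop, both call sites now use update_status_or_notify, which only emits a notification (or logs an error) when the status actually changed. A runnable sketch of that change-detection pattern with a stand-in Status type:

#[derive(Clone, PartialEq, Debug)]
struct Status { health: &'static str, message: Option<String> }

struct Server { last_reported_status: Option<Status> }

impl Server {
    fn update_status_or_notify(&mut self, current: Status) {
        // Only notify when the status differs from what was last reported.
        if self.last_reported_status.as_ref() != Some(&current) {
            self.last_reported_status = Some(current.clone());
            println!("notify: {current:?}");
        }
    }
}

fn main() {
    let mut server = Server { last_reported_status: None };
    let status = Status { health: "ok", message: None };
    server.update_status_or_notify(status.clone()); // notifies
    server.update_status_or_notify(status); // unchanged: no notification
}
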
@@ -12,17 +12,21 @@
 //! correct. Instead, we try to provide a best-effort service. Even if the
 //! project is currently loading and we don't have a full project model, we
 //! still want to respond to various requests.
-use std::{mem, sync::Arc};
+use std::{collections::hash_map::Entry, mem, sync::Arc};

 use flycheck::{FlycheckConfig, FlycheckHandle};
 use hir::db::DefDatabase;
 use ide::Change;
-use ide_db::base_db::{
-    CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
-    ProcMacroLoadResult, SourceRoot, VfsPath,
+use ide_db::{
+    base_db::{
+        CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
+        ProcMacroLoadResult, SourceRoot, VfsPath,
+    },
+    FxHashMap,
 };
+use itertools::Itertools;
 use proc_macro_api::{MacroDylib, ProcMacroServer};
-use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
+use project_model::{PackageRoot, ProjectWorkspace, WorkspaceBuildScripts};
 use syntax::SmolStr;
 use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};
@@ -52,7 +56,8 @@ pub(crate) enum BuildDataProgress {

 impl GlobalState {
     pub(crate) fn is_quiescent(&self) -> bool {
-        !(self.fetch_workspaces_queue.op_in_progress()
+        !(self.last_reported_status.is_none()
+            || self.fetch_workspaces_queue.op_in_progress()
             || self.fetch_build_data_queue.op_in_progress()
             || self.vfs_progress_config_version < self.vfs_config_version
             || self.vfs_progress_n_done < self.vfs_progress_n_total)
@@ -104,9 +109,9 @@ impl GlobalState {
             status.message = Some("Workspace reload required".to_string())
         }

-        if let Err(error) = self.fetch_workspace_error() {
+        if let Err(_) = self.fetch_workspace_error() {
             status.health = lsp_ext::Health::Error;
-            status.message = Some(error)
+            status.message = Some("Failed to load workspaces".to_string())
         }

         if self.config.linked_projects().is_empty()
@@ -114,8 +119,9 @@ impl GlobalState {
             && self.config.notifications().cargo_toml_not_found
         {
             status.health = lsp_ext::Health::Warning;
-            status.message = Some("Workspace reload required".to_string())
+            status.message = Some("Failed to discover workspace".to_string())
         }

         status
     }
@@ -197,8 +203,7 @@ impl GlobalState {
         let _p = profile::span("GlobalState::switch_workspaces");
         tracing::info!(%cause, "will switch workspaces");

-        if let Err(error_message) = self.fetch_workspace_error() {
-            self.show_and_log_error(error_message, None);
+        if let Err(_) = self.fetch_workspace_error() {
             if !self.workspaces.is_empty() {
                 // It only makes sense to switch to a partially broken workspace
                 // if we don't have any workspace at all yet.
@@ -206,10 +211,6 @@ impl GlobalState {
             }
         }

-        if let Err(error) = self.fetch_build_data_error() {
-            self.show_and_log_error("failed to run build scripts".to_string(), Some(error));
-        }
-
         let Some(workspaces) = self.fetch_workspaces_queue.last_op_result() else { return; };
         let workspaces =
             workspaces.iter().filter_map(|res| res.as_ref().ok().cloned()).collect::<Vec<_>>();
@@ -388,7 +389,7 @@ impl GlobalState {
         tracing::info!("did switch workspaces");
     }

-    fn fetch_workspace_error(&self) -> Result<(), String> {
+    pub(super) fn fetch_workspace_error(&self) -> Result<(), String> {
         let mut buf = String::new();

         let Some(last_op_result) = self.fetch_workspaces_queue.last_op_result() else { return Ok(()) };
@@ -409,7 +410,7 @@ impl GlobalState {
         Err(buf)
     }

-    fn fetch_build_data_error(&self) -> Result<(), String> {
+    pub(super) fn fetch_build_data_error(&self) -> Result<(), String> {
         let mut buf = String::new();

         for ws in &self.fetch_build_data_queue.last_op_result().1 {
@@ -494,7 +495,69 @@ impl ProjectFolders {
         let mut fsc = FileSetConfig::builder();
         let mut local_filesets = vec![];

-        for root in workspaces.iter().flat_map(|ws| ws.to_roots()) {
+        // Dedup source roots
+        // Depending on the project setup, we can have duplicated source roots, or for example in
+        // the case of the rustc workspace, we can end up with two source roots that are almost the
+        // same but not quite, like:
+        // PackageRoot { is_local: false, include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri")], exclude: [] }
+        // PackageRoot {
+        //     is_local: true,
+        //     include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri"), AbsPathBuf(".../rust/build/x86_64-pc-windows-msvc/stage0-tools/x86_64-pc-windows-msvc/release/build/cargo-miri-85801cd3d2d1dae4/out")],
+        //     exclude: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri/.git"), AbsPathBuf(".../rust/src/tools/miri/cargo-miri/target")]
+        // }
+        //
+        // The first one comes from the explicit rustc workspace which points to the rustc workspace itself
+        // The second comes from the rustc workspace that we load as the actual project workspace
+        // These `is_local` differing in this kind of way gives us problems, especially when trying to filter diagnostics as we don't report diagnostics for external libraries.
+        // So we need to deduplicate these, usually it would be enough to deduplicate by `include`, but as the rustc example shows here that doesn't work,
+        // so we need to also coalesce the includes if they overlap.
+
+        let mut roots: Vec<_> = workspaces
+            .iter()
+            .flat_map(|ws| ws.to_roots())
+            .update(|root| root.include.sort())
+            .sorted_by(|a, b| a.include.cmp(&b.include))
+            .collect();
+
+        // map that tracks indices of overlapping roots
+        let mut overlap_map = FxHashMap::<_, Vec<_>>::default();
+        let mut done = false;
+
+        while !mem::replace(&mut done, true) {
+            // maps include paths to indices of the corresponding root
+            let mut include_to_idx = FxHashMap::default();
+            // Find and note down the indices of overlapping roots
+            for (idx, root) in roots.iter().enumerate().filter(|(_, it)| !it.include.is_empty()) {
+                for include in &root.include {
+                    match include_to_idx.entry(include) {
+                        Entry::Occupied(e) => {
+                            overlap_map.entry(*e.get()).or_default().push(idx);
+                        }
+                        Entry::Vacant(e) => {
+                            e.insert(idx);
+                        }
+                    }
+                }
+            }
+            for (k, v) in overlap_map.drain() {
+                done = false;
+                for v in v {
+                    let r = mem::replace(
+                        &mut roots[v],
+                        PackageRoot { is_local: false, include: vec![], exclude: vec![] },
+                    );
+                    roots[k].is_local |= r.is_local;
+                    roots[k].include.extend(r.include);
+                    roots[k].exclude.extend(r.exclude);
+                }
+                roots[k].include.sort();
+                roots[k].exclude.sort();
+                roots[k].include.dedup();
+                roots[k].exclude.dedup();
+            }
+        }
+
+        for root in roots.into_iter().filter(|it| !it.include.is_empty()) {
             let file_set_roots: Vec<VfsPath> =
                 root.include.iter().cloned().map(VfsPath::from).collect();

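The dedup loop above runs to a fixpoint: each pass records, for every include path, the first root that claims it; any later root sharing a path is drained into that first one, and the loop repeats until a pass finds no overlaps. A runnable, simplified sketch of that coalescing (PackageRoot reduced to the two fields the merge cares about, with made-up paths):

use std::collections::HashMap;
use std::mem;

#[derive(Debug)]
struct PackageRoot { is_local: bool, include: Vec<&'static str> }

fn coalesce(mut roots: Vec<PackageRoot>) -> Vec<PackageRoot> {
    let mut done = false;
    while !mem::replace(&mut done, true) {
        // Maps each include path to the index of the first root containing it.
        let mut include_to_idx: HashMap<&str, usize> = HashMap::new();
        // Tracks indices of roots that overlap with an earlier root.
        let mut overlap_map: HashMap<usize, Vec<usize>> = HashMap::new();
        for (idx, root) in roots.iter().enumerate().filter(|(_, r)| !r.include.is_empty()) {
            for &include in &root.include {
                if let Some(&first) = include_to_idx.get(include) {
                    if first != idx {
                        overlap_map.entry(first).or_default().push(idx);
                    }
                } else {
                    include_to_idx.insert(include, idx);
                }
            }
        }
        for (k, overlapping) in overlap_map {
            done = false; // a merge happened, so run another pass
            for v in overlapping {
                let r = mem::replace(
                    &mut roots[v],
                    PackageRoot { is_local: false, include: vec![] },
                );
                roots[k].is_local |= r.is_local;
                roots[k].include.extend(r.include);
            }
            roots[k].include.sort();
            roots[k].include.dedup();
        }
    }
    roots.retain(|r| !r.include.is_empty());
    roots
}

fn main() {
    let roots = vec![
        PackageRoot { is_local: false, include: vec!["rust/src/tools/miri"] },
        PackageRoot { is_local: true, include: vec!["rust/src/tools/miri", "rust/build/out"] },
        PackageRoot { is_local: false, include: vec!["other"] },
    ];
    // The two overlapping roots collapse into one root that stays local.
    println!("{:?}", coalesce(roots));
}
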
@@ -50,7 +50,7 @@ pub(crate) fn symbol_kind(symbol_kind: SymbolKind) -> lsp_types::SymbolKind {
         SymbolKind::Struct => lsp_types::SymbolKind::STRUCT,
         SymbolKind::Enum => lsp_types::SymbolKind::ENUM,
         SymbolKind::Variant => lsp_types::SymbolKind::ENUM_MEMBER,
-        SymbolKind::Trait => lsp_types::SymbolKind::INTERFACE,
+        SymbolKind::Trait | SymbolKind::TraitAlias => lsp_types::SymbolKind::INTERFACE,
         SymbolKind::Macro
         | SymbolKind::BuiltinAttr
         | SymbolKind::Attribute
@@ -135,6 +135,7 @@ pub(crate) fn completion_item_kind(
         SymbolKind::Static => lsp_types::CompletionItemKind::VALUE,
         SymbolKind::Struct => lsp_types::CompletionItemKind::STRUCT,
         SymbolKind::Trait => lsp_types::CompletionItemKind::INTERFACE,
+        SymbolKind::TraitAlias => lsp_types::CompletionItemKind::INTERFACE,
         SymbolKind::TypeAlias => lsp_types::CompletionItemKind::STRUCT,
         SymbolKind::TypeParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
         SymbolKind::Union => lsp_types::CompletionItemKind::STRUCT,
@@ -656,6 +657,7 @@ fn semantic_token_type_and_modifiers(
         SymbolKind::Union => semantic_tokens::UNION,
         SymbolKind::TypeAlias => semantic_tokens::TYPE_ALIAS,
         SymbolKind::Trait => semantic_tokens::INTERFACE,
+        SymbolKind::TraitAlias => semantic_tokens::INTERFACE,
         SymbolKind::Macro => semantic_tokens::MACRO,
         SymbolKind::BuiltinAttr => semantic_tokens::BUILTIN_ATTRIBUTE,
         SymbolKind::ToolModule => semantic_tokens::TOOL_MODULE,