Merge commit 'aa9bc86125' into sync-from-ra

Laurențiu Nicola 2023-06-05 12:04:23 +03:00
parent 1570299af4
commit c48062fe2a
598 changed files with 57696 additions and 17615 deletions

View file

@ -29,9 +29,8 @@ parking_lot = "0.12.1"
xflags = "0.3.0"
oorandom = "11.1.3"
rustc-hash = "1.1.0"
serde = { version = "1.0.137", features = ["derive"] }
serde_json = { version = "1.0.81", features = ["preserve_order"] }
threadpool = "1.8.1"
serde_json = { workspace = true, features = ["preserve_order"] }
serde.workspace = true
rayon = "1.6.1"
num_cpus = "1.15.0"
mimalloc = { version = "0.1.30", default-features = false, optional = true }
@ -45,8 +44,20 @@ tracing-subscriber = { version = "0.3.16", default-features = false, features =
] }
tracing-log = "0.1.3"
tracing-tree = "0.2.1"
triomphe.workspace = true
nohash-hasher.workspace = true
always-assert = "0.1.2"
# These dependencies are unused, but we pin them to a version here to restrict them for our transitive dependencies
# so that we don't pull in duplicates of their dependencies like windows-sys and syn 1 vs 2
# these would pull in syn 2
thiserror = "=1.0.39"
serde_repr = "=0.1.11"
# these would pull in windows-sys 0.45.0
mio = "=0.8.5"
filetime = "=0.2.19"
parking_lot_core = "=0.9.6"
cfg.workspace = true
flycheck.workspace = true
hir-def.workspace = true
@ -57,7 +68,6 @@ ide-db.workspace = true
ide-ssr.workspace = true
ide.workspace = true
proc-macro-api.workspace = true
proc-macro-srv.workspace = true
profile.workspace = true
project-model.workspace = true
stdx.workspace = true
@ -75,7 +85,6 @@ jemallocator = { version = "0.5.0", package = "tikv-jemallocator", optional = tr
[dev-dependencies]
expect-test = "1.4.0"
jod-thread = "0.1.2"
xshell = "0.2.2"
test-utils.workspace = true
@ -85,8 +94,4 @@ mbe.workspace = true
[features]
jemalloc = ["jemallocator", "profile/jemalloc"]
force-always-assert = ["always-assert/force"]
in-rust-tree = [
"proc-macro-srv/sysroot-abi",
"ide/in-rust-tree",
"syntax/in-rust-tree",
]
in-rust-tree = ["ide/in-rust-tree", "syntax/in-rust-tree"]

View file

@ -7,7 +7,11 @@
mod logger;
mod rustc_wrapper;
use std::{env, fs, path::Path, process};
use std::{
env, fs,
path::{Path, PathBuf},
process,
};
use lsp_server::Connection;
use rust_analyzer::{cli::flags, config::Config, from_json, Result};
@ -74,10 +78,16 @@ fn try_main(flags: flags::RustAnalyzer) -> Result<()> {
println!("rust-analyzer {}", rust_analyzer::version());
return Ok(());
}
with_extra_thread("LspServer", run_server)?;
}
flags::RustAnalyzerCmd::ProcMacro(flags::ProcMacro) => {
with_extra_thread("MacroExpander", || proc_macro_srv::cli::run().map_err(Into::into))?;
// rust-analyzer's “main thread” is actually
// a secondary latency-sensitive thread with an increased stack size.
// We use this thread intent because any delay in the main loop
// will make actions like hitting enter in the editor slow.
with_extra_thread(
"LspServer",
stdx::thread::ThreadIntent::LatencySensitive,
run_server,
)?;
}
flags::RustAnalyzerCmd::Parse(cmd) => cmd.run()?,
flags::RustAnalyzerCmd::Symbols(cmd) => cmd.run()?,
@ -135,14 +145,17 @@ const STACK_SIZE: usize = 1024 * 1024 * 8;
/// space.
fn with_extra_thread(
thread_name: impl Into<String>,
thread_intent: stdx::thread::ThreadIntent,
f: impl FnOnce() -> Result<()> + Send + 'static,
) -> Result<()> {
let handle =
std::thread::Builder::new().name(thread_name.into()).stack_size(STACK_SIZE).spawn(f)?;
match handle.join() {
Ok(res) => res,
Err(panic) => std::panic::resume_unwind(panic),
}
let handle = stdx::thread::Builder::new(thread_intent)
.name(thread_name.into())
.stack_size(STACK_SIZE)
.spawn(f)?;
handle.join()?;
Ok(())
}
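For reference, the thread-with-a-bigger-stack pattern used here reduces to plain std APIs. A minimal sketch, with `run_server` stubbed out and the same 8 MiB stack size; only the "LspServer" name and size are taken from the code above:

    use std::thread;

    const STACK_SIZE: usize = 1024 * 1024 * 8;

    fn main() {
        // Stand-in for `run_server`; the real loop lives in rust-analyzer.
        let server = || println!("server loop runs here");

        // Run the latency-sensitive work on a secondary thread with a larger
        // stack, then surface its result (or panic) on the primary thread.
        let handle = thread::Builder::new()
            .name("LspServer".to_string())
            .stack_size(STACK_SIZE)
            .spawn(server)
            .expect("failed to spawn LspServer thread");
        handle.join().expect("LspServer thread panicked");
    }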
fn run_server() -> Result<()> {
@ -152,12 +165,18 @@ fn run_server() -> Result<()> {
let (initialize_id, initialize_params) = connection.initialize_start()?;
tracing::info!("InitializeParams: {}", initialize_params);
let initialize_params =
from_json::<lsp_types::InitializeParams>("InitializeParams", &initialize_params)?;
let lsp_types::InitializeParams {
root_uri,
capabilities,
workspace_folders,
initialization_options,
client_info,
..
} = from_json::<lsp_types::InitializeParams>("InitializeParams", &initialize_params)?;
let root_path = match initialize_params
.root_uri
let root_path = match root_uri
.and_then(|it| it.to_file_path().ok())
.map(patch_path_prefix)
.and_then(|it| AbsPathBuf::try_from(it).ok())
{
Some(it) => it,
@ -167,19 +186,19 @@ fn run_server() -> Result<()> {
}
};
let workspace_roots = initialize_params
.workspace_folders
let workspace_roots = workspace_folders
.map(|workspaces| {
workspaces
.into_iter()
.filter_map(|it| it.uri.to_file_path().ok())
.map(patch_path_prefix)
.filter_map(|it| AbsPathBuf::try_from(it).ok())
.collect::<Vec<_>>()
})
.filter(|workspaces| !workspaces.is_empty())
.unwrap_or_else(|| vec![root_path.clone()]);
let mut config = Config::new(root_path, initialize_params.capabilities, workspace_roots);
if let Some(json) = initialize_params.initialization_options {
let mut config = Config::new(root_path, capabilities, workspace_roots);
if let Some(json) = initialization_options {
if let Err(e) = config.update(json) {
use lsp_types::{
notification::{Notification, ShowMessage},
@ -208,7 +227,7 @@ fn run_server() -> Result<()> {
connection.initialize_finish(initialize_id, initialize_result)?;
if let Some(client_info) = initialize_params.client_info {
if let Some(client_info) = client_info {
tracing::info!("Client '{}' {}", client_info.name, client_info.version.unwrap_or_default());
}
@ -222,3 +241,42 @@ fn run_server() -> Result<()> {
tracing::info!("server did shut down");
Ok(())
}
fn patch_path_prefix(path: PathBuf) -> PathBuf {
use std::path::{Component, Prefix};
if cfg!(windows) {
// VSCode might report paths with the file drive in lowercase, but this can mess
// with env vars set by tools and build scripts executed by r-a such that it invalidates
// cargo's compilations unnecessarily. https://github.com/rust-lang/rust-analyzer/issues/14683
// So we just uppercase the drive letter here unconditionally.
// (doing it conditionally is a pain because std::path::Prefix always reports uppercase letters on windows)
let mut comps = path.components();
match comps.next() {
Some(Component::Prefix(prefix)) => {
let prefix = match prefix.kind() {
Prefix::Disk(d) => {
format!("{}:", d.to_ascii_uppercase() as char)
}
Prefix::VerbatimDisk(d) => {
format!(r"\\?\{}:", d.to_ascii_uppercase() as char)
}
_ => return path,
};
let mut path = PathBuf::new();
path.push(prefix);
path.extend(comps);
path
}
_ => path,
}
} else {
path
}
}
#[test]
#[cfg(windows)]
fn patch_path_prefix_works() {
assert_eq!(patch_path_prefix(r"c:\foo\bar".into()), PathBuf::from(r"C:\foo\bar"));
assert_eq!(patch_path_prefix(r"\\?\c:\foo\bar".into()), PathBuf::from(r"\\?\C:\foo\bar"));
}

View file

@ -23,13 +23,14 @@ use crate::semantic_tokens;
pub fn server_capabilities(config: &Config) -> ServerCapabilities {
ServerCapabilities {
position_encoding: Some(match negotiated_encoding(config.caps()) {
PositionEncoding::Utf8 => PositionEncodingKind::UTF8,
position_encoding: match negotiated_encoding(config.caps()) {
PositionEncoding::Utf8 => Some(PositionEncodingKind::UTF8),
PositionEncoding::Wide(wide) => match wide {
WideEncoding::Utf16 => PositionEncodingKind::UTF16,
WideEncoding::Utf32 => PositionEncodingKind::UTF32,
WideEncoding::Utf16 => Some(PositionEncodingKind::UTF16),
WideEncoding::Utf32 => Some(PositionEncodingKind::UTF32),
_ => None,
},
}),
},
text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
open_close: Some(true),
change: Some(TextDocumentSyncKind::INCREMENTAL),
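The negotiation itself (in `negotiated_encoding`, not shown in this diff) boils down to picking the best encoding the client advertises. Roughly, modeling the client's advertised position encodings as plain strings (a sketch, not rust-analyzer's actual types):

    // Prefer UTF-8 (cheapest for the server), fall back to UTF-32, then to
    // LSP's mandatory UTF-16 baseline.
    fn negotiate(client_encodings: &[&str]) -> &'static str {
        if client_encodings.contains(&"utf-8") {
            "utf-8"
        } else if client_encodings.contains(&"utf-32") {
            "utf-32"
        } else {
            "utf-16"
        }
    }

    fn main() {
        assert_eq!(negotiate(&["utf-8", "utf-16"]), "utf-8");
        assert_eq!(negotiate(&[]), "utf-16");
    }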

View file

@ -3,8 +3,9 @@
use std::mem;
use cfg::{CfgAtom, CfgExpr};
use ide::{Cancellable, FileId, RunnableKind, TestId};
use ide::{Cancellable, CrateId, FileId, RunnableKind, TestId};
use project_model::{self, CargoFeatures, ManifestPath, TargetKind};
use rustc_hash::FxHashSet;
use vfs::AbsPathBuf;
use crate::global_state::GlobalStateSnapshot;
@ -20,7 +21,9 @@ pub(crate) struct CargoTargetSpec {
pub(crate) package: String,
pub(crate) target: String,
pub(crate) target_kind: TargetKind,
pub(crate) crate_id: CrateId,
pub(crate) required_features: Vec<String>,
pub(crate) features: FxHashSet<String>,
}
impl CargoTargetSpec {
@ -73,12 +76,13 @@ impl CargoTargetSpec {
}
}
let target_required_features = if let Some(mut spec) = spec {
let (allowed_features, target_required_features) = if let Some(mut spec) = spec {
let allowed_features = mem::take(&mut spec.features);
let required_features = mem::take(&mut spec.required_features);
spec.push_to(&mut args, kind);
required_features
(allowed_features, required_features)
} else {
Vec::new()
(Default::default(), Default::default())
};
let cargo_config = snap.config.cargo();
@ -97,7 +101,9 @@ impl CargoTargetSpec {
required_features(cfg, &mut feats);
}
feats.extend(features.iter().cloned());
feats.extend(
features.iter().filter(|&feat| allowed_features.contains(feat)).cloned(),
);
feats.extend(target_required_features);
feats.dedup();
@ -136,6 +142,8 @@ impl CargoTargetSpec {
target: target_data.name.clone(),
target_kind: target_data.kind,
required_features: target_data.required_features.clone(),
features: package_data.features.keys().cloned().collect(),
crate_id,
};
Ok(Some(res))
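The effect of the new `features` set: configured features are only forwarded to cargo when the target's package actually declares them, so a workspace-wide feature list no longer breaks unrelated packages. The filtering in isolation, with plain std types standing in for the real ones:

    use std::collections::HashSet;

    // Keep only requested features the package declares, then append the
    // target's required features, mirroring the filtering added above.
    fn effective_features(
        requested: &[&str],
        declared: &HashSet<&str>,
        required: &[&str],
    ) -> Vec<String> {
        let mut feats: Vec<String> = requested
            .iter()
            .filter(|feat| declared.contains(*feat))
            .map(|feat| feat.to_string())
            .collect();
        feats.extend(required.iter().map(|feat| feat.to_string()));
        feats.dedup();
        feats
    }

    fn main() {
        let declared: HashSet<&str> = HashSet::from(["serde", "tokio"]);
        assert_eq!(
            effective_features(&["serde", "not-a-feature"], &declared, &["tokio"]),
            ["serde", "tokio"]
        );
    }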

View file

@ -8,14 +8,14 @@ use std::{
use hir::{
db::{DefDatabase, ExpandDatabase, HirDatabase},
AssocItem, Crate, Function, HasSource, HirDisplay, ModuleDef,
AssocItem, Crate, Function, HasCrate, HasSource, HirDisplay, ModuleDef,
};
use hir_def::{
body::{BodySourceMap, SyntheticSyntax},
expr::{ExprId, PatId},
hir::{ExprId, PatId},
FunctionId,
};
use hir_ty::{Interner, TyExt, TypeFlags};
use hir_ty::{Interner, Substitution, TyExt, TypeFlags};
use ide::{Analysis, AnalysisHost, LineCol, RootDatabase};
use ide_db::base_db::{
salsa::{self, debug::DebugQueryTable, ParallelDatabase},
@ -121,14 +121,19 @@ impl flags::AnalysisStats {
eprint!(" crates: {num_crates}");
let mut num_decls = 0;
let mut funcs = Vec::new();
let mut adts = Vec::new();
let mut consts = Vec::new();
while let Some(module) = visit_queue.pop() {
if visited_modules.insert(module) {
visit_queue.extend(module.children(db));
for decl in module.declarations(db) {
num_decls += 1;
if let ModuleDef::Function(f) = decl {
funcs.push(f);
match decl {
ModuleDef::Function(f) => funcs.push(f),
ModuleDef::Adt(a) => adts.push(a),
ModuleDef::Const(c) => consts.push(c),
_ => (),
}
}
@ -153,6 +158,13 @@ impl flags::AnalysisStats {
self.run_inference(&host, db, &vfs, &funcs, verbosity);
}
if !self.skip_mir_stats {
self.run_mir_lowering(db, &funcs, verbosity);
}
self.run_data_layout(db, &adts, verbosity);
self.run_const_eval(db, &consts, verbosity);
let total_span = analysis_sw.elapsed();
eprintln!("{:<20} {total_span}", "Total:");
report_metric("total time", total_span.time.as_millis() as u64, "ms");
@ -175,9 +187,8 @@ impl flags::AnalysisStats {
let mut total_macro_file_size = Bytes::default();
for e in hir::db::ParseMacroExpansionQuery.in_db(db).entries::<Vec<_>>() {
if let Some((val, _)) = db.parse_macro_expansion(e.key).value {
total_macro_file_size += syntax_len(val.syntax_node())
}
let val = db.parse_macro_expansion(e.key).value.0;
total_macro_file_size += syntax_len(val.syntax_node())
}
eprintln!("source files: {total_file_size}, macro files: {total_macro_file_size}");
}
@ -189,6 +200,93 @@ impl flags::AnalysisStats {
Ok(())
}
fn run_data_layout(&self, db: &RootDatabase, adts: &[hir::Adt], verbosity: Verbosity) {
let mut sw = self.stop_watch();
let mut all = 0;
let mut fail = 0;
for &a in adts {
if db.generic_params(a.into()).iter().next().is_some() {
// Data types with generics don't have layout.
continue;
}
all += 1;
let Err(e) = db.layout_of_adt(hir_def::AdtId::from(a).into(), Substitution::empty(Interner), a.krate(db).into()) else {
continue;
};
if verbosity.is_spammy() {
let full_name = a
.module(db)
.path_to_root(db)
.into_iter()
.rev()
.filter_map(|it| it.name(db))
.chain(Some(a.name(db)))
.map(|it| it.display(db).to_string())
.join("::");
println!("Data layout for {full_name} failed due to {e:?}");
}
fail += 1;
}
eprintln!("{:<20} {}", "Data layouts:", sw.elapsed());
eprintln!("Failed data layouts: {fail} ({}%)", percentage(fail, all));
report_metric("failed data layouts", fail, "#");
}
fn run_const_eval(&self, db: &RootDatabase, consts: &[hir::Const], verbosity: Verbosity) {
let mut sw = self.stop_watch();
let mut all = 0;
let mut fail = 0;
for &c in consts {
all += 1;
let Err(e) = c.render_eval(db) else {
continue;
};
if verbosity.is_spammy() {
let full_name = c
.module(db)
.path_to_root(db)
.into_iter()
.rev()
.filter_map(|it| it.name(db))
.chain(c.name(db))
.map(|it| it.display(db).to_string())
.join("::");
println!("Const eval for {full_name} failed due to {e:?}");
}
fail += 1;
}
eprintln!("{:<20} {}", "Const evaluation:", sw.elapsed());
eprintln!("Failed const evals: {fail} ({}%)", percentage(fail, all));
report_metric("failed const evals", fail, "#");
}
fn run_mir_lowering(&self, db: &RootDatabase, funcs: &[Function], verbosity: Verbosity) {
let mut sw = self.stop_watch();
let all = funcs.len() as u64;
let mut fail = 0;
for f in funcs {
let Err(e) = db.mir_body(FunctionId::from(*f).into()) else {
continue;
};
if verbosity.is_spammy() {
let full_name = f
.module(db)
.path_to_root(db)
.into_iter()
.rev()
.filter_map(|it| it.name(db))
.chain(Some(f.name(db)))
.map(|it| it.display(db).to_string())
.join("::");
println!("Mir body for {full_name} failed due to {e:?}");
}
fail += 1;
}
eprintln!("{:<20} {}", "MIR lowering:", sw.elapsed());
eprintln!("Mir failed bodies: {fail} ({}%)", percentage(fail, all));
report_metric("mir failed bodies", fail, "#");
}
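All three stats loops above share one shape: `let Err(e) = … else { continue }` (let-else, stable since Rust 1.65) skips successes early and keeps the failure path flat. A standalone sketch of that control flow:

    fn main() {
        let results: [Result<u32, &str>; 3] = [Ok(1), Err("no layout"), Ok(3)];
        let mut fail = 0;
        for res in results {
            // The else branch must diverge (here: continue) when the pattern
            // does not match, i.e. on the Ok case.
            let Err(e) = res else {
                continue;
            };
            eprintln!("failed due to {e:?}");
            fail += 1;
        }
        assert_eq!(fail, 1);
    }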
fn run_inference(
&self,
host: &AnalysisHost,
@ -237,9 +335,10 @@ impl flags::AnalysisStats {
.rev()
.filter_map(|it| it.name(db))
.chain(Some(f.name(db)))
.map(|it| it.display(db).to_string())
.join("::");
if let Some(only_name) = self.only.as_deref() {
if name.to_string() != only_name && full_name != only_name {
if name.display(db).to_string() != only_name && full_name != only_name {
continue;
}
}
@ -281,7 +380,7 @@ impl flags::AnalysisStats {
end.col,
));
} else {
bar.println(format!("{name}: Unknown type",));
bar.println(format!("{}: Unknown type", name.display(db)));
}
}
true
@ -336,7 +435,7 @@ impl flags::AnalysisStats {
} else {
bar.println(format!(
"{}: Expected {}, got {}",
name,
name.display(db),
mismatch.expected.display(db),
mismatch.actual.display(db)
));
@ -384,7 +483,7 @@ impl flags::AnalysisStats {
end.col,
));
} else {
bar.println(format!("{name}: Unknown type",));
bar.println(format!("{}: Unknown type", name.display(db)));
}
}
true
@ -438,7 +537,7 @@ impl flags::AnalysisStats {
} else {
bar.println(format!(
"{}: Expected {}, got {}",
name,
name.display(db),
mismatch.expected.display(db),
mismatch.actual.display(db)
));
@ -510,7 +609,7 @@ fn location_csv_expr(
Ok(s) => s,
Err(SyntheticSyntax) => return "synthetic,,".to_string(),
};
let root = db.parse_or_expand(src.file_id).unwrap();
let root = db.parse_or_expand(src.file_id);
let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range(db);
let path = vfs.file_path(original_range.file_id);
@ -532,7 +631,7 @@ fn location_csv_pat(
Ok(s) => s,
Err(SyntheticSyntax) => return "synthetic,,".to_string(),
};
let root = db.parse_or_expand(src.file_id).unwrap();
let root = db.parse_or_expand(src.file_id);
let node = src.map(|e| {
e.either(|it| it.to_node(&root).syntax().clone(), |it| it.to_node(&root).syntax().clone())
});
@ -554,7 +653,7 @@ fn expr_syntax_range(
) -> Option<(VfsPath, LineCol, LineCol)> {
let src = sm.expr_syntax(expr_id);
if let Ok(src) = src {
let root = db.parse_or_expand(src.file_id).unwrap();
let root = db.parse_or_expand(src.file_id);
let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range(db);
let path = vfs.file_path(original_range.file_id);
@ -576,7 +675,7 @@ fn pat_syntax_range(
) -> Option<(VfsPath, LineCol, LineCol)> {
let src = sm.pat_syntax(pat_id);
if let Ok(src) = src {
let root = db.parse_or_expand(src.file_id).unwrap();
let root = db.parse_or_expand(src.file_id);
let node = src.map(|e| {
e.either(
|it| it.to_node(&root).syntax().clone(),

View file

@ -66,6 +66,8 @@ xflags::xflags! {
optional --memory-usage
/// Print the total length of all source and macro files (whitespace is not counted).
optional --source-stats
/// Only type check, skip lowering to MIR
optional --skip-mir-stats
/// Only analyze items matching this path.
optional -o, --only path: String
@ -104,14 +106,15 @@ xflags::xflags! {
optional --debug snippet: String
}
cmd proc-macro {}
cmd lsif {
required path: PathBuf
}
cmd scip {
required path: PathBuf
/// The output path where the SCIP file will be written. Defaults to `index.scip`.
optional --output path: PathBuf
}
}
}
@ -139,7 +142,6 @@ pub enum RustAnalyzerCmd {
Diagnostics(Diagnostics),
Ssr(Ssr),
Search(Search),
ProcMacro(ProcMacro),
Lsif(Lsif),
Scip(Scip),
}
@ -172,6 +174,7 @@ pub struct AnalysisStats {
pub parallel: bool,
pub memory_usage: bool,
pub source_stats: bool,
pub skip_mir_stats: bool,
pub only: Option<String>,
pub with_deps: bool,
pub no_sysroot: bool,
@ -200,9 +203,6 @@ pub struct Search {
pub debug: Option<String>,
}
#[derive(Debug)]
pub struct ProcMacro;
#[derive(Debug)]
pub struct Lsif {
pub path: PathBuf,
@ -211,6 +211,7 @@ pub struct Lsif {
#[derive(Debug)]
pub struct Scip {
pub path: PathBuf,
pub output: Option<PathBuf>,
}
impl RustAnalyzer {

View file

@ -1,14 +1,17 @@
//! Loads a Cargo project into a static instance of analysis, without support
//! for incorporating changes.
use std::{convert::identity, path::Path, sync::Arc};
use std::path::Path;
use anyhow::Result;
use anyhow::{anyhow, Result};
use crossbeam_channel::{unbounded, Receiver};
use hir::db::DefDatabase;
use ide::{AnalysisHost, Change};
use ide_db::{base_db::CrateGraph, FxHashMap};
use ide_db::{
base_db::{CrateGraph, ProcMacros},
FxHashMap,
};
use proc_macro_api::ProcMacroServer;
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
use triomphe::Arc;
use vfs::{loader::Handle, AbsPath, AbsPathBuf};
use crate::reload::{load_proc_macro, ProjectFolders, SourceRootConfig};
@ -24,7 +27,7 @@ pub struct LoadCargoConfig {
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ProcMacroServerChoice {
Sysroot,
Explicit(AbsPathBuf, Vec<String>),
Explicit(AbsPathBuf),
None,
}
@ -66,23 +69,17 @@ pub fn load_workspace(
Box::new(loader)
};
let proc_macro_client = match &load_config.with_proc_macro_server {
let proc_macro_server = match &load_config.with_proc_macro_server {
ProcMacroServerChoice::Sysroot => ws
.find_sysroot_proc_macro_srv()
.ok_or_else(|| "failed to find sysroot proc-macro server".to_owned())
.and_then(|it| {
ProcMacroServer::spawn(it, identity::<&[&str]>(&[])).map_err(|e| e.to_string())
}),
ProcMacroServerChoice::Explicit(path, args) => {
ProcMacroServer::spawn(path.clone(), args).map_err(|e| e.to_string())
.and_then(|it| ProcMacroServer::spawn(it).map_err(Into::into)),
ProcMacroServerChoice::Explicit(path) => {
ProcMacroServer::spawn(path.clone()).map_err(Into::into)
}
ProcMacroServerChoice::None => Err("proc macro server disabled".to_owned()),
ProcMacroServerChoice::None => Err(anyhow!("proc macro server disabled")),
};
let crate_graph = ws.to_crate_graph(
&mut |_, path: &AbsPath| {
load_proc_macro(proc_macro_client.as_ref().map_err(|e| &**e), path, &[])
},
let (crate_graph, proc_macros) = ws.to_crate_graph(
&mut |path: &AbsPath| {
let contents = loader.load_sync(path);
let path = vfs::VfsPath::from(path.to_path_buf());
@ -91,6 +88,28 @@ pub fn load_workspace(
},
extra_env,
);
let proc_macros = {
let proc_macro_server = match &proc_macro_server {
Ok(it) => Ok(it),
Err(e) => Err(e.to_string()),
};
proc_macros
.into_iter()
.map(|(crate_id, path)| {
(
crate_id,
path.map_or_else(
|_| Err("proc macro crate is missing dylib".to_owned()),
|(_, path)| {
proc_macro_server.as_ref().map_err(Clone::clone).and_then(
|proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]),
)
},
),
)
})
.collect()
};
let project_folders = ProjectFolders::new(&[ws], &[]);
loader.set_config(vfs::loader::Config {
@ -100,17 +119,23 @@ pub fn load_workspace(
});
tracing::debug!("crate graph: {:?}", crate_graph);
let host =
load_crate_graph(crate_graph, project_folders.source_root_config, &mut vfs, &receiver);
let host = load_crate_graph(
crate_graph,
proc_macros,
project_folders.source_root_config,
&mut vfs,
&receiver,
);
if load_config.prefill_caches {
host.analysis().parallel_prime_caches(1, |_| {})?;
}
Ok((host, vfs, proc_macro_client.ok()))
Ok((host, vfs, proc_macro_server.ok()))
}
fn load_crate_graph(
crate_graph: CrateGraph,
proc_macros: ProcMacros,
source_root_config: SourceRootConfig,
vfs: &mut vfs::Vfs,
receiver: &Receiver<vfs::loader::Message>,
@ -119,7 +144,7 @@ fn load_crate_graph(
let mut host = AnalysisHost::new(lru_cap);
let mut analysis_change = Change::new();
host.raw_database_mut().set_enable_proc_attr_macros(true);
host.raw_database_mut().enable_proc_attr_macros();
// wait until Vfs has loaded all roots
for task in receiver {
@ -139,9 +164,9 @@ fn load_crate_graph(
let changes = vfs.take_changes();
for file in changes {
if file.exists() {
let contents = vfs.file_contents(file.file_id).to_vec();
if let Ok(text) = String::from_utf8(contents) {
analysis_change.change_file(file.file_id, Some(Arc::new(text)))
let contents = vfs.file_contents(file.file_id);
if let Ok(text) = std::str::from_utf8(contents) {
analysis_change.change_file(file.file_id, Some(Arc::from(text)))
}
}
}
@ -149,6 +174,7 @@ fn load_crate_graph(
analysis_change.set_roots(source_roots);
analysis_change.set_crate_graph(crate_graph);
analysis_change.set_proc_macros(proc_macros);
host.apply_change(analysis_change);
host
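A recurring change in this sync is storing file text as `Arc<str>` (via triomphe) instead of `Arc<String>`: `Arc::from` copies the bytes once into the shared allocation and drops a level of indirection. With std's `Arc` the difference looks like this:

    use std::sync::Arc;

    fn main() {
        let text = String::from("fn main() {}");
        // Arc<String>: two indirections (Arc -> String -> bytes).
        let boxed: Arc<String> = Arc::new(text.clone());
        // Arc<str>: one indirection; the bytes live inside the Arc allocation.
        let flat: Arc<str> = Arc::from(text.as_str());
        assert_eq!(boxed.as_str(), &*flat);
    }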

View file

@ -2,6 +2,7 @@
use std::{
collections::{HashMap, HashSet},
path::PathBuf,
time::Instant,
};
@ -9,7 +10,6 @@ use crate::{
cli::load_cargo::ProcMacroServerChoice,
line_index::{LineEndings, LineIndex, PositionEncoding},
};
use hir::Name;
use ide::{
LineCol, MonikerDescriptorKind, StaticIndex, StaticIndexedFile, TextRange, TokenId,
TokenStaticData,
@ -66,7 +66,6 @@ impl flags::Scip {
.as_os_str()
.to_str()
.ok_or(anyhow::anyhow!("Unable to normalize project_root path"))?
.to_string()
),
text_document_encoding: scip_types::TextEncoding::UTF8.into(),
special_fields: Default::default(),
@ -167,7 +166,8 @@ impl flags::Scip {
special_fields: Default::default(),
};
scip::write_message_to_file("index.scip", index)
let out_path = self.output.unwrap_or_else(|| PathBuf::from(r"index.scip"));
scip::write_message_to_file(out_path, index)
.map_err(|err| anyhow::anyhow!("Failed to write scip to file: {}", err))?;
eprintln!("Generating SCIP finished {:?}", now.elapsed());
@ -210,13 +210,12 @@ fn new_descriptor_str(
}
}
fn new_descriptor(name: Name, suffix: scip_types::descriptor::Suffix) -> scip_types::Descriptor {
let mut name = name.to_string();
if name.contains("'") {
name = format!("`{name}`");
fn new_descriptor(name: &str, suffix: scip_types::descriptor::Suffix) -> scip_types::Descriptor {
if name.contains('\'') {
new_descriptor_str(&format!("`{name}`"), suffix)
} else {
new_descriptor_str(&name, suffix)
}
new_descriptor_str(name.as_str(), suffix)
}
/// Loosely based on `def_to_moniker`
@ -236,7 +235,7 @@ fn token_to_symbol(token: &TokenStaticData) -> Option<scip_types::Symbol> {
.iter()
.map(|desc| {
new_descriptor(
desc.name.clone(),
&desc.name,
match desc.desc {
MonikerDescriptorKind::Namespace => Namespace,
MonikerDescriptorKind::Type => Type,

View file

@ -7,13 +7,14 @@
//! configure the server itself, feature flags are passed into analysis, and
//! tweak things like automatic insertion of `()` in completions.
use std::{fmt, iter, path::PathBuf};
use std::{fmt, iter, ops::Not, path::PathBuf};
use cfg::{CfgAtom, CfgDiff};
use flycheck::FlycheckConfig;
use ide::{
AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode,
HighlightConfig, HighlightRelatedConfig, HoverConfig, HoverDocFormat, InlayHintsConfig,
JoinLinesConfig, Snippet, SnippetScope,
JoinLinesConfig, MemoryLayoutHoverConfig, MemoryLayoutHoverRenderKind, Snippet, SnippetScope,
};
use ide_db::{
imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},
@ -23,11 +24,10 @@ use itertools::Itertools;
use lsp_types::{ClientCapabilities, MarkupKind};
use project_model::{
CargoConfig, CargoFeatures, ProjectJson, ProjectJsonData, ProjectManifest, RustLibSource,
UnsetTestCrates,
};
use rustc_hash::{FxHashMap, FxHashSet};
use serde::{de::DeserializeOwned, Deserialize};
use vfs::AbsPathBuf;
use vfs::{AbsPath, AbsPathBuf};
use crate::{
caps::completion_item_edit_resolve,
@ -101,6 +101,8 @@ config_data! {
/// Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
/// avoid checking unnecessary things.
cargo_buildScripts_useRustcWrapper: bool = "true",
/// List of cfg options to enable with the given values.
cargo_cfgs: FxHashMap<String, String> = "{}",
/// Extra arguments that are passed to every cargo invocation.
cargo_extraArgs: Vec<String> = "[]",
/// Extra environment variables that will be set when running cargo, rustc
@ -128,7 +130,7 @@ config_data! {
// FIXME(@poliorcetics): move to multiple targets here too, but this will need more work
// than `checkOnSave_target`
cargo_target: Option<String> = "null",
/// Unsets `#[cfg(test)]` for the specified crates.
/// Unsets the implicit `#[cfg(test)]` for the specified crates.
cargo_unsetTest: Vec<String> = "[\"core\"]",
/// Run the check command for diagnostics on save.
@ -281,6 +283,8 @@ config_data! {
/// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
highlightRelated_breakPoints_enable: bool = "true",
/// Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure.
highlightRelated_closureCaptures_enable: bool = "true",
/// Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).
highlightRelated_exitPoints_enable: bool = "true",
/// Enables highlighting of related references while the cursor is on any identifier.
@ -311,8 +315,18 @@ config_data! {
/// Whether to show keyword hover popups. Only applies when
/// `#rust-analyzer.hover.documentation.enable#` is set.
hover_documentation_keywords_enable: bool = "true",
/// Use markdown syntax for links in hover.
/// Use markdown syntax for links on hover.
hover_links_enable: bool = "true",
/// How to render the alignment information in a memory layout hover.
hover_memoryLayout_alignment: Option<MemoryLayoutHoverRenderKindDef> = "\"hexadecimal\"",
/// Whether to show memory layout data on hover.
hover_memoryLayout_enable: bool = "true",
/// How to render the niche information in a memory layout hover.
hover_memoryLayout_niches: Option<bool> = "false",
/// How to render the offset information in a memory layout hover.
hover_memoryLayout_offset: Option<MemoryLayoutHoverRenderKindDef> = "\"hexadecimal\"",
/// How to render the size information in a memory layout hover.
hover_memoryLayout_size: Option<MemoryLayoutHoverRenderKindDef> = "\"both\"",
/// Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.
imports_granularity_enforce: bool = "false",
@ -336,8 +350,12 @@ config_data! {
/// Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1
/// to always show them).
inlayHints_closingBraceHints_minLines: usize = "25",
/// Whether to show inlay hints for closure captures.
inlayHints_closureCaptureHints_enable: bool = "false",
/// Whether to show inlay type hints for return types of closures.
inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef = "\"never\"",
/// Closure notation in type and chaining inlay hints.
inlayHints_closureStyle: ClosureStyle = "\"impl_fn\"",
/// Whether to show enum variant discriminant hints.
inlayHints_discriminantHints_enable: DiscriminantHintsDef = "\"never\"",
/// Whether to show inlay hints for type adjustments.
@ -418,6 +436,8 @@ config_data! {
/// Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.
lru_capacity: Option<usize> = "null",
/// Sets the LRU capacity of the specified queries.
lru_query_capacities: FxHashMap<Box<str>, usize> = "{}",
/// Whether to show `can't find Cargo.toml` error message.
notifications_cargoTomlNotFound: bool = "true",
@ -433,8 +453,7 @@ config_data! {
///
/// This config takes a map of crate names with the exported proc-macro names to ignore as values.
procMacro_ignored: FxHashMap<Box<str>, Box<[Box<str>]>> = "{}",
/// Internal config, path to proc-macro server executable (typically,
/// this is rust-analyzer itself, but we override this in tests).
/// Internal config, path to proc-macro server executable.
procMacro_server: Option<PathBuf> = "null",
/// Exclude imports from find-all-references.
@ -474,6 +493,8 @@ config_data! {
/// When enabled, rust-analyzer will highlight rust source in doc comments as well as intra
/// doc links.
semanticHighlighting_doc_comment_inject_enable: bool = "true",
/// Whether the server is allowed to emit non-standard tokens and modifiers.
semanticHighlighting_nonStandardTokens: bool = "true",
/// Use semantic tokens for operators.
///
/// When disabled, rust-analyzer will emit semantic tokens only for operator tokens when
@ -484,7 +505,7 @@ config_data! {
/// When enabled, rust-analyzer will emit special token types for operator tokens instead
/// of the generic `operator` token type.
semanticHighlighting_operator_specialization_enable: bool = "false",
/// Use semantic tokens for punctuations.
/// Use semantic tokens for punctuation.
///
/// When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
/// they are tagged with modifiers or have a special role.
@ -492,7 +513,7 @@ config_data! {
/// When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
/// calls.
semanticHighlighting_punctuation_separate_macro_bang: bool = "false",
/// Use specialized semantic tokens for punctuations.
/// Use specialized semantic tokens for punctuation.
///
/// When enabled, rust-analyzer will emit special token types for punctuation tokens instead
/// of the generic `punctuation` token type.
@ -531,8 +552,9 @@ impl Default for ConfigData {
#[derive(Debug, Clone)]
pub struct Config {
pub discovered_projects: Option<Vec<ProjectManifest>>,
pub workspace_roots: Vec<AbsPathBuf>,
discovered_projects: Vec<ProjectManifest>,
/// The workspace roots as registered by the LSP client
workspace_roots: Vec<AbsPathBuf>,
caps: lsp_types::ClientCapabilities,
root_path: AbsPathBuf,
data: ConfigData,
@ -570,6 +592,7 @@ pub struct LensConfig {
// runnables
pub run: bool,
pub debug: bool,
pub interpret: bool,
// implementations
pub implementations: bool,
@ -707,11 +730,11 @@ pub struct ClientCommandsConfig {
}
#[derive(Debug)]
pub struct ConfigUpdateError {
pub struct ConfigError {
errors: Vec<(String, serde_json::Error)>,
}
impl fmt::Display for ConfigUpdateError {
impl fmt::Display for ConfigError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let errors = self.errors.iter().format_with("\n", |(key, e), f| {
f(key)?;
@ -720,8 +743,7 @@ impl fmt::Display for ConfigUpdateError {
});
write!(
f,
"rust-analyzer found {} invalid config value{}:\n{}",
self.errors.len(),
"invalid config value{}:\n{}",
if self.errors.len() == 1 { "" } else { "s" },
errors
)
@ -738,7 +760,7 @@ impl Config {
caps,
data: ConfigData::default(),
detached_files: Vec::new(),
discovered_projects: None,
discovered_projects: Vec::new(),
root_path,
snippets: Default::default(),
workspace_roots,
@ -751,10 +773,20 @@ impl Config {
if discovered.is_empty() {
tracing::error!("failed to find any projects in {:?}", &self.workspace_roots);
}
self.discovered_projects = Some(discovered);
self.discovered_projects = discovered;
}
pub fn update(&mut self, mut json: serde_json::Value) -> Result<(), ConfigUpdateError> {
pub fn remove_workspace(&mut self, path: &AbsPath) {
if let Some(position) = self.workspace_roots.iter().position(|it| it == path) {
self.workspace_roots.remove(position);
}
}
pub fn add_workspaces(&mut self, paths: impl Iterator<Item = AbsPathBuf>) {
self.workspace_roots.extend(paths);
}
pub fn update(&mut self, mut json: serde_json::Value) -> Result<(), ConfigError> {
tracing::info!("updating config from JSON: {:#}", json);
if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) {
return Ok(());
@ -801,7 +833,7 @@ impl Config {
if errors.is_empty() {
Ok(())
} else {
Err(ConfigUpdateError { errors })
Err(ConfigError { errors })
}
}
@ -856,25 +888,19 @@ impl Config {
pub fn linked_projects(&self) -> Vec<LinkedProject> {
match self.data.linkedProjects.as_slice() {
[] => {
match self.discovered_projects.as_ref() {
Some(discovered_projects) => {
let exclude_dirs: Vec<_> = self
.data
.files_excludeDirs
.iter()
.map(|p| self.root_path.join(p))
.collect();
discovered_projects
.iter()
.filter(|(ProjectManifest::ProjectJson(path) | ProjectManifest::CargoToml(path))| {
let exclude_dirs: Vec<_> =
self.data.files_excludeDirs.iter().map(|p| self.root_path.join(p)).collect();
self.discovered_projects
.iter()
.filter(
|(ProjectManifest::ProjectJson(path)
| ProjectManifest::CargoToml(path))| {
!exclude_dirs.iter().any(|p| path.starts_with(p))
})
.cloned()
.map(LinkedProject::from)
.collect()
}
None => Vec::new(),
}
},
)
.cloned()
.map(LinkedProject::from)
.collect()
}
linked_projects => linked_projects
.iter()
@ -1013,6 +1039,11 @@ impl Config {
.is_some()
}
pub fn semantics_tokens_augments_syntax_tokens(&self) -> bool {
try_!(self.caps.text_document.as_ref()?.semantic_tokens.as_ref()?.augments_syntax_tokens?)
.unwrap_or(false)
}
pub fn position_encoding(&self) -> PositionEncoding {
negotiated_encoding(&self.caps)
}
@ -1025,6 +1056,10 @@ impl Config {
self.experimental("codeActionGroup")
}
pub fn local_docs(&self) -> bool {
self.experimental("localDocs")
}
pub fn open_server_logs(&self) -> bool {
self.experimental("openServerLogs")
}
@ -1085,27 +1120,27 @@ impl Config {
extra_env
}
pub fn lru_capacity(&self) -> Option<usize> {
pub fn lru_parse_query_capacity(&self) -> Option<usize> {
self.data.lru_capacity
}
pub fn proc_macro_srv(&self) -> Option<(AbsPathBuf, /* is path explicitly set */ bool)> {
if !self.data.procMacro_enable {
return None;
}
Some(match &self.data.procMacro_server {
Some(it) => (
AbsPathBuf::try_from(it.clone()).unwrap_or_else(|path| self.root_path.join(path)),
true,
),
None => (AbsPathBuf::assert(std::env::current_exe().ok()?), false),
})
pub fn lru_query_capacities(&self) -> Option<&FxHashMap<Box<str>, usize>> {
self.data.lru_query_capacities.is_empty().not().then(|| &self.data.lru_query_capacities)
}
pub fn proc_macro_srv(&self) -> Option<AbsPathBuf> {
let path = self.data.procMacro_server.clone()?;
Some(AbsPathBuf::try_from(path).unwrap_or_else(|path| self.root_path.join(&path)))
}
pub fn dummy_replacements(&self) -> &FxHashMap<Box<str>, Box<[Box<str>]>> {
&self.data.procMacro_ignored
}
pub fn expand_proc_macros(&self) -> bool {
self.data.procMacro_enable
}
pub fn expand_proc_attr_macros(&self) -> bool {
self.data.procMacro_enable && self.data.procMacro_attributes_enable
}
@ -1164,7 +1199,34 @@ impl Config {
sysroot,
sysroot_src,
rustc_source,
unset_test_crates: UnsetTestCrates::Only(self.data.cargo_unsetTest.clone()),
cfg_overrides: project_model::CfgOverrides {
global: CfgDiff::new(
self.data
.cargo_cfgs
.iter()
.map(|(key, val)| {
if val.is_empty() {
CfgAtom::Flag(key.into())
} else {
CfgAtom::KeyValue { key: key.into(), value: val.into() }
}
})
.collect(),
vec![],
)
.unwrap(),
selective: self
.data
.cargo_unsetTest
.iter()
.map(|it| {
(
it.clone(),
CfgDiff::new(vec![], vec![CfgAtom::Flag("test".into())]).unwrap(),
)
})
.collect(),
},
wrap_rustc_in_build_scripts: self.data.cargo_buildScripts_useRustcWrapper,
invocation_strategy: match self.data.cargo_buildScripts_invocationStrategy {
InvocationStrategy::Once => project_model::InvocationStrategy::Once,
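The new `cargo.cfgs` map is turned into cfg atoms with a simple rule: an empty value means a bare flag (`--cfg key`), anything else a key/value pair (`--cfg key="value"`). Stripped of the project-model types, roughly:

    use std::collections::HashMap;

    #[derive(Debug, PartialEq)]
    enum CfgAtom {
        Flag(String),
        KeyValue { key: String, value: String },
    }

    fn to_atoms(cfgs: &HashMap<String, String>) -> Vec<CfgAtom> {
        cfgs.iter()
            .map(|(key, value)| {
                if value.is_empty() {
                    CfgAtom::Flag(key.clone())
                } else {
                    CfgAtom::KeyValue { key: key.clone(), value: value.clone() }
                }
            })
            .collect()
    }

    fn main() {
        let mut cfgs = HashMap::new();
        cfgs.insert("debug_assertions".to_owned(), String::new());
        assert_eq!(to_atoms(&cfgs), [CfgAtom::Flag("debug_assertions".to_owned())]);
    }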
@ -1291,6 +1353,13 @@ impl Config {
hide_closure_initialization_hints: self
.data
.inlayHints_typeHints_hideClosureInitialization,
closure_style: match self.data.inlayHints_closureStyle {
ClosureStyle::ImplFn => hir::ClosureStyle::ImplFn,
ClosureStyle::RustAnalyzer => hir::ClosureStyle::RANotation,
ClosureStyle::WithId => hir::ClosureStyle::ClosureWithId,
ClosureStyle::Hide => hir::ClosureStyle::Hide,
},
closure_capture_hints: self.data.inlayHints_closureCaptureHints_enable,
adjustment_hints: match self.data.inlayHints_expressionAdjustmentHints_enable {
AdjustmentHintsDef::Always => ide::AdjustmentHints::Always,
AdjustmentHintsDef::Never => match self.data.inlayHints_reborrowHints_enable {
@ -1409,6 +1478,9 @@ impl Config {
LensConfig {
run: self.data.lens_enable && self.data.lens_run_enable,
debug: self.data.lens_enable && self.data.lens_debug_enable,
interpret: self.data.lens_enable
&& self.data.lens_run_enable
&& self.data.interpret_tests,
implementations: self.data.lens_enable && self.data.lens_implementations_enable,
method_refs: self.data.lens_enable && self.data.lens_references_method_enable,
refs_adt: self.data.lens_enable && self.data.lens_references_adt_enable,
@ -1430,6 +1502,10 @@ impl Config {
}
}
pub fn highlighting_non_standard_tokens(&self) -> bool {
self.data.semanticHighlighting_nonStandardTokens
}
pub fn highlighting_config(&self) -> HighlightConfig {
HighlightConfig {
strings: self.data.semanticHighlighting_strings_enable,
@ -1446,8 +1522,19 @@ impl Config {
}
pub fn hover(&self) -> HoverConfig {
let mem_kind = |kind| match kind {
MemoryLayoutHoverRenderKindDef::Both => MemoryLayoutHoverRenderKind::Both,
MemoryLayoutHoverRenderKindDef::Decimal => MemoryLayoutHoverRenderKind::Decimal,
MemoryLayoutHoverRenderKindDef::Hexadecimal => MemoryLayoutHoverRenderKind::Hexadecimal,
};
HoverConfig {
links_in_hover: self.data.hover_links_enable,
memory_layout: self.data.hover_memoryLayout_enable.then_some(MemoryLayoutHoverConfig {
size: self.data.hover_memoryLayout_size.map(mem_kind),
offset: self.data.hover_memoryLayout_offset.map(mem_kind),
alignment: self.data.hover_memoryLayout_alignment.map(mem_kind),
niches: self.data.hover_memoryLayout_niches.unwrap_or_default(),
}),
documentation: self.data.hover_documentation_enable,
format: {
let is_markdown = try_or_def!(self
@ -1467,7 +1554,6 @@ impl Config {
}
},
keywords: self.data.hover_documentation_keywords_enable,
interpret_tests: self.data.interpret_tests,
}
}
@ -1537,6 +1623,7 @@ impl Config {
break_points: self.data.highlightRelated_breakPoints_enable,
exit_points: self.data.highlightRelated_exitPoints_enable,
yield_points: self.data.highlightRelated_yieldPoints_enable,
closure_captures: self.data.highlightRelated_closureCaptures_enable,
}
}
@ -1657,6 +1744,9 @@ mod de_unit_v {
named_unit_variant!(reborrow);
named_unit_variant!(fieldless);
named_unit_variant!(with_block);
named_unit_variant!(decimal);
named_unit_variant!(hexadecimal);
named_unit_variant!(both);
}
#[derive(Deserialize, Debug, Clone, Copy)]
@ -1797,6 +1887,15 @@ enum ClosureReturnTypeHintsDef {
WithBlock,
}
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum ClosureStyle {
ImplFn,
RustAnalyzer,
WithId,
Hide,
}
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum ReborrowHintsDef {
@ -1878,6 +1977,18 @@ enum WorkspaceSymbolSearchKindDef {
AllSymbols,
}
#[derive(Deserialize, Debug, Copy, Clone)]
#[serde(rename_all = "snake_case")]
#[serde(untagged)]
pub enum MemoryLayoutHoverRenderKindDef {
#[serde(deserialize_with = "de_unit_v::decimal")]
Decimal,
#[serde(deserialize_with = "de_unit_v::hexadecimal")]
Hexadecimal,
#[serde(deserialize_with = "de_unit_v::both")]
Both,
}
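`MemoryLayoutHoverRenderKindDef` leans on the `de_unit_v` helpers added above: each unit variant of an untagged enum gets a `deserialize_with` function that accepts only its own string, so `"both"`, `"decimal"`, and `"hexadecimal"` land on distinct variants. A self-contained sketch of the same trick, assuming serde (with derive) and serde_json; the helper names are illustrative, not rust-analyzer's:

    use serde::{de::Error as _, Deserialize, Deserializer};

    // Accept exactly one string per variant; any other input falls through to
    // the next variant of the untagged enum.
    fn exactly<'de, D: Deserializer<'de>>(d: D, want: &str) -> Result<(), D::Error> {
        let s = String::deserialize(d)?;
        if s == want {
            Ok(())
        } else {
            Err(D::Error::custom(format!("expected {want:?}")))
        }
    }

    fn decimal<'de, D: Deserializer<'de>>(d: D) -> Result<(), D::Error> {
        exactly(d, "decimal")
    }
    fn hexadecimal<'de, D: Deserializer<'de>>(d: D) -> Result<(), D::Error> {
        exactly(d, "hexadecimal")
    }
    fn both<'de, D: Deserializer<'de>>(d: D) -> Result<(), D::Error> {
        exactly(d, "both")
    }

    #[derive(Deserialize, Debug, PartialEq)]
    #[serde(untagged)]
    enum RenderKind {
        #[serde(deserialize_with = "decimal")]
        Decimal,
        #[serde(deserialize_with = "hexadecimal")]
        Hexadecimal,
        #[serde(deserialize_with = "both")]
        Both,
    }

    fn main() {
        assert_eq!(serde_json::from_str::<RenderKind>("\"both\"").unwrap(), RenderKind::Both);
        assert!(serde_json::from_str::<RenderKind>("\"nope\"").is_err());
    }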
macro_rules! _config_data {
(struct $name:ident {
$(
@ -1940,7 +2051,7 @@ fn get_field<T: DeserializeOwned>(
alias: Option<&'static str>,
default: &str,
) -> T {
// XXX: check alias first, to work-around the VS Code where it pre-fills the
// XXX: check alias first, to work around VS Code, which pre-fills the
// defaults instead of sending an empty object.
alias
.into_iter()
@ -1960,7 +2071,9 @@ fn get_field<T: DeserializeOwned>(
None
}
})
.unwrap_or_else(|| serde_json::from_str(default).unwrap())
.unwrap_or_else(|| {
serde_json::from_str(default).unwrap_or_else(|e| panic!("{e} on: `{default}`"))
})
}
fn schema(fields: &[(&'static str, &'static str, &[&str], &str)]) -> serde_json::Value {
@ -2020,6 +2133,9 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
"FxHashMap<String, String>" => set! {
"type": "object",
},
"FxHashMap<Box<str>, usize>" => set! {
"type": "object",
},
"Option<usize>" => set! {
"type": ["null", "integer"],
"minimum": 0,
@ -2169,8 +2285,8 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
"enumDescriptions": [
"Always show adjustment hints as prefix (`*expr`).",
"Always show adjustment hints as postfix (`expr.*`).",
"Show prefix or postfix depending on which uses less parenthesis, prefering prefix.",
"Show prefix or postfix depending on which uses less parenthesis, prefering postfix.",
"Show prefix or postfix depending on which uses less parenthesis, preferring prefix.",
"Show prefix or postfix depending on which uses less parenthesis, preferring postfix.",
]
},
"CargoFeaturesDef" => set! {
@ -2275,6 +2391,32 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
},
],
},
"ClosureStyle" => set! {
"type": "string",
"enum": ["impl_fn", "rust_analyzer", "with_id", "hide"],
"enumDescriptions": [
"`impl_fn`: `impl FnMut(i32, u64) -> i8`",
"`rust_analyzer`: `|i32, u64| -> i8`",
"`with_id`: `{closure#14352}`, where that id is the unique number of the closure in r-a internals",
"`hide`: Shows `...` for every closure type",
],
},
"Option<MemoryLayoutHoverRenderKindDef>" => set! {
"anyOf": [
{
"type": "null"
},
{
"type": "string",
"enum": ["both", "decimal", "hexadecimal", ],
"enumDescriptions": [
"Render as 12 (0xC)",
"Render as 12",
"Render as 0xC"
],
},
],
},
_ => panic!("missing entry for {ty}: {default}"),
}
@ -2384,4 +2526,43 @@ mod tests {
fn remove_ws(text: &str) -> String {
text.replace(char::is_whitespace, "")
}
#[test]
fn proc_macro_srv_null() {
let mut config =
Config::new(AbsPathBuf::try_from(project_root()).unwrap(), Default::default(), vec![]);
config
.update(serde_json::json!({
"procMacro_server": null,
}))
.unwrap();
assert_eq!(config.proc_macro_srv(), None);
}
#[test]
fn proc_macro_srv_abs() {
let mut config =
Config::new(AbsPathBuf::try_from(project_root()).unwrap(), Default::default(), vec![]);
config
.update(serde_json::json!({
"procMacro": {"server": project_root().display().to_string()}
}))
.unwrap();
assert_eq!(config.proc_macro_srv(), Some(AbsPathBuf::try_from(project_root()).unwrap()));
}
#[test]
fn proc_macro_srv_rel() {
let mut config =
Config::new(AbsPathBuf::try_from(project_root()).unwrap(), Default::default(), vec![]);
config
.update(serde_json::json!({
"procMacro": {"server": "./server"}
}))
.unwrap();
assert_eq!(
config.proc_macro_srv(),
Some(AbsPathBuf::try_from(project_root().join("./server")).unwrap())
);
}
}

View file

@ -1,15 +1,16 @@
//! Bookkeeping to keep diagnostics easily in sync with the client.
pub(crate) mod to_proto;
use std::{mem, sync::Arc};
use std::mem;
use ide::FileId;
use ide_db::FxHashMap;
use stdx::hash::{NoHashHashMap, NoHashHashSet};
use nohash_hasher::{IntMap, IntSet};
use triomphe::Arc;
use crate::lsp_ext;
pub(crate) type CheckFixes = Arc<NoHashHashMap<usize, NoHashHashMap<FileId, Vec<Fix>>>>;
pub(crate) type CheckFixes = Arc<IntMap<usize, IntMap<FileId, Vec<Fix>>>>;
#[derive(Debug, Default, Clone)]
pub struct DiagnosticsMapConfig {
@ -20,12 +21,12 @@ pub struct DiagnosticsMapConfig {
#[derive(Debug, Default, Clone)]
pub(crate) struct DiagnosticCollection {
// FIXME: should be NoHashHashMap<FileId, Vec<ra_id::Diagnostic>>
pub(crate) native: NoHashHashMap<FileId, Vec<lsp_types::Diagnostic>>,
// FIXME: should be IntMap<FileId, Vec<ra_id::Diagnostic>>
pub(crate) native: IntMap<FileId, Vec<lsp_types::Diagnostic>>,
// FIXME: should be Vec<flycheck::Diagnostic>
pub(crate) check: NoHashHashMap<usize, NoHashHashMap<FileId, Vec<lsp_types::Diagnostic>>>,
pub(crate) check: IntMap<usize, IntMap<FileId, Vec<lsp_types::Diagnostic>>>,
pub(crate) check_fixes: CheckFixes,
changes: NoHashHashSet<FileId>,
changes: IntSet<FileId>,
}
#[derive(Debug, Clone)]
@ -105,7 +106,7 @@ impl DiagnosticCollection {
native.chain(check)
}
pub(crate) fn take_changes(&mut self) -> Option<NoHashHashSet<FileId>> {
pub(crate) fn take_changes(&mut self) -> Option<IntSet<FileId>> {
if self.changes.is_empty() {
return None;
}
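`IntMap`/`IntSet` (from nohash-hasher) exist because `FileId` is already a unique integer: hashing it again buys nothing, so the "hasher" just passes the integer through. The core idea in miniature, built on std only (nohash-hasher provides the real implementation):

    use std::collections::HashMap;
    use std::hash::{BuildHasherDefault, Hasher};

    // Pass-through hasher: the written integer *is* the hash. Only sound for
    // keys that hash exactly one integer, like newtyped ids.
    #[derive(Default)]
    struct NoHash(u64);

    impl Hasher for NoHash {
        fn finish(&self) -> u64 {
            self.0
        }
        fn write(&mut self, _bytes: &[u8]) {
            unimplemented!("only integer keys are supported");
        }
        fn write_u32(&mut self, n: u32) {
            self.0 = n as u64;
        }
    }

    type IntMap<V> = HashMap<u32, V, BuildHasherDefault<NoHash>>;

    fn main() {
        let mut diagnostics: IntMap<&str> = IntMap::default();
        diagnostics.insert(42, "unused variable");
        assert_eq!(diagnostics.get(&42), Some(&"unused variable"));
    }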

View file

@ -3,7 +3,6 @@
use std::collections::HashMap;
use flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan};
use ide_db::line_index::WideEncoding;
use itertools::Itertools;
use stdx::format_to;
use vfs::{AbsPath, AbsPathBuf};
@ -80,37 +79,33 @@ fn position(
position_encoding: &PositionEncoding,
span: &DiagnosticSpan,
line_offset: usize,
column_offset: usize,
column_offset_utf32: usize,
) -> lsp_types::Position {
let line_index = line_offset - span.line_start;
let mut true_column_offset = column_offset;
if let Some(line) = span.text.get(line_index) {
if line.text.chars().count() == line.text.len() {
// all one byte utf-8 char
return lsp_types::Position {
line: (line_offset as u32).saturating_sub(1),
character: (column_offset as u32).saturating_sub(1),
};
}
let mut char_offset = 0;
let len_func = match position_encoding {
PositionEncoding::Utf8 => char::len_utf8,
PositionEncoding::Wide(WideEncoding::Utf16) => char::len_utf16,
PositionEncoding::Wide(WideEncoding::Utf32) => |_| 1,
};
for c in line.text.chars() {
char_offset += 1;
if char_offset > column_offset {
break;
let column_offset_encoded = match span.text.get(line_index) {
// Fast path.
Some(line) if line.text.is_ascii() => column_offset_utf32,
Some(line) => {
let line_prefix_len = line
.text
.char_indices()
.take(column_offset_utf32)
.last()
.map(|(pos, c)| pos + c.len_utf8())
.unwrap_or(0);
let line_prefix = &line.text[..line_prefix_len];
match position_encoding {
PositionEncoding::Utf8 => line_prefix.len(),
PositionEncoding::Wide(enc) => enc.measure(line_prefix),
}
true_column_offset += len_func(c) - 1;
}
}
None => column_offset_utf32,
};
lsp_types::Position {
line: (line_offset as u32).saturating_sub(1),
character: (true_column_offset as u32).saturating_sub(1),
character: (column_offset_encoded as u32).saturating_sub(1),
}
}
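The rewritten `position` computes the column by slicing the line up to the reported UTF-32 column and re-measuring that prefix in the negotiated encoding. What "measuring" means per encoding, in isolation:

    // Column units per encoding: bytes for UTF-8, 16-bit code units for
    // UTF-16, scalar values (chars) for UTF-32.
    fn measure(prefix: &str, encoding: &str) -> usize {
        match encoding {
            "utf-8" => prefix.len(),
            "utf-16" => prefix.chars().map(char::len_utf16).sum(),
            "utf-32" => prefix.chars().count(),
            other => panic!("unknown encoding {other}"),
        }
    }

    fn main() {
        let prefix = "aé𝄞"; // 1 + 2 + 4 bytes; 1 + 1 + 2 UTF-16 units; 3 chars
        assert_eq!(measure(prefix, "utf-8"), 7);
        assert_eq!(measure(prefix, "utf-16"), 4);
        assert_eq!(measure(prefix, "utf-32"), 3);
    }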

View file

@ -4,6 +4,7 @@ use std::{fmt, panic, thread};
use ide::Cancelled;
use lsp_server::ExtractError;
use serde::{de::DeserializeOwned, Serialize};
use stdx::thread::ThreadIntent;
use crate::{
global_state::{GlobalState, GlobalStateSnapshot},
@ -87,7 +88,8 @@ impl<'a> RequestDispatcher<'a> {
self
}
/// Dispatches the request onto thread pool
/// Dispatches a non-latency-sensitive request onto the thread pool
/// without retrying it if it panics.
pub(crate) fn on_no_retry<R>(
&mut self,
f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
@ -102,7 +104,7 @@ impl<'a> RequestDispatcher<'a> {
None => return self,
};
self.global_state.task_pool.handle.spawn({
self.global_state.task_pool.handle.spawn(ThreadIntent::Worker, {
let world = self.global_state.snapshot();
move || {
let result = panic::catch_unwind(move || {
@ -123,11 +125,49 @@ impl<'a> RequestDispatcher<'a> {
self
}
/// Dispatches the request onto thread pool
/// Dispatches a non-latency-sensitive request onto the thread pool.
pub(crate) fn on<R>(
&mut self,
f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request + 'static,
R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug,
R::Result: Serialize,
{
self.on_with_thread_intent::<R>(ThreadIntent::Worker, f)
}
/// Dispatches a latency-sensitive request onto the thread pool.
pub(crate) fn on_latency_sensitive<R>(
&mut self,
f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request + 'static,
R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug,
R::Result: Serialize,
{
self.on_with_thread_intent::<R>(ThreadIntent::LatencySensitive, f)
}
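`ThreadIntent` splits pool work into `Worker` and `LatencySensitive` so bulk jobs (indexing, flycheck) cannot starve interactive requests. The routing idea, reduced to two channels and two threads; all names here are illustrative, and the real `stdx::thread` pool is more involved:

    use std::sync::mpsc;
    use std::thread;

    type Job = Box<dyn FnOnce() + Send>;

    enum Intent {
        Worker,
        LatencySensitive,
    }

    fn main() {
        let (worker_tx, worker_rx) = mpsc::channel::<Job>();
        let (latency_tx, latency_rx) = mpsc::channel::<Job>();

        // One thread per queue: latency-sensitive jobs never wait behind bulk work.
        let worker = thread::spawn(move || worker_rx.into_iter().for_each(|job| job()));
        let latency = thread::spawn(move || latency_rx.into_iter().for_each(|job| job()));

        let spawn = move |intent: Intent, job: Job| match intent {
            Intent::Worker => worker_tx.send(job).unwrap(),
            Intent::LatencySensitive => latency_tx.send(job).unwrap(),
        };

        spawn(Intent::Worker, Box::new(|| println!("bulk indexing")));
        spawn(Intent::LatencySensitive, Box::new(|| println!("completion request")));

        drop(spawn); // close both channels so the pool threads exit
        worker.join().unwrap();
        latency.join().unwrap();
    }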
pub(crate) fn finish(&mut self) {
if let Some(req) = self.req.take() {
tracing::error!("unknown request: {:?}", req);
let response = lsp_server::Response::new_err(
req.id,
lsp_server::ErrorCode::MethodNotFound as i32,
"unknown request".to_string(),
);
self.global_state.respond(response);
}
}
fn on_with_thread_intent<R>(
&mut self,
intent: ThreadIntent,
f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request + 'static,
R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug,
@ -138,7 +178,7 @@ impl<'a> RequestDispatcher<'a> {
None => return self,
};
self.global_state.task_pool.handle.spawn({
self.global_state.task_pool.handle.spawn(intent, {
let world = self.global_state.snapshot();
move || {
let result = panic::catch_unwind(move || {
@ -155,18 +195,6 @@ impl<'a> RequestDispatcher<'a> {
self
}
pub(crate) fn finish(&mut self) {
if let Some(req) = self.req.take() {
tracing::error!("unknown request: {:?}", req);
let response = lsp_server::Response::new_err(
req.id,
lsp_server::ErrorCode::MethodNotFound as i32,
"unknown request".to_string(),
);
self.global_state.respond(response);
}
}
fn parse<R>(&mut self) -> Option<(lsp_server::Request, R::Params, String)>
where
R: lsp_types::request::Request,

View file

@ -31,7 +31,10 @@ pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> R
PositionEncoding::Utf8 => LineCol { line: position.line, col: position.character },
PositionEncoding::Wide(enc) => {
let line_col = WideLineCol { line: position.line, col: position.character };
line_index.index.to_utf8(enc, line_col)
line_index
.index
.to_utf8(enc, line_col)
.ok_or_else(|| format_err!("Invalid wide col offset"))?
}
};
let text_size =
@ -98,13 +101,18 @@ pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind>
pub(crate) fn annotation(
snap: &GlobalStateSnapshot,
code_lens: lsp_types::CodeLens,
) -> Result<Annotation> {
) -> Result<Option<Annotation>> {
let data =
code_lens.data.ok_or_else(|| invalid_params_error("code lens without data".to_string()))?;
let resolve = from_json::<lsp_ext::CodeLensResolveData>("CodeLensResolveData", &data)?;
match resolve {
lsp_ext::CodeLensResolveData::Impls(params) => {
match resolve.kind {
lsp_ext::CodeLensResolveDataKind::Impls(params) => {
if snap.url_file_version(&params.text_document_position_params.text_document.uri)
!= Some(resolve.version)
{
return Ok(None);
}
let pos @ FilePosition { file_id, .. } =
file_position(snap, params.text_document_position_params)?;
let line_index = snap.file_line_index(file_id)?;
@ -114,7 +122,10 @@ pub(crate) fn annotation(
kind: AnnotationKind::HasImpls { pos, data: None },
})
}
lsp_ext::CodeLensResolveData::References(params) => {
lsp_ext::CodeLensResolveDataKind::References(params) => {
if snap.url_file_version(&params.text_document.uri) != Some(resolve.version) {
return Ok(None);
}
let pos @ FilePosition { file_id, .. } = file_position(snap, params)?;
let line_index = snap.file_line_index(file_id)?;
@ -124,4 +135,5 @@ pub(crate) fn annotation(
})
}
}
.map(Some)
}
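The new `version` field makes resolution refuse stale lenses: if the document changed since the lens was produced, the server now returns `Ok(None)` instead of resolving against shifted positions. Schematically, with types simplified to integers and strings:

    // Schematic staleness check for resolving a code lens.
    fn resolve_lens(
        current_version: Option<i32>,
        lens_version: i32,
        resolve: impl FnOnce() -> String,
    ) -> Option<String> {
        if current_version != Some(lens_version) {
            return None; // document changed since the lens was computed
        }
        Some(resolve())
    }

    fn main() {
        assert_eq!(resolve_lens(Some(2), 1, || "impls".into()), None);
        assert_eq!(resolve_lens(Some(1), 1, || "impls".into()), Some("impls".into()));
    }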

View file

@ -3,22 +3,23 @@
//!
//! Each tick provides an immutable snapshot of the state as `WorldSnapshot`.
use std::{sync::Arc, time::Instant};
use std::time::Instant;
use crossbeam_channel::{unbounded, Receiver, Sender};
use flycheck::FlycheckHandle;
use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId};
use ide_db::base_db::{CrateId, FileLoader, SourceDatabase};
use ide_db::base_db::{CrateId, FileLoader, ProcMacroPaths, SourceDatabase};
use lsp_types::{SemanticTokens, Url};
use nohash_hasher::IntMap;
use parking_lot::{Mutex, RwLock};
use proc_macro_api::ProcMacroServer;
use project_model::{CargoWorkspace, ProjectWorkspace, Target, WorkspaceBuildScripts};
use rustc_hash::FxHashMap;
use stdx::hash::NoHashHashMap;
use triomphe::Arc;
use vfs::AnchoredPathBuf;
use crate::{
config::Config,
config::{Config, ConfigError},
diagnostics::{CheckFixes, DiagnosticCollection},
from_proto,
line_index::{LineEndings, LineIndex},
@ -51,24 +52,34 @@ pub(crate) type ReqQueue = lsp_server::ReqQueue<(String, Instant), ReqHandler>;
pub(crate) struct GlobalState {
sender: Sender<lsp_server::Message>,
req_queue: ReqQueue,
pub(crate) task_pool: Handle<TaskPool<Task>, Receiver<Task>>,
pub(crate) loader: Handle<Box<dyn vfs::loader::Handle>, Receiver<vfs::loader::Message>>,
pub(crate) config: Arc<Config>,
pub(crate) config_errors: Option<ConfigError>,
pub(crate) analysis_host: AnalysisHost,
pub(crate) diagnostics: DiagnosticCollection,
pub(crate) mem_docs: MemDocs,
pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
pub(crate) shutdown_requested: bool,
pub(crate) proc_macro_changed: bool,
pub(crate) last_reported_status: Option<lsp_ext::ServerStatusParams>,
pub(crate) source_root_config: SourceRootConfig,
pub(crate) proc_macro_clients: Vec<Result<ProcMacroServer, String>>,
pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
// status
pub(crate) shutdown_requested: bool,
pub(crate) last_reported_status: Option<lsp_ext::ServerStatusParams>,
// proc macros
pub(crate) proc_macro_changed: bool,
pub(crate) proc_macro_clients: Arc<[anyhow::Result<ProcMacroServer>]>,
// Flycheck
pub(crate) flycheck: Arc<[FlycheckHandle]>,
pub(crate) flycheck_sender: Sender<flycheck::Message>,
pub(crate) flycheck_receiver: Receiver<flycheck::Message>,
pub(crate) last_flycheck_error: Option<String>,
pub(crate) vfs: Arc<RwLock<(vfs::Vfs, NoHashHashMap<FileId, LineEndings>)>>,
// VFS
pub(crate) loader: Handle<Box<dyn vfs::loader::Handle>, Receiver<vfs::loader::Message>>,
pub(crate) vfs: Arc<RwLock<(vfs::Vfs, IntMap<FileId, LineEndings>)>>,
pub(crate) vfs_config_version: u32,
pub(crate) vfs_progress_config_version: u32,
pub(crate) vfs_progress_n_total: usize,
@ -92,7 +103,7 @@ pub(crate) struct GlobalState {
/// first phase is much faster, and is much less likely to fail.
///
/// This creates a complication -- by the time the second phase completes,
/// the results of the fist phase could be invalid. That is, while we run
/// the results of the first phase could be invalid. That is, while we run
/// `cargo check`, the user edits `Cargo.toml`, we notice this, and the new
/// `cargo metadata` completes before `cargo check`.
///
@ -100,11 +111,13 @@ pub(crate) struct GlobalState {
/// the user just adds comments or whitespace to Cargo.toml, we do not want
/// to invalidate any salsa caches.
pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
pub(crate) fetch_workspaces_queue: OpQueue<Option<Vec<anyhow::Result<ProjectWorkspace>>>>,
pub(crate) fetch_build_data_queue:
OpQueue<(Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,
pub(crate) prime_caches_queue: OpQueue<()>,
// op queues
pub(crate) fetch_workspaces_queue: OpQueue<(), Option<Vec<anyhow::Result<ProjectWorkspace>>>>,
pub(crate) fetch_build_data_queue:
OpQueue<(), (Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,
pub(crate) fetch_proc_macros_queue: OpQueue<Vec<ProcMacroPaths>, bool>,
pub(crate) prime_caches_queue: OpQueue,
}
/// An immutable snapshot of the world's state at a point in time.
@ -114,8 +127,9 @@ pub(crate) struct GlobalStateSnapshot {
pub(crate) check_fixes: CheckFixes,
mem_docs: MemDocs,
pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
vfs: Arc<RwLock<(vfs::Vfs, NoHashHashMap<FileId, LineEndings>)>>,
vfs: Arc<RwLock<(vfs::Vfs, IntMap<FileId, LineEndings>)>>,
pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
// used to signal semantic highlighting to fall back to syntax-based highlighting until proc-macros have been loaded
pub(crate) proc_macros_loaded: bool,
pub(crate) flycheck: Arc<[FlycheckHandle]>,
}
@ -138,7 +152,10 @@ impl GlobalState {
Handle { handle, receiver }
};
let analysis_host = AnalysisHost::new(config.lru_capacity());
let mut analysis_host = AnalysisHost::new(config.lru_parse_query_capacity());
if let Some(capacities) = config.lru_query_capacities() {
analysis_host.update_lru_capacities(capacities);
}
let (flycheck_sender, flycheck_receiver) = unbounded();
let mut this = GlobalState {
sender,
@ -151,16 +168,21 @@ impl GlobalState {
mem_docs: MemDocs::default(),
semantic_tokens_cache: Arc::new(Default::default()),
shutdown_requested: false,
proc_macro_changed: false,
last_reported_status: None,
source_root_config: SourceRootConfig::default(),
proc_macro_clients: vec![],
config_errors: Default::default(),
flycheck: Arc::new([]),
proc_macro_changed: false,
// FIXME: use `Arc::from_iter` when it becomes available
proc_macro_clients: Arc::from(Vec::new()),
// FIXME: use `Arc::from_iter` when it becomes available
flycheck: Arc::from(Vec::new()),
flycheck_sender,
flycheck_receiver,
last_flycheck_error: None,
vfs: Arc::new(RwLock::new((vfs::Vfs::default(), NoHashHashMap::default()))),
vfs: Arc::new(RwLock::new((vfs::Vfs::default(), IntMap::default()))),
vfs_config_version: 0,
vfs_progress_config_version: 0,
vfs_progress_n_total: 0,
@@ -168,9 +190,10 @@ impl GlobalState {
workspaces: Arc::new(Vec::new()),
fetch_workspaces_queue: OpQueue::default(),
prime_caches_queue: OpQueue::default(),
fetch_build_data_queue: OpQueue::default(),
fetch_proc_macros_queue: OpQueue::default(),
prime_caches_queue: OpQueue::default(),
};
// Apply any required database inputs from the config.
this.update_configuration(config);
@@ -185,7 +208,7 @@ impl GlobalState {
let (change, changed_files) = {
let mut change = Change::new();
let (vfs, line_endings_map) = &mut *self.vfs.write();
let mut changed_files = vfs.take_changes();
let changed_files = vfs.take_changes();
if changed_files.is_empty() {
return false;
}
@@ -193,7 +216,7 @@ impl GlobalState {
// We need to fix up the changed events a bit. If we have a create or modify for a file
// id that is followed by a delete we actually skip observing the file text from the
// earlier event, to avoid problems later on.
for changed_file in &changed_files {
for changed_file in changed_files {
use vfs::ChangeKind::*;
file_changes
@@ -229,14 +252,13 @@ impl GlobalState {
));
}
changed_files.extend(
file_changes
.into_iter()
.filter(|(_, (change_kind, just_created))| {
!matches!((change_kind, just_created), (vfs::ChangeKind::Delete, true))
})
.map(|(file_id, (change_kind, _))| vfs::ChangedFile { file_id, change_kind }),
);
let changed_files: Vec<_> = file_changes
.into_iter()
.filter(|(_, (change_kind, just_created))| {
!matches!((change_kind, just_created), (vfs::ChangeKind::Delete, true))
})
.map(|(file_id, (change_kind, _))| vfs::ChangedFile { file_id, change_kind })
.collect();
// A file was added or deleted
let mut has_structure_changes = false;
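
As a self-contained illustration of the coalescing rule described in the comment above (toy file ids and a simplified `ChangeKind`; the real per-event merge logic is partly elided by this hunk):

use std::collections::hash_map::Entry;
use std::collections::HashMap;

#[derive(Clone, Copy, Debug, PartialEq)]
enum ChangeKind { Create, Modify, Delete }

/// Keep one final event per file, dropping files that were created and then
/// deleted within the same batch so nobody tries to read their lost contents.
fn coalesce(events: Vec<(u32, ChangeKind)>) -> Vec<(u32, ChangeKind)> {
    let mut file_changes: HashMap<u32, (ChangeKind, bool)> = HashMap::new();
    for (file_id, kind) in events {
        match file_changes.entry(file_id) {
            Entry::Vacant(v) => {
                // Remember whether this batch is where the file first appeared.
                v.insert((kind, kind == ChangeKind::Create));
            }
            Entry::Occupied(mut o) => {
                let (_, just_created) = *o.get();
                o.insert((kind, just_created));
            }
        }
    }
    file_changes
        .into_iter()
        .filter(|&(_, (kind, just_created))| !(kind == ChangeKind::Delete && just_created))
        .map(|(file_id, (kind, _))| (file_id, kind))
        .collect()
}

fn main() {
    let out = coalesce(vec![
        (1, ChangeKind::Create),
        (1, ChangeKind::Delete), // create + delete in one batch: file 1 vanishes
        (2, ChangeKind::Modify),
    ]);
    assert_eq!(out, vec![(2, ChangeKind::Modify)]);
}
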
@@ -261,7 +283,7 @@ impl GlobalState {
String::from_utf8(bytes).ok().and_then(|text| {
let (text, line_endings) = LineEndings::normalize(text);
line_endings_map.insert(file.file_id, line_endings);
Some(Arc::new(text))
Some(Arc::from(text))
})
} else {
None
@@ -280,11 +302,11 @@ impl GlobalState {
{
let raw_database = self.analysis_host.raw_database();
// FIXME: ideally we should only trigger a workspace fetch for non-library changes
// but somethings going wrong with the source root business when we add a new local
// but something's going wrong with the source root business when we add a new local
// crate see https://github.com/rust-lang/rust-analyzer/issues/13029
if let Some(path) = workspace_structure_change {
self.fetch_workspaces_queue
.request_op(format!("workspace vfs file change: {}", path.display()));
.request_op(format!("workspace vfs file change: {}", path.display()), ());
}
self.proc_macro_changed =
changed_files.iter().filter(|file| !file.is_created_or_deleted()).any(|file| {
@@ -307,7 +329,8 @@ impl GlobalState {
check_fixes: Arc::clone(&self.diagnostics.check_fixes),
mem_docs: self.mem_docs.clone(),
semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
proc_macros_loaded: !self.fetch_build_data_queue.last_op_result().0.is_empty(),
proc_macros_loaded: !self.config.expand_proc_macros()
|| *self.fetch_proc_macros_queue.last_op_result(),
flycheck: self.flycheck.clone(),
}
}
@@ -331,7 +354,7 @@ impl GlobalState {
}
pub(crate) fn send_notification<N: lsp_types::notification::Notification>(
&mut self,
&self,
params: N::Params,
) {
let not = lsp_server::Notification::new(N::METHOD.to_string(), params);
@@ -372,7 +395,7 @@ impl GlobalState {
self.req_queue.incoming.is_completed(&request.id)
}
fn send(&mut self, message: lsp_server::Message) {
fn send(&self, message: lsp_server::Message) {
self.sender.send(message).unwrap()
}
}
@@ -431,6 +454,10 @@ impl GlobalStateSnapshot {
ProjectWorkspace::DetachedFiles { .. } => None,
})
}
pub(crate) fn vfs_memory_usage(&self) -> usize {
self.vfs.read().0.memory_usage()
}
}
pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url {

View file

@@ -0,0 +1,334 @@
//! This module is responsible for implementing handlers for Language Server
//! Protocol. This module specifically handles notifications.
use std::ops::Deref;
use itertools::Itertools;
use lsp_types::{
CancelParams, DidChangeConfigurationParams, DidChangeTextDocumentParams,
DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams,
DidOpenTextDocumentParams, DidSaveTextDocumentParams, WorkDoneProgressCancelParams,
};
use triomphe::Arc;
use vfs::{AbsPathBuf, ChangeKind, VfsPath};
use crate::{
config::Config, from_proto, global_state::GlobalState, lsp_ext::RunFlycheckParams,
lsp_utils::apply_document_changes, mem_docs::DocumentData, reload, Result,
};
pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> Result<()> {
let id: lsp_server::RequestId = match params.id {
lsp_types::NumberOrString::Number(id) => id.into(),
lsp_types::NumberOrString::String(id) => id.into(),
};
state.cancel(id);
Ok(())
}
pub(crate) fn handle_work_done_progress_cancel(
state: &mut GlobalState,
params: WorkDoneProgressCancelParams,
) -> Result<()> {
if let lsp_types::NumberOrString::String(s) = &params.token {
if let Some(id) = s.strip_prefix("rust-analyzer/flycheck/") {
if let Ok(id) = u32::from_str_radix(id, 10) {
if let Some(flycheck) = state.flycheck.get(id as usize) {
flycheck.cancel();
}
}
}
}
// Just ignore this. It is OK to continue sending progress
// notifications for this token, as the client can't know when
// we accepted the notification.
Ok(())
}
pub(crate) fn handle_did_open_text_document(
state: &mut GlobalState,
params: DidOpenTextDocumentParams,
) -> Result<()> {
let _p = profile::span("handle_did_open_text_document");
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
let already_exists = state
.mem_docs
.insert(path.clone(), DocumentData::new(params.text_document.version))
.is_err();
if already_exists {
tracing::error!("duplicate DidOpenTextDocument: {}", path);
}
state.vfs.write().0.set_file_contents(path, Some(params.text_document.text.into_bytes()));
}
Ok(())
}
pub(crate) fn handle_did_change_text_document(
state: &mut GlobalState,
params: DidChangeTextDocumentParams,
) -> Result<()> {
let _p = profile::span("handle_did_change_text_document");
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
match state.mem_docs.get_mut(&path) {
Some(doc) => {
// The version passed in DidChangeTextDocument is the version after all edits are applied
// so we should apply it before the vfs is notified.
doc.version = params.text_document.version;
}
None => {
tracing::error!("unexpected DidChangeTextDocument: {}", path);
return Ok(());
}
};
let vfs = &mut state.vfs.write().0;
let file_id = vfs.file_id(&path).unwrap();
let text = apply_document_changes(
state.config.position_encoding(),
|| std::str::from_utf8(vfs.file_contents(file_id)).unwrap().into(),
params.content_changes,
);
vfs.set_file_contents(path, Some(text.into_bytes()));
}
Ok(())
}
pub(crate) fn handle_did_close_text_document(
state: &mut GlobalState,
params: DidCloseTextDocumentParams,
) -> Result<()> {
let _p = profile::span("handle_did_close_text_document");
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
if state.mem_docs.remove(&path).is_err() {
tracing::error!("orphan DidCloseTextDocument: {}", path);
}
state.semantic_tokens_cache.lock().remove(&params.text_document.uri);
if let Some(path) = path.as_path() {
state.loader.handle.invalidate(path.to_path_buf());
}
}
Ok(())
}
pub(crate) fn handle_did_save_text_document(
state: &mut GlobalState,
params: DidSaveTextDocumentParams,
) -> Result<()> {
if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
// Re-fetch workspaces if a workspace related file has changed
if let Some(abs_path) = vfs_path.as_path() {
if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) {
state
.fetch_workspaces_queue
.request_op(format!("DidSaveTextDocument {}", abs_path.display()), ());
}
}
if !state.config.check_on_save() || run_flycheck(state, vfs_path) {
return Ok(());
}
} else if state.config.check_on_save() {
// No specific flycheck was triggered, so let's trigger all of them.
for flycheck in state.flycheck.iter() {
flycheck.restart();
}
}
Ok(())
}
pub(crate) fn handle_did_change_configuration(
state: &mut GlobalState,
_params: DidChangeConfigurationParams,
) -> Result<()> {
// As stated in https://github.com/microsoft/language-server-protocol/issues/676,
// this notification's parameters should be ignored and the actual config queried separately.
state.send_request::<lsp_types::request::WorkspaceConfiguration>(
lsp_types::ConfigurationParams {
items: vec![lsp_types::ConfigurationItem {
scope_uri: None,
section: Some("rust-analyzer".to_string()),
}],
},
|this, resp| {
tracing::debug!("config update response: '{:?}", resp);
let lsp_server::Response { error, result, .. } = resp;
match (error, result) {
(Some(err), _) => {
tracing::error!("failed to fetch the server settings: {:?}", err)
}
(None, Some(mut configs)) => {
if let Some(json) = configs.get_mut(0) {
// Note that json can be null according to the spec if the client can't
// provide a configuration. This is handled in Config::update below.
let mut config = Config::clone(&*this.config);
this.config_errors = config.update(json.take()).err();
this.update_configuration(config);
}
}
(None, None) => {
tracing::error!("received empty server settings response from the client")
}
}
},
);
Ok(())
}
pub(crate) fn handle_did_change_workspace_folders(
state: &mut GlobalState,
params: DidChangeWorkspaceFoldersParams,
) -> Result<()> {
let config = Arc::make_mut(&mut state.config);
for workspace in params.event.removed {
let Ok(path) = workspace.uri.to_file_path() else { continue };
let Ok(path) = AbsPathBuf::try_from(path) else { continue };
config.remove_workspace(&path);
}
let added = params
.event
.added
.into_iter()
.filter_map(|it| it.uri.to_file_path().ok())
.filter_map(|it| AbsPathBuf::try_from(it).ok());
config.add_workspaces(added);
if !config.has_linked_projects() && config.detached_files().is_empty() {
config.rediscover_workspaces();
state.fetch_workspaces_queue.request_op("client workspaces changed".to_string(), ())
}
Ok(())
}
pub(crate) fn handle_did_change_watched_files(
state: &mut GlobalState,
params: DidChangeWatchedFilesParams,
) -> Result<()> {
for change in params.changes {
if let Ok(path) = from_proto::abs_path(&change.uri) {
state.loader.handle.invalidate(path);
}
}
Ok(())
}
fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
let _p = profile::span("run_flycheck");
let file_id = state.vfs.read().0.file_id(&vfs_path);
if let Some(file_id) = file_id {
let world = state.snapshot();
let mut updated = false;
let task = move || -> std::result::Result<(), ide::Cancelled> {
// Trigger flychecks for all workspaces that depend on the saved file
// Crates containing or depending on the saved file
let crate_ids: Vec<_> = world
.analysis
.crates_for(file_id)?
.into_iter()
.flat_map(|id| world.analysis.transitive_rev_deps(id))
.flatten()
.sorted()
.unique()
.collect();
let crate_root_paths: Vec<_> = crate_ids
.iter()
.filter_map(|&crate_id| {
world
.analysis
.crate_root(crate_id)
.map(|file_id| {
world.file_id_to_file_path(file_id).as_path().map(ToOwned::to_owned)
})
.transpose()
})
.collect::<ide::Cancellable<_>>()?;
let crate_root_paths: Vec<_> = crate_root_paths.iter().map(Deref::deref).collect();
// Find all workspaces that have at least one target containing the saved file
let workspace_ids = world.workspaces.iter().enumerate().filter(|(_, ws)| match ws {
project_model::ProjectWorkspace::Cargo { cargo, .. } => {
cargo.packages().any(|pkg| {
cargo[pkg]
.targets
.iter()
.any(|&it| crate_root_paths.contains(&cargo[it].root.as_path()))
})
}
project_model::ProjectWorkspace::Json { project, .. } => {
project.crates().any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c))
}
project_model::ProjectWorkspace::DetachedFiles { .. } => false,
});
// Find and trigger corresponding flychecks
for flycheck in world.flycheck.iter() {
for (id, _) in workspace_ids.clone() {
if id == flycheck.id() {
updated = true;
flycheck.restart();
continue;
}
}
}
// No specific flycheck was triggered, so let's trigger all of them.
if !updated {
for flycheck in world.flycheck.iter() {
flycheck.restart();
}
}
Ok(())
};
state.task_pool.handle.spawn_with_sender(stdx::thread::ThreadIntent::Worker, move |_| {
if let Err(e) = std::panic::catch_unwind(task) {
tracing::error!("flycheck task panicked: {e:?}")
}
});
true
} else {
false
}
}
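
The crate-selection step in `run_flycheck` above is essentially a reverse-dependency closure. A toy version over a plain adjacency map, with made-up crate ids (the real code goes through `Analysis::crates_for` and `Analysis::transitive_rev_deps`):

use std::collections::{BTreeSet, HashMap};

/// `rev_deps` maps a crate to the crates that depend on it directly;
/// the result is every crate that must be re-checked when `start` changes.
fn transitive_rev_deps(rev_deps: &HashMap<u32, Vec<u32>>, start: u32) -> BTreeSet<u32> {
    let mut seen = BTreeSet::new();
    let mut work = vec![start];
    while let Some(krate) = work.pop() {
        if seen.insert(krate) {
            if let Some(dependents) = rev_deps.get(&krate) {
                work.extend(dependents.iter().copied());
            }
        }
    }
    seen // includes `start` itself, the crate containing the saved file
}

fn main() {
    // Crate 2 depends on 1, crate 3 depends on 2: saving a file in crate 1
    // should trigger flychecks for the workspaces owning crates 1, 2 and 3.
    let rev_deps = HashMap::from([(1, vec![2]), (2, vec![3])]);
    assert_eq!(transitive_rev_deps(&rev_deps, 1), BTreeSet::from([1, 2, 3]));
}
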
pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> Result<()> {
let _p = profile::span("handle_stop_flycheck");
state.flycheck.iter().for_each(|flycheck| flycheck.cancel());
Ok(())
}
pub(crate) fn handle_clear_flycheck(state: &mut GlobalState, _: ()) -> Result<()> {
let _p = profile::span("handle_clear_flycheck");
state.diagnostics.clear_check_all();
Ok(())
}
pub(crate) fn handle_run_flycheck(
state: &mut GlobalState,
params: RunFlycheckParams,
) -> Result<()> {
let _p = profile::span("handle_run_flycheck");
if let Some(text_document) = params.text_document {
if let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) {
if run_flycheck(state, vfs_path) {
return Ok(());
}
}
}
// No specific flycheck was triggered, so let's trigger all of them.
for flycheck in state.flycheck.iter() {
flycheck.restart();
}
Ok(())
}

View file

@@ -1,35 +1,36 @@
//! This module is responsible for implementing handlers for Language Server
//! Protocol. The majority of requests are fulfilled by calling into the
//! `ide` crate.
//! Protocol. This module specifically handles requests.
use std::{
fs,
io::Write as _,
process::{self, Stdio},
};
use anyhow::Context;
use ide::{
AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FileId, FilePosition,
FileRange, HoverAction, HoverGotoTypeData, Query, RangeInfo, ReferenceCategory, Runnable,
RunnableKind, SingleResolve, SourceChange, TextEdit,
AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange,
HoverAction, HoverGotoTypeData, Query, RangeInfo, ReferenceCategory, Runnable, RunnableKind,
SingleResolve, SourceChange, TextEdit,
};
use ide_db::SymbolKind;
use lsp_server::ErrorCode;
use lsp_types::{
CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem,
CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams,
CodeLens, CompletionItem, Diagnostic, DiagnosticTag, DocumentFormattingParams, FoldingRange,
FoldingRangeParams, HoverContents, InlayHint, InlayHintParams, Location, LocationLink,
NumberOrString, Position, PrepareRenameResponse, Range, RenameParams,
SemanticTokensDeltaParams, SemanticTokensFullDeltaResult, SemanticTokensParams,
SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation,
SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
CodeLens, CompletionItem, DocumentFormattingParams, FoldingRange, FoldingRangeParams,
HoverContents, InlayHint, InlayHintParams, Location, LocationLink, Position,
PrepareRenameResponse, Range, RenameParams, SemanticTokensDeltaParams,
SemanticTokensFullDeltaResult, SemanticTokensParams, SemanticTokensRangeParams,
SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation, SymbolTag,
TextDocumentIdentifier, Url, WorkspaceEdit,
};
use project_model::{ManifestPath, ProjectWorkspace, TargetKind};
use serde_json::json;
use stdx::{format_to, never};
use syntax::{algo, ast, AstNode, TextRange, TextSize};
use vfs::{AbsPath, AbsPathBuf};
use triomphe::Arc;
use vfs::{AbsPath, AbsPathBuf, VfsPath};
use crate::{
cargo_target_spec::CargoTargetSpec,
@@ -38,23 +39,29 @@ use crate::{
from_proto,
global_state::{GlobalState, GlobalStateSnapshot},
line_index::LineEndings,
lsp_ext::{self, PositionOrRange, ViewCrateGraphParams, WorkspaceSymbolParams},
lsp_ext::{
self, CrateInfoResult, ExternalDocsPair, ExternalDocsResponse, FetchDependencyListParams,
FetchDependencyListResult, PositionOrRange, ViewCrateGraphParams, WorkspaceSymbolParams,
},
lsp_utils::{all_edits_are_disjoint, invalid_params_error},
to_proto, LspError, Result,
};
pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<()> {
state.proc_macro_clients.clear();
// FIXME: use `Arc::from_iter` when it becomes available
state.proc_macro_clients = Arc::from(Vec::new());
state.proc_macro_changed = false;
state.fetch_workspaces_queue.request_op("reload workspace request".to_string());
state.fetch_build_data_queue.request_op("reload workspace request".to_string());
state.fetch_workspaces_queue.request_op("reload workspace request".to_string(), ());
Ok(())
}
pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> Result<()> {
let _p = profile::span("handle_stop_flycheck");
state.flycheck.iter().for_each(|flycheck| flycheck.cancel());
pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> Result<()> {
// FIXME: use `Arc::from_iter` when it becomes available
state.proc_macro_clients = Arc::from(Vec::new());
state.proc_macro_changed = false;
state.fetch_build_data_queue.request_op("rebuild proc macros request".to_string(), ());
Ok(())
}
@@ -95,6 +102,7 @@ pub(crate) fn handle_analyzer_status(
.collect::<Vec<&AbsPath>>()
);
}
format_to!(buf, "\nVfs memory usage: {}\n", profile::Bytes::new(snap.vfs_memory_usage() as _));
buf.push_str("\nAnalysis:\n");
buf.push_str(
&snap
@@ -154,6 +162,16 @@ pub(crate) fn handle_view_mir(
Ok(res)
}
pub(crate) fn handle_interpret_function(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> Result<String> {
let _p = profile::span("handle_interpret_function");
let position = from_proto::file_position(&snap, params)?;
let res = snap.analysis.interpret_function(position)?;
Ok(res)
}
pub(crate) fn handle_view_file_text(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentIdentifier,
@@ -502,7 +520,10 @@ pub(crate) fn handle_workspace_symbol(
#[allow(deprecated)]
let info = SymbolInformation {
name: nav.name.to_string(),
name: match &nav.alias {
Some(alias) => format!("{} (alias for {})", alias, nav.name),
None => format!("{}", nav.name),
},
kind: nav
.kind
.map(to_proto::symbol_kind)
@@ -747,20 +768,25 @@ pub(crate) fn handle_runnables(
let config = snap.config.runnables();
match cargo_spec {
Some(spec) => {
let all_targets = !snap.analysis.is_crate_no_std(spec.crate_id)?;
for cmd in ["check", "test"] {
let mut cargo_args =
vec![cmd.to_owned(), "--package".to_owned(), spec.package.clone()];
if all_targets {
cargo_args.push("--all-targets".to_owned());
}
res.push(lsp_ext::Runnable {
label: format!("cargo {cmd} -p {} --all-targets", spec.package),
label: format!(
"cargo {cmd} -p {}{all_targets}",
spec.package,
all_targets = if all_targets { " --all-targets" } else { "" }
),
location: None,
kind: lsp_ext::RunnableKind::Cargo,
args: lsp_ext::CargoRunnable {
workspace_root: Some(spec.workspace_root.clone().into()),
override_cargo: config.override_cargo.clone(),
cargo_args: vec![
cmd.to_string(),
"--package".to_string(),
spec.package.clone(),
"--all-targets".to_string(),
],
cargo_args,
cargo_extra_args: config.cargo_extra_args.clone(),
executable_args: Vec::new(),
expect_test: None,
@@ -769,14 +795,7 @@ pub(crate) fn handle_runnables(
}
}
None => {
if !snap.config.linked_projects().is_empty()
|| !snap
.config
.discovered_projects
.as_ref()
.map(|projects| projects.is_empty())
.unwrap_or(true)
{
if !snap.config.linked_projects().is_empty() {
res.push(lsp_ext::Runnable {
label: "cargo check --workspace".to_string(),
location: None,
@@ -1262,7 +1281,7 @@ pub(crate) fn handle_code_lens_resolve(
snap: GlobalStateSnapshot,
code_lens: CodeLens,
) -> Result<CodeLens> {
let annotation = from_proto::annotation(&snap, code_lens.clone())?;
let Some(annotation) = from_proto::annotation(&snap, code_lens.clone())? else { return Ok(code_lens) };
let annotation = snap.analysis.resolve_annotation(annotation)?;
let mut acc = Vec::new();
@@ -1321,38 +1340,6 @@ pub(crate) fn handle_ssr(
to_proto::workspace_edit(&snap, source_change).map_err(Into::into)
}
pub(crate) fn publish_diagnostics(
snap: &GlobalStateSnapshot,
file_id: FileId,
) -> Result<Vec<Diagnostic>> {
let _p = profile::span("publish_diagnostics");
let line_index = snap.file_line_index(file_id)?;
let diagnostics: Vec<Diagnostic> = snap
.analysis
.diagnostics(&snap.config.diagnostics(), AssistResolveStrategy::None, file_id)?
.into_iter()
.map(|d| Diagnostic {
range: to_proto::range(&line_index, d.range),
severity: Some(to_proto::diagnostic_severity(d.severity)),
code: Some(NumberOrString::String(d.code.as_str().to_string())),
code_description: Some(lsp_types::CodeDescription {
href: lsp_types::Url::parse(&format!(
"https://rust-analyzer.github.io/manual.html#{}",
d.code.as_str()
))
.unwrap(),
}),
source: Some("rust-analyzer".to_string()),
message: d.message,
related_information: None,
tags: if d.unused { Some(vec![DiagnosticTag::UNNECESSARY]) } else { None },
data: None,
})
.collect();
Ok(diagnostics)
}
pub(crate) fn handle_inlay_hints(
snap: GlobalStateSnapshot,
params: InlayHintParams,
@@ -1370,9 +1357,7 @@ pub(crate) fn handle_inlay_hints(
snap.analysis
.inlay_hints(&inlay_hints_config, file_id, Some(range))?
.into_iter()
.map(|it| {
to_proto::inlay_hint(&snap, &line_index, inlay_hints_config.render_colons, it)
})
.map(|it| to_proto::inlay_hint(&snap, &line_index, it))
.collect::<Cancellable<Vec<_>>>()?,
))
}
@@ -1493,7 +1478,13 @@ pub(crate) fn handle_semantic_tokens_full(
snap.workspaces.is_empty() || !snap.proc_macros_loaded;
let highlights = snap.analysis.highlight(highlight_config, file_id)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
let semantic_tokens = to_proto::semantic_tokens(
&text,
&line_index,
highlights,
snap.config.semantics_tokens_augments_syntax_tokens(),
snap.config.highlighting_non_standard_tokens(),
);
// Unconditionally cache the tokens
snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
@@ -1517,7 +1508,13 @@ pub(crate) fn handle_semantic_tokens_full_delta(
snap.workspaces.is_empty() || !snap.proc_macros_loaded;
let highlights = snap.analysis.highlight(highlight_config, file_id)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
let semantic_tokens = to_proto::semantic_tokens(
&text,
&line_index,
highlights,
snap.config.semantics_tokens_augments_syntax_tokens(),
snap.config.highlighting_non_standard_tokens(),
);
let mut cache = snap.semantic_tokens_cache.lock();
let cached_tokens = cache.entry(params.text_document.uri).or_default();
@@ -1551,20 +1548,53 @@ pub(crate) fn handle_semantic_tokens_range(
snap.workspaces.is_empty() || !snap.proc_macros_loaded;
let highlights = snap.analysis.highlight_range(highlight_config, frange)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
let semantic_tokens = to_proto::semantic_tokens(
&text,
&line_index,
highlights,
snap.config.semantics_tokens_augments_syntax_tokens(),
snap.config.highlighting_non_standard_tokens(),
);
Ok(Some(semantic_tokens.into()))
}
pub(crate) fn handle_open_docs(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
) -> Result<Option<lsp_types::Url>> {
) -> Result<ExternalDocsResponse> {
let _p = profile::span("handle_open_docs");
let position = from_proto::file_position(&snap, params)?;
let remote = snap.analysis.external_docs(position)?;
let ws_and_sysroot = snap.workspaces.iter().find_map(|ws| match ws {
ProjectWorkspace::Cargo { cargo, sysroot, .. } => Some((cargo, sysroot.as_ref().ok())),
ProjectWorkspace::Json { .. } => None,
ProjectWorkspace::DetachedFiles { .. } => None,
});
Ok(remote.and_then(|remote| Url::parse(&remote).ok()))
let (cargo, sysroot) = match ws_and_sysroot {
Some((ws, sysroot)) => (Some(ws), sysroot),
_ => (None, None),
};
let sysroot = sysroot.map(|p| p.root().as_os_str());
let target_dir = cargo.map(|cargo| cargo.target_directory()).map(|p| p.as_os_str());
let Ok(remote_urls) = snap.analysis.external_docs(position, target_dir, sysroot) else {
return if snap.config.local_docs() {
Ok(ExternalDocsResponse::WithLocal(Default::default()))
} else {
Ok(ExternalDocsResponse::Simple(None))
}
};
let web = remote_urls.web_url.and_then(|it| Url::parse(&it).ok());
let local = remote_urls.local_url.and_then(|it| Url::parse(&it).ok());
if snap.config.local_docs() {
Ok(ExternalDocsResponse::WithLocal(ExternalDocsPair { web, local }))
} else {
Ok(ExternalDocsResponse::Simple(web))
}
}
pub(crate) fn handle_open_cargo_toml(
@@ -1908,3 +1938,52 @@ fn run_rustfmt(
Ok(Some(to_proto::text_edit_vec(&line_index, diff(&file, &new_text))))
}
}
pub(crate) fn fetch_dependency_list(
state: GlobalStateSnapshot,
_params: FetchDependencyListParams,
) -> Result<FetchDependencyListResult> {
let crates = state.analysis.fetch_crates()?;
let crate_infos = crates
.into_iter()
.filter_map(|it| {
let root_file_path = state.file_id_to_file_path(it.root_file_id);
crate_path(root_file_path).and_then(to_url).map(|path| CrateInfoResult {
name: it.name,
version: it.version,
path,
})
})
.collect();
Ok(FetchDependencyListResult { crates: crate_infos })
}
/// Searches for the directory of a Rust crate given this crate's root file path.
///
/// # Arguments
///
/// * `root_file_path`: The path to the root file of the crate.
///
/// # Returns
///
/// An `Option` containing the path to the crate's directory (the directory that holds its
/// `Cargo.toml`) if an enclosing crate is found, or `None` otherwise.
fn crate_path(root_file_path: VfsPath) -> Option<VfsPath> {
let mut current_dir = root_file_path.parent();
while let Some(path) = current_dir {
let cargo_toml_path = path.join("../Cargo.toml")?;
if fs::metadata(cargo_toml_path.as_path()?).is_ok() {
let crate_path = cargo_toml_path.parent()?;
return Some(crate_path);
}
current_dir = path.parent();
}
None
}
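
Roughly the same upward walk in plain `std::path` terms, for illustration only (note the original starts one directory up via `join("../Cargo.toml")`, and stays in `VfsPath` space so it also applies to non-disk paths):

use std::path::{Path, PathBuf};

/// Walk from the directory of a crate's root file towards the filesystem
/// root, returning the first directory that contains a Cargo.toml.
fn crate_dir(root_file: &Path) -> Option<PathBuf> {
    let mut dir = root_file.parent();
    while let Some(d) = dir {
        if d.join("Cargo.toml").is_file() {
            return Some(d.to_path_buf());
        }
        dir = d.parent();
    }
    None
}

fn main() {
    // For a layout like my-crate/src/main.rs this prints Some("my-crate")
    // when run from the directory containing my-crate.
    println!("{:?}", crate_dir(Path::new("my-crate/src/main.rs")));
}
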
fn to_url(path: VfsPath) -> Option<Url> {
let path = path.as_path()?;
let str_path = path.as_os_str().to_str()?;
Url::from_file_path(str_path).ok()
}

View file

@@ -10,8 +10,6 @@
//! in release mode in VS Code. There's however "rust-analyzer: Copy Run Command Line"
//! which you can use to paste the command in terminal and add `--release` manually.
use std::sync::Arc;
use ide::{CallableSnippets, Change, CompletionConfig, FilePosition, TextSize};
use ide_db::{
imports::insert_use::{ImportGranularity, InsertUseConfig},
@@ -19,6 +17,7 @@ use ide_db::{
};
use project_model::CargoConfig;
use test_utils::project_root;
use triomphe::Arc;
use vfs::{AbsPathBuf, VfsPath};
use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
@@ -65,7 +64,7 @@ fn integrated_highlighting_benchmark() {
let mut text = host.analysis().file_text(file_id).unwrap().to_string();
text.push_str("\npub fn _dummy() {}\n");
let mut change = Change::new();
change.change_file(file_id, Some(Arc::new(text)));
change.change_file(file_id, Some(Arc::from(text)));
host.apply_change(change);
}
@@ -121,7 +120,7 @@ fn integrated_completion_benchmark() {
patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
+ "sel".len();
let mut change = Change::new();
change.change_file(file_id, Some(Arc::new(text)));
change.change_file(file_id, Some(Arc::from(text)));
host.apply_change(change);
completion_offset
};
@@ -160,7 +159,7 @@ fn integrated_completion_benchmark() {
patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)")
+ "self.".len();
let mut change = Change::new();
change.change_file(file_id, Some(Arc::new(text)));
change.change_file(file_id, Some(Arc::from(text)));
host.apply_change(change);
completion_offset
};

View file

@@ -25,7 +25,6 @@ mod diff;
mod dispatch;
mod from_proto;
mod global_state;
mod handlers;
mod line_index;
mod lsp_utils;
mod main_loop;
@@ -38,6 +37,11 @@ mod task_pool;
mod to_proto;
mod version;
mod handlers {
pub(crate) mod notification;
pub(crate) mod request;
}
pub mod config;
pub mod lsp_ext;

View file

@@ -5,9 +5,8 @@
//! This module does line ending conversion and detection (so that we can
//! convert back to `\r\n` on the way out).
use std::sync::Arc;
use ide_db::line_index::WideEncoding;
use triomphe::Arc;
#[derive(Clone, Copy)]
pub enum PositionEncoding {

View file

@@ -4,11 +4,11 @@ use std::{collections::HashMap, path::PathBuf};
use ide_db::line_index::WideEncoding;
use lsp_types::request::Request;
use lsp_types::PositionEncodingKind;
use lsp_types::{
notification::Notification, CodeActionKind, DocumentOnTypeFormattingParams,
PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams,
};
use lsp_types::{PositionEncodingKind, Url};
use serde::{Deserialize, Serialize};
use crate::line_index::PositionEncoding;
@@ -27,6 +27,31 @@ pub struct AnalyzerStatusParams {
pub text_document: Option<TextDocumentIdentifier>,
}
#[derive(Deserialize, Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct CrateInfoResult {
pub name: Option<String>,
pub version: Option<String>,
pub path: Url,
}
pub enum FetchDependencyList {}
impl Request for FetchDependencyList {
type Params = FetchDependencyListParams;
type Result = FetchDependencyListResult;
const METHOD: &'static str = "rust-analyzer/fetchDependencyList";
}
#[derive(Deserialize, Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct FetchDependencyListParams {}
#[derive(Deserialize, Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct FetchDependencyListResult {
pub crates: Vec<CrateInfoResult>,
}
pub enum MemoryUsage {}
impl Request for MemoryUsage {
@@ -51,6 +76,14 @@ impl Request for ReloadWorkspace {
const METHOD: &'static str = "rust-analyzer/reloadWorkspace";
}
pub enum RebuildProcMacros {}
impl Request for RebuildProcMacros {
type Params = ();
type Result = ();
const METHOD: &'static str = "rust-analyzer/rebuildProcMacros";
}
pub enum SyntaxTree {}
impl Request for SyntaxTree {
@@ -82,6 +115,14 @@ impl Request for ViewMir {
const METHOD: &'static str = "rust-analyzer/viewMir";
}
pub enum InterpretFunction {}
impl Request for InterpretFunction {
type Params = lsp_types::TextDocumentPositionParams;
type Result = String;
const METHOD: &'static str = "rust-analyzer/interpretFunction";
}
pub enum ViewFileText {}
impl Request for ViewFileText {
@@ -343,6 +384,7 @@ impl Request for CodeActionRequest {
}
pub enum CodeActionResolveRequest {}
impl Request for CodeActionResolveRequest {
type Params = CodeAction;
type Result = CodeAction;
@@ -418,7 +460,7 @@ pub enum HoverRequest {}
impl Request for HoverRequest {
type Params = HoverParams;
type Result = Option<Hover>;
const METHOD: &'static str = "textDocument/hover";
const METHOD: &'static str = lsp_types::request::HoverRequest::METHOD;
}
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
@@ -466,10 +508,24 @@ pub enum ExternalDocs {}
impl Request for ExternalDocs {
type Params = lsp_types::TextDocumentPositionParams;
type Result = Option<lsp_types::Url>;
type Result = ExternalDocsResponse;
const METHOD: &'static str = "experimental/externalDocs";
}
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
#[serde(untagged)]
pub enum ExternalDocsResponse {
Simple(Option<lsp_types::Url>),
WithLocal(ExternalDocsPair),
}
#[derive(Debug, Default, PartialEq, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct ExternalDocsPair {
pub web: Option<lsp_types::Url>,
pub local: Option<lsp_types::Url>,
}
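
Because the response enum is `#[serde(untagged)]`, the two variants produce different wire shapes. A small sketch with local stand-in types (plain `String` instead of `Url`, assuming the `serde` and `serde_json` crates are available):

use serde::Serialize;

#[derive(Serialize)]
#[serde(untagged)]
enum ExternalDocsResponse {
    Simple(Option<String>),
    WithLocal(ExternalDocsPair),
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct ExternalDocsPair {
    web: Option<String>,
    local: Option<String>,
}

fn main() {
    // Clients that don't advertise local-docs support get a bare URL (or null)...
    let simple = ExternalDocsResponse::Simple(Some("https://docs.rs/x".into()));
    assert_eq!(serde_json::to_string(&simple).unwrap(), r#""https://docs.rs/x""#);

    // ...while clients that opt in receive an object with both flavors.
    let with_local = ExternalDocsResponse::WithLocal(ExternalDocsPair {
        web: Some("https://docs.rs/x".into()),
        local: None,
    });
    assert_eq!(
        serde_json::to_string(&with_local).unwrap(),
        r#"{"web":"https://docs.rs/x","local":null}"#
    );
}
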
pub enum OpenCargoToml {}
impl Request for OpenCargoToml {
@@ -487,7 +543,14 @@ pub struct OpenCargoTomlParams {
/// Information about CodeLens, that is to be resolved.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(crate) enum CodeLensResolveData {
pub struct CodeLensResolveData {
pub version: i32,
pub kind: CodeLensResolveDataKind,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum CodeLensResolveDataKind {
Impls(lsp_types::request::GotoImplementationParams),
References(lsp_types::TextDocumentPositionParams),
}

View file

@@ -1,8 +1,9 @@
//! Utilities for LSP-related boilerplate code.
use std::{mem, ops::Range, sync::Arc};
use std::{mem, ops::Range};
use lsp_server::Notification;
use lsp_types::request::Request;
use triomphe::Arc;
use crate::{
from_proto,
@@ -80,39 +81,14 @@ impl GlobalState {
match additional_info {
Some(additional_info) => {
tracing::error!("{}:\n{}", &message, &additional_info);
match self.config.open_server_logs() && tracing::enabled!(tracing::Level::ERROR) {
true => self.send_request::<lsp_types::request::ShowMessageRequest>(
lsp_types::ShowMessageRequestParams {
typ: lsp_types::MessageType::ERROR,
message,
actions: Some(vec![lsp_types::MessageActionItem {
title: "Open server logs".to_owned(),
properties: Default::default(),
}]),
},
|this, resp| {
let lsp_server::Response { error: None, result: Some(result), .. } = resp
else { return };
if let Ok(Some(_item)) = crate::from_json::<
<lsp_types::request::ShowMessageRequest as lsp_types::request::Request>::Result,
>(
lsp_types::request::ShowMessageRequest::METHOD, &result
) {
this.send_notification::<lsp_ext::OpenServerLogs>(());
}
},
),
false => self.send_notification::<lsp_types::notification::ShowMessage>(
lsp_types::ShowMessageParams {
typ: lsp_types::MessageType::ERROR,
message,
},
),
}
self.show_message(
lsp_types::MessageType::ERROR,
message,
tracing::enabled!(tracing::Level::ERROR),
);
}
None => {
tracing::error!("{}", &message);
self.send_notification::<lsp_types::notification::ShowMessage>(
lsp_types::ShowMessageParams { typ: lsp_types::MessageType::ERROR, message },
);

View file

@@ -2,8 +2,6 @@
//! requests/replies and notifications back to the client.
use std::{
fmt,
ops::Deref,
sync::Arc,
time::{Duration, Instant},
};
@@ -11,20 +9,19 @@ use always_assert::always;
use crossbeam_channel::{select, Receiver};
use flycheck::FlycheckHandle;
use ide_db::base_db::{SourceDatabaseExt, VfsPath};
use itertools::Itertools;
use lsp_server::{Connection, Notification, Request};
use lsp_types::notification::Notification as _;
use vfs::{AbsPathBuf, ChangeKind, FileId};
use triomphe::Arc;
use vfs::FileId;
use crate::{
config::Config,
dispatch::{NotificationDispatcher, RequestDispatcher},
from_proto,
global_state::{file_id_to_url, url_to_file_id, GlobalState},
handlers, lsp_ext,
lsp_utils::{apply_document_changes, notification_is, Progress},
mem_docs::DocumentData,
reload::{self, BuildDataProgress, ProjectWorkspaceProgress},
lsp_ext,
lsp_utils::{notification_is, Progress},
reload::{BuildDataProgress, ProcMacroProgress, ProjectWorkspaceProgress},
Result,
};
@@ -36,7 +33,7 @@ pub fn main_loop(config: Config, connection: Connection) -> Result<()> {
// temporarily bumped. This optimization backfires in our case: each time the
// `main_loop` schedules a task to run on a threadpool, the worker threads
// get a higher priority, and (on a machine with fewer cores) displace the
// main loop! We work-around this by marking the main loop as a
// main loop! We work around this by marking the main loop as a
// higher-priority thread.
//
// https://docs.microsoft.com/en-us/windows/win32/procthread/scheduling-priorities
@@ -68,6 +65,7 @@ pub(crate) enum Task {
PrimeCaches(PrimeCachesProgress),
FetchWorkspace(ProjectWorkspaceProgress),
FetchBuildData(BuildDataProgress),
LoadProcMacros(ProcMacroProgress),
}
#[derive(Debug)]
@@ -79,7 +77,7 @@ pub(crate) enum PrimeCachesProgress {
impl fmt::Debug for Event {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let debug_verbose_not = |not: &Notification, f: &mut fmt::Formatter<'_>| {
let debug_non_verbose = |not: &Notification, f: &mut fmt::Formatter<'_>| {
f.debug_struct("Notification").field("method", &not.method).finish()
};
@@ -88,7 +86,7 @@ impl fmt::Debug for Event {
if notification_is::<lsp_types::notification::DidOpenTextDocument>(not)
|| notification_is::<lsp_types::notification::DidChangeTextDocument>(not)
{
return debug_verbose_not(not, f);
return debug_non_verbose(not, f);
}
}
Event::Task(Task::Response(resp)) => {
@@ -114,57 +112,63 @@ impl GlobalState {
self.update_status_or_notify();
if self.config.did_save_text_document_dynamic_registration() {
let save_registration_options = lsp_types::TextDocumentSaveRegistrationOptions {
include_text: Some(false),
text_document_registration_options: lsp_types::TextDocumentRegistrationOptions {
document_selector: Some(vec![
lsp_types::DocumentFilter {
language: None,
scheme: None,
pattern: Some("**/*.rs".into()),
},
lsp_types::DocumentFilter {
language: None,
scheme: None,
pattern: Some("**/Cargo.toml".into()),
},
lsp_types::DocumentFilter {
language: None,
scheme: None,
pattern: Some("**/Cargo.lock".into()),
},
]),
},
};
let registration = lsp_types::Registration {
id: "textDocument/didSave".to_string(),
method: "textDocument/didSave".to_string(),
register_options: Some(serde_json::to_value(save_registration_options).unwrap()),
};
self.send_request::<lsp_types::request::RegisterCapability>(
lsp_types::RegistrationParams { registrations: vec![registration] },
|_, _| (),
);
self.register_did_save_capability();
}
self.fetch_workspaces_queue.request_op("startup".to_string());
if let Some(cause) = self.fetch_workspaces_queue.should_start_op() {
self.fetch_workspaces_queue.request_op("startup".to_string(), ());
if let Some((cause, ())) = self.fetch_workspaces_queue.should_start_op() {
self.fetch_workspaces(cause);
}
while let Some(event) = self.next_event(&inbox) {
if let Event::Lsp(lsp_server::Message::Notification(not)) = &event {
if not.method == lsp_types::notification::Exit::METHOD {
return Ok(());
}
if matches!(
&event,
Event::Lsp(lsp_server::Message::Notification(Notification { method, .. }))
if method == lsp_types::notification::Exit::METHOD
) {
return Ok(());
}
self.handle_event(event)?
self.handle_event(event)?;
}
Err("client exited without proper shutdown sequence".into())
}
fn register_did_save_capability(&mut self) {
let save_registration_options = lsp_types::TextDocumentSaveRegistrationOptions {
include_text: Some(false),
text_document_registration_options: lsp_types::TextDocumentRegistrationOptions {
document_selector: Some(vec![
lsp_types::DocumentFilter {
language: None,
scheme: None,
pattern: Some("**/*.rs".into()),
},
lsp_types::DocumentFilter {
language: None,
scheme: None,
pattern: Some("**/Cargo.toml".into()),
},
lsp_types::DocumentFilter {
language: None,
scheme: None,
pattern: Some("**/Cargo.lock".into()),
},
]),
},
};
let registration = lsp_types::Registration {
id: "textDocument/didSave".to_string(),
method: "textDocument/didSave".to_string(),
register_options: Some(serde_json::to_value(save_registration_options).unwrap()),
};
self.send_request::<lsp_types::request::RegisterCapability>(
lsp_types::RegistrationParams { registrations: vec![registration] },
|_, _| (),
);
}
fn next_event(&self, inbox: &Receiver<lsp_server::Message>) -> Option<Event> {
select! {
recv(inbox) -> msg =>
@@ -186,19 +190,20 @@ impl GlobalState {
// NOTE: don't count blocking select! call as a loop-turn time
let _p = profile::span("GlobalState::handle_event");
tracing::debug!("{:?} handle_event({:?})", loop_start, event);
let task_queue_len = self.task_pool.handle.len();
if task_queue_len > 0 {
tracing::info!("task queue len: {}", task_queue_len);
let event_dbg_msg = format!("{event:?}");
tracing::debug!("{:?} handle_event({})", loop_start, event_dbg_msg);
if tracing::enabled!(tracing::Level::INFO) {
let task_queue_len = self.task_pool.handle.len();
if task_queue_len > 0 {
tracing::info!("task queue len: {}", task_queue_len);
}
}
let was_quiescent = self.is_quiescent();
match event {
Event::Lsp(msg) => match msg {
lsp_server::Message::Request(req) => self.on_new_request(loop_start, req),
lsp_server::Message::Notification(not) => {
self.on_notification(not)?;
}
lsp_server::Message::Notification(not) => self.on_notification(not)?,
lsp_server::Message::Response(resp) => self.complete_request(resp),
},
Event::Task(task) => {
@@ -247,7 +252,7 @@ impl GlobalState {
self.prime_caches_queue.op_completed(());
if cancelled {
self.prime_caches_queue
.request_op("restart after cancellation".to_string());
.request_op("restart after cancellation".to_string(), ());
}
}
};
@@ -279,7 +284,8 @@ impl GlobalState {
if self.is_quiescent() {
let became_quiescent = !(was_quiescent
|| self.fetch_workspaces_queue.op_requested()
|| self.fetch_build_data_queue.op_requested());
|| self.fetch_build_data_queue.op_requested()
|| self.fetch_proc_macros_queue.op_requested());
if became_quiescent {
if self.config.check_on_save() {
@@ -287,11 +293,12 @@ impl GlobalState {
self.flycheck.iter().for_each(FlycheckHandle::restart);
}
if self.config.prefill_caches() {
self.prime_caches_queue.request_op("became quiescent".to_string());
self.prime_caches_queue.request_op("became quiescent".to_string(), ());
}
}
if !was_quiescent || state_changed {
let client_refresh = !was_quiescent || state_changed;
if client_refresh {
// Refresh semantic tokens if the client supports it.
if self.config.semantic_tokens_refresh() {
self.semantic_tokens_cache.lock().clear();
@@ -309,9 +316,9 @@ impl GlobalState {
}
}
if (!was_quiescent || state_changed || memdocs_added_or_removed)
&& self.config.publish_diagnostics()
{
let update_diagnostics = (!was_quiescent || state_changed || memdocs_added_or_removed)
&& self.config.publish_diagnostics();
if update_diagnostics {
self.update_diagnostics()
}
}
@@ -357,48 +364,54 @@ impl GlobalState {
}
if self.config.cargo_autoreload() {
if let Some(cause) = self.fetch_workspaces_queue.should_start_op() {
if let Some((cause, ())) = self.fetch_workspaces_queue.should_start_op() {
self.fetch_workspaces(cause);
}
}
if !self.fetch_workspaces_queue.op_in_progress() {
if let Some(cause) = self.fetch_build_data_queue.should_start_op() {
if let Some((cause, ())) = self.fetch_build_data_queue.should_start_op() {
self.fetch_build_data(cause);
} else if let Some((cause, paths)) = self.fetch_proc_macros_queue.should_start_op() {
self.fetch_proc_macros(cause, paths);
}
}
if let Some(cause) = self.prime_caches_queue.should_start_op() {
tracing::debug!(%cause, "will prime caches");
let num_worker_threads = self.config.prime_caches_num_threads();
self.task_pool.handle.spawn_with_sender({
let analysis = self.snapshot().analysis;
move |sender| {
sender.send(Task::PrimeCaches(PrimeCachesProgress::Begin)).unwrap();
let res = analysis.parallel_prime_caches(num_worker_threads, |progress| {
let report = PrimeCachesProgress::Report(progress);
sender.send(Task::PrimeCaches(report)).unwrap();
});
sender
.send(Task::PrimeCaches(PrimeCachesProgress::End {
cancelled: res.is_err(),
}))
.unwrap();
}
});
if let Some((cause, ())) = self.prime_caches_queue.should_start_op() {
self.prime_caches(cause);
}
self.update_status_or_notify();
let loop_duration = loop_start.elapsed();
if loop_duration > Duration::from_millis(100) && was_quiescent {
tracing::warn!("overly long loop turn: {:?}", loop_duration);
self.poke_rust_analyzer_developer(format!("overly long loop turn: {loop_duration:?}"));
tracing::warn!("overly long loop turn took {loop_duration:?}: {event_dbg_msg}");
self.poke_rust_analyzer_developer(format!(
"overly long loop turn took {loop_duration:?}: {event_dbg_msg}"
));
}
Ok(())
}
fn prime_caches(&mut self, cause: String) {
tracing::debug!(%cause, "will prime caches");
let num_worker_threads = self.config.prime_caches_num_threads();
self.task_pool.handle.spawn_with_sender(stdx::thread::ThreadIntent::Worker, {
let analysis = self.snapshot().analysis;
move |sender| {
sender.send(Task::PrimeCaches(PrimeCachesProgress::Begin)).unwrap();
let res = analysis.parallel_prime_caches(num_worker_threads, |progress| {
let report = PrimeCachesProgress::Report(progress);
sender.send(Task::PrimeCaches(report)).unwrap();
});
sender
.send(Task::PrimeCaches(PrimeCachesProgress::End { cancelled: res.is_err() }))
.unwrap();
}
});
}
fn update_status_or_notify(&mut self) {
let status = self.current_status();
if self.last_reported_status.as_ref() != Some(&status) {
@@ -406,7 +419,11 @@ impl GlobalState {
if self.config.server_status_notification() {
self.send_notification::<lsp_ext::ServerStatusNotification>(status);
} else if let (health, Some(message)) = (status.health, &status.message) {
} else if let (
health @ (lsp_ext::Health::Warning | lsp_ext::Health::Error),
Some(message),
) = (status.health, &status.message)
{
let open_log_button = tracing::enabled!(tracing::Level::ERROR)
&& (self.fetch_build_data_error().is_err()
|| self.fetch_workspace_error().is_err());
@@ -462,7 +479,8 @@ impl GlobalState {
let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces);
if self.config.run_build_scripts() && workspaces_updated {
self.fetch_build_data_queue.request_op(format!("workspace updated"));
self.fetch_build_data_queue
.request_op(format!("workspace updated"), ());
}
(Progress::End, None)
@@ -487,6 +505,22 @@ impl GlobalState {
}
};
if let Some(state) = state {
self.report_progress("Building", state, msg, None, None);
}
}
Task::LoadProcMacros(progress) => {
let (state, msg) = match progress {
ProcMacroProgress::Begin => (Some(Progress::Begin), None),
ProcMacroProgress::Report(msg) => (Some(Progress::Report), Some(msg)),
ProcMacroProgress::End(proc_macro_load_result) => {
self.fetch_proc_macros_queue.op_completed(true);
self.set_proc_macros(proc_macro_load_result);
(Some(Progress::End), None)
}
};
if let Some(state) = state {
self.report_progress("Loading", state, msg, None, None);
}
@@ -568,21 +602,18 @@ impl GlobalState {
(Progress::Begin, None)
}
flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)),
flycheck::Progress::DidCancel => (Progress::End, None),
flycheck::Progress::DidCancel => {
self.last_flycheck_error = None;
(Progress::End, None)
}
flycheck::Progress::DidFailToRestart(err) => {
self.show_and_log_error(
"cargo check failed to start".to_string(),
Some(err),
);
self.last_flycheck_error =
Some(format!("cargo check failed to start: {err}"));
return;
}
flycheck::Progress::DidFinish(result) => {
if let Err(err) = result {
self.show_and_log_error(
"cargo check failed".to_string(),
Some(err.to_string()),
);
}
self.last_flycheck_error =
result.err().map(|err| format!("cargo check failed: {err}"));
(Progress::End, None)
}
};
@@ -631,18 +662,54 @@ impl GlobalState {
_ => (),
}
use crate::handlers::request as handlers;
dispatcher
// Request handlers that must run on the main thread
// because they mutate GlobalState:
.on_sync_mut::<lsp_ext::ReloadWorkspace>(handlers::handle_workspace_reload)
.on_sync_mut::<lsp_ext::RebuildProcMacros>(handlers::handle_proc_macros_rebuild)
.on_sync_mut::<lsp_ext::MemoryUsage>(handlers::handle_memory_usage)
.on_sync_mut::<lsp_ext::ShuffleCrateGraph>(handlers::handle_shuffle_crate_graph)
// Request handlers which are related to the user typing
// are run on the main thread to reduce latency:
.on_sync::<lsp_ext::JoinLines>(handlers::handle_join_lines)
.on_sync::<lsp_ext::OnEnter>(handlers::handle_on_enter)
.on_sync::<lsp_types::request::SelectionRangeRequest>(handlers::handle_selection_range)
.on_sync::<lsp_ext::MatchingBrace>(handlers::handle_matching_brace)
.on_sync::<lsp_ext::OnTypeFormatting>(handlers::handle_on_type_formatting)
// We can't run latency-sensitive request handlers which do semantic
// analysis on the main thread because that would block other
// requests. Instead, we run these request handlers on higher priority
// threads in the threadpool.
.on_latency_sensitive::<lsp_types::request::Completion>(handlers::handle_completion)
.on_latency_sensitive::<lsp_types::request::ResolveCompletionItem>(
handlers::handle_completion_resolve,
)
.on_latency_sensitive::<lsp_types::request::SemanticTokensFullRequest>(
handlers::handle_semantic_tokens_full,
)
.on_latency_sensitive::<lsp_types::request::SemanticTokensFullDeltaRequest>(
handlers::handle_semantic_tokens_full_delta,
)
.on_latency_sensitive::<lsp_types::request::SemanticTokensRangeRequest>(
handlers::handle_semantic_tokens_range,
)
// Formatting is not caused by the user typing,
// but it does qualify as latency-sensitive
// because a delay before formatting is applied
// can be confusing for the user.
.on_latency_sensitive::<lsp_types::request::Formatting>(handlers::handle_formatting)
.on_latency_sensitive::<lsp_types::request::RangeFormatting>(
handlers::handle_range_formatting,
)
// All other request handlers
.on::<lsp_ext::FetchDependencyList>(handlers::fetch_dependency_list)
.on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status)
.on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree)
.on::<lsp_ext::ViewHir>(handlers::handle_view_hir)
.on::<lsp_ext::ViewMir>(handlers::handle_view_mir)
.on::<lsp_ext::InterpretFunction>(handlers::handle_interpret_function)
.on::<lsp_ext::ViewFileText>(handlers::handle_view_file_text)
.on::<lsp_ext::ViewCrateGraph>(handlers::handle_view_crate_graph)
.on::<lsp_ext::ViewItemTree>(handlers::handle_view_item_tree)
@@ -657,7 +724,6 @@ impl GlobalState {
.on::<lsp_ext::OpenCargoToml>(handlers::handle_open_cargo_toml)
.on::<lsp_ext::MoveItem>(handlers::handle_move_item)
.on::<lsp_ext::WorkspaceSymbol>(handlers::handle_workspace_symbol)
.on::<lsp_ext::OnTypeFormatting>(handlers::handle_on_type_formatting)
.on::<lsp_types::request::DocumentSymbolRequest>(handlers::handle_document_symbol)
.on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition)
.on::<lsp_types::request::GotoDeclaration>(handlers::handle_goto_declaration)
@@ -665,8 +731,6 @@ impl GlobalState {
.on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
.on_no_retry::<lsp_types::request::InlayHintRequest>(handlers::handle_inlay_hints)
.on::<lsp_types::request::InlayHintResolveRequest>(handlers::handle_inlay_hints_resolve)
.on::<lsp_types::request::Completion>(handlers::handle_completion)
.on::<lsp_types::request::ResolveCompletionItem>(handlers::handle_completion_resolve)
.on::<lsp_types::request::CodeLensRequest>(handlers::handle_code_lens)
.on::<lsp_types::request::CodeLensResolve>(handlers::handle_code_lens_resolve)
.on::<lsp_types::request::FoldingRangeRequest>(handlers::handle_folding_range)
@@ -674,8 +738,6 @@ impl GlobalState {
.on::<lsp_types::request::PrepareRenameRequest>(handlers::handle_prepare_rename)
.on::<lsp_types::request::Rename>(handlers::handle_rename)
.on::<lsp_types::request::References>(handlers::handle_references)
.on::<lsp_types::request::Formatting>(handlers::handle_formatting)
.on::<lsp_types::request::RangeFormatting>(handlers::handle_range_formatting)
.on::<lsp_types::request::DocumentHighlightRequest>(handlers::handle_document_highlight)
.on::<lsp_types::request::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)
.on::<lsp_types::request::CallHierarchyIncomingCalls>(
@@ -684,15 +746,6 @@ impl GlobalState {
.on::<lsp_types::request::CallHierarchyOutgoingCalls>(
handlers::handle_call_hierarchy_outgoing,
)
.on::<lsp_types::request::SemanticTokensFullRequest>(
handlers::handle_semantic_tokens_full,
)
.on::<lsp_types::request::SemanticTokensFullDeltaRequest>(
handlers::handle_semantic_tokens_full_delta,
)
.on::<lsp_types::request::SemanticTokensRangeRequest>(
handlers::handle_semantic_tokens_range,
)
.on::<lsp_types::request::WillRenameFiles>(handlers::handle_will_rename_files)
.on::<lsp_ext::Ssr>(handlers::handle_ssr)
.finish();
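
A condensed, hypothetical summary of the four dispatch tiers wired up above; the real routing lives in `dispatch.rs`, this merely restates the policy the comments describe:

/// Where a request runs, in decreasing order of main-thread involvement.
#[derive(Debug)]
enum DispatchTier {
    /// Main thread with `&mut GlobalState`: workspace reload, memory usage.
    SyncMut,
    /// Main thread, read-only: typing-related requests such as onEnter.
    Sync,
    /// Thread pool, high-priority threads: completion, semantic tokens, formatting.
    LatencySensitive,
    /// Thread pool, normal priority: everything else.
    Worker,
}

fn tier_for(method: &str) -> DispatchTier {
    match method {
        "rust-analyzer/reloadWorkspace" | "rust-analyzer/memoryUsage" => DispatchTier::SyncMut,
        "experimental/onEnter" | "experimental/joinLines" => DispatchTier::Sync,
        "textDocument/completion" | "textDocument/formatting" => DispatchTier::LatencySensitive,
        _ => DispatchTier::Worker,
    }
}

fn main() {
    assert!(matches!(tier_for("textDocument/completion"), DispatchTier::LatencySensitive));
    assert!(matches!(tier_for("textDocument/hover"), DispatchTier::Worker));
}
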
@@ -700,282 +753,22 @@ impl GlobalState {
/// Handles an incoming notification.
fn on_notification(&mut self, not: Notification) -> Result<()> {
// FIXME: Move these implementations out into a module similar to on_request
fn run_flycheck(this: &mut GlobalState, vfs_path: VfsPath) -> bool {
let file_id = this.vfs.read().0.file_id(&vfs_path);
if let Some(file_id) = file_id {
let world = this.snapshot();
let mut updated = false;
let task = move || -> std::result::Result<(), ide::Cancelled> {
// Trigger flychecks for all workspaces that depend on the saved file
// Crates containing or depending on the saved file
let crate_ids: Vec<_> = world
.analysis
.crates_for(file_id)?
.into_iter()
.flat_map(|id| world.analysis.transitive_rev_deps(id))
.flatten()
.sorted()
.unique()
.collect();
let crate_root_paths: Vec<_> = crate_ids
.iter()
.filter_map(|&crate_id| {
world
.analysis
.crate_root(crate_id)
.map(|file_id| {
world
.file_id_to_file_path(file_id)
.as_path()
.map(ToOwned::to_owned)
})
.transpose()
})
.collect::<ide::Cancellable<_>>()?;
let crate_root_paths: Vec<_> =
crate_root_paths.iter().map(Deref::deref).collect();
// Find all workspaces that have at least one target containing the saved file
let workspace_ids =
world.workspaces.iter().enumerate().filter(|(_, ws)| match ws {
project_model::ProjectWorkspace::Cargo { cargo, .. } => {
cargo.packages().any(|pkg| {
cargo[pkg].targets.iter().any(|&it| {
crate_root_paths.contains(&cargo[it].root.as_path())
})
})
}
project_model::ProjectWorkspace::Json { project, .. } => project
.crates()
.any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)),
project_model::ProjectWorkspace::DetachedFiles { .. } => false,
});
// Find and trigger corresponding flychecks
for flycheck in world.flycheck.iter() {
for (id, _) in workspace_ids.clone() {
if id == flycheck.id() {
updated = true;
flycheck.restart();
continue;
}
}
}
// No specific flycheck was triggered, so let's trigger all of them.
if !updated {
for flycheck in world.flycheck.iter() {
flycheck.restart();
}
}
Ok(())
};
this.task_pool.handle.spawn_with_sender(move |_| {
if let Err(e) = std::panic::catch_unwind(task) {
tracing::error!("flycheck task panicked: {e:?}")
}
});
true
} else {
false
}
}
use crate::handlers::notification as handlers;
use lsp_types::notification as notifs;
NotificationDispatcher { not: Some(not), global_state: self }
.on::<lsp_types::notification::Cancel>(|this, params| {
let id: lsp_server::RequestId = match params.id {
lsp_types::NumberOrString::Number(id) => id.into(),
lsp_types::NumberOrString::String(id) => id.into(),
};
this.cancel(id);
Ok(())
})?
.on::<lsp_types::notification::WorkDoneProgressCancel>(|this, params| {
if let lsp_types::NumberOrString::String(s) = &params.token {
if let Some(id) = s.strip_prefix("rust-analyzer/flycheck/") {
if let Ok(id) = u32::from_str_radix(id, 10) {
if let Some(flycheck) = this.flycheck.get(id as usize) {
flycheck.cancel();
}
}
}
}
// Just ignore this. It is OK to continue sending progress
// notifications for this token, as the client can't know when
// we accepted notification.
Ok(())
})?
.on::<lsp_types::notification::DidOpenTextDocument>(|this, params| {
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
let already_exists = this
.mem_docs
.insert(path.clone(), DocumentData::new(params.text_document.version))
.is_err();
if already_exists {
tracing::error!("duplicate DidOpenTextDocument: {}", path);
}
this.vfs
.write()
.0
.set_file_contents(path, Some(params.text_document.text.into_bytes()));
}
Ok(())
})?
.on::<notifs::Cancel>(handlers::handle_cancel)?
.on::<notifs::WorkDoneProgressCancel>(handlers::handle_work_done_progress_cancel)?
.on::<notifs::DidOpenTextDocument>(handlers::handle_did_open_text_document)?
.on::<notifs::DidChangeTextDocument>(handlers::handle_did_change_text_document)?
.on::<notifs::DidCloseTextDocument>(handlers::handle_did_close_text_document)?
.on::<notifs::DidSaveTextDocument>(handlers::handle_did_save_text_document)?
.on::<notifs::DidChangeConfiguration>(handlers::handle_did_change_configuration)?
.on::<notifs::DidChangeWorkspaceFolders>(handlers::handle_did_change_workspace_folders)?
.on::<notifs::DidChangeWatchedFiles>(handlers::handle_did_change_watched_files)?
.on::<lsp_ext::CancelFlycheck>(handlers::handle_cancel_flycheck)?
.on::<lsp_types::notification::DidChangeTextDocument>(|this, params| {
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
match this.mem_docs.get_mut(&path) {
Some(doc) => {
// The version passed in DidChangeTextDocument is the version after all edits are applied
// so we should apply it before the vfs is notified.
doc.version = params.text_document.version;
}
None => {
tracing::error!("unexpected DidChangeTextDocument: {}", path);
return Ok(());
}
};
let vfs = &mut this.vfs.write().0;
let file_id = vfs.file_id(&path).unwrap();
let text = apply_document_changes(
this.config.position_encoding(),
|| std::str::from_utf8(vfs.file_contents(file_id)).unwrap().into(),
params.content_changes,
);
vfs.set_file_contents(path, Some(text.into_bytes()));
}
Ok(())
})?
.on::<lsp_types::notification::DidCloseTextDocument>(|this, params| {
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
if this.mem_docs.remove(&path).is_err() {
tracing::error!("orphan DidCloseTextDocument: {}", path);
}
this.semantic_tokens_cache.lock().remove(&params.text_document.uri);
if let Some(path) = path.as_path() {
this.loader.handle.invalidate(path.to_path_buf());
}
}
Ok(())
})?
.on::<lsp_ext::ClearFlycheck>(|this, ()| {
this.diagnostics.clear_check_all();
Ok(())
})?
.on::<lsp_ext::RunFlycheck>(|this, params| {
if let Some(text_document) = params.text_document {
if let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) {
if run_flycheck(this, vfs_path) {
return Ok(());
}
}
}
// No specific flycheck was triggered, so let's trigger all of them.
for flycheck in this.flycheck.iter() {
flycheck.restart();
}
Ok(())
})?
.on::<lsp_types::notification::DidSaveTextDocument>(|this, params| {
if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
// Re-fetch workspaces if a workspace related file has changed
if let Some(abs_path) = vfs_path.as_path() {
if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) {
this.fetch_workspaces_queue
.request_op(format!("DidSaveTextDocument {}", abs_path.display()));
}
}
if !this.config.check_on_save() || run_flycheck(this, vfs_path) {
return Ok(());
}
} else if this.config.check_on_save() {
// No specific flycheck was triggered, so let's trigger all of them.
for flycheck in this.flycheck.iter() {
flycheck.restart();
}
}
Ok(())
})?
.on::<lsp_types::notification::DidChangeConfiguration>(|this, _params| {
// As stated in https://github.com/microsoft/language-server-protocol/issues/676,
// this notification's parameters should be ignored and the actual config queried separately.
this.send_request::<lsp_types::request::WorkspaceConfiguration>(
lsp_types::ConfigurationParams {
items: vec![lsp_types::ConfigurationItem {
scope_uri: None,
section: Some("rust-analyzer".to_string()),
}],
},
|this, resp| {
tracing::debug!("config update response: {:?}", resp);
let lsp_server::Response { error, result, .. } = resp;
match (error, result) {
(Some(err), _) => {
tracing::error!("failed to fetch the server settings: {:?}", err)
}
(None, Some(mut configs)) => {
if let Some(json) = configs.get_mut(0) {
// Note that json can be null according to the spec if the client can't
// provide a configuration. This is handled in Config::update below.
let mut config = Config::clone(&*this.config);
if let Err(error) = config.update(json.take()) {
this.show_message(
lsp_types::MessageType::WARNING,
error.to_string(),
false,
);
}
this.update_configuration(config);
}
}
(None, None) => tracing::error!(
"received empty server settings response from the client"
),
}
},
);
Ok(())
})?
.on::<lsp_types::notification::DidChangeWorkspaceFolders>(|this, params| {
let config = Arc::make_mut(&mut this.config);
for workspace in params.event.removed {
let Ok(path) = workspace.uri.to_file_path() else { continue };
let Ok(path) = AbsPathBuf::try_from(path) else { continue };
let Some(position) = config.workspace_roots.iter().position(|it| it == &path) else { continue };
config.workspace_roots.remove(position);
}
let added = params
.event
.added
.into_iter()
.filter_map(|it| it.uri.to_file_path().ok())
.filter_map(|it| AbsPathBuf::try_from(it).ok());
config.workspace_roots.extend(added);
if !config.has_linked_projects() && config.detached_files().is_empty() {
config.rediscover_workspaces();
this.fetch_workspaces_queue.request_op("client workspaces changed".to_string())
}
Ok(())
})?
.on::<lsp_types::notification::DidChangeWatchedFiles>(|this, params| {
for change in params.changes {
if let Ok(path) = from_proto::abs_path(&change.uri) {
this.loader.handle.invalidate(path);
}
}
Ok(())
})?
.on::<lsp_ext::ClearFlycheck>(handlers::handle_clear_flycheck)?
.on::<lsp_ext::RunFlycheck>(handlers::handle_run_flycheck)?
.finish();
Ok(())
}
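The WorkDoneProgressCancel arm above relies on flycheck progress tokens having the shape rust-analyzer/flycheck/<id>. A minimal, self-contained sketch of that round trip (the helper name and the non-matching token are invented for illustration):

fn flycheck_id(token: &str) -> Option<usize> {
    // Tokens without the flycheck prefix belong to other progress sources.
    let id = token.strip_prefix("rust-analyzer/flycheck/")?;
    id.parse::<u32>().ok().map(|id| id as usize)
}

fn main() {
    assert_eq!(flycheck_id("rust-analyzer/flycheck/3"), Some(3));
    assert_eq!(flycheck_id("rustAnalyzer/cargoWatcher"), None);
}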
@ -1000,16 +793,60 @@ impl GlobalState {
tracing::trace!("updating notifications for {:?}", subscriptions);
let snapshot = self.snapshot();
self.task_pool.handle.spawn(move || {
// Diagnostics are triggered by the user typing
// so we run them on a latency sensitive thread.
self.task_pool.handle.spawn(stdx::thread::ThreadIntent::LatencySensitive, move || {
let _p = profile::span("publish_diagnostics");
let _ctx = stdx::panic_context::enter("publish_diagnostics".to_owned());
let diagnostics = subscriptions
.into_iter()
.filter_map(|file_id| {
handlers::publish_diagnostics(&snapshot, file_id)
.ok()
.map(|diags| (file_id, diags))
let line_index = snapshot.file_line_index(file_id).ok()?;
Some((
file_id,
line_index,
snapshot
.analysis
.diagnostics(
&snapshot.config.diagnostics(),
ide::AssistResolveStrategy::None,
file_id,
)
.ok()?,
))
})
.collect::<Vec<_>>();
Task::Diagnostics(diagnostics)
})
.map(|(file_id, line_index, it)| {
(
file_id,
it.into_iter()
.map(move |d| lsp_types::Diagnostic {
range: crate::to_proto::range(&line_index, d.range),
severity: Some(crate::to_proto::diagnostic_severity(d.severity)),
code: Some(lsp_types::NumberOrString::String(
d.code.as_str().to_string(),
)),
code_description: Some(lsp_types::CodeDescription {
href: lsp_types::Url::parse(&format!(
"https://rust-analyzer.github.io/manual.html#{}",
d.code.as_str()
))
.unwrap(),
}),
source: Some("rust-analyzer".to_string()),
message: d.message,
related_information: None,
tags: if d.unused {
Some(vec![lsp_types::DiagnosticTag::UNNECESSARY])
} else {
None
},
data: None,
})
.collect::<Vec<_>>(),
)
});
Task::Diagnostics(diagnostics.collect())
});
}
}
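The field-by-field construction above maps ide diagnostics onto LSP ones. A compact, self-contained sketch of the same shape, assuming the lsp-types crate as a dependency; all concrete values are invented for illustration:

use lsp_types::{
    CodeDescription, Diagnostic, DiagnosticSeverity, DiagnosticTag, NumberOrString, Position,
    Range, Url,
};

fn sample_diagnostic() -> Diagnostic {
    let code = "unused_variables"; // a made-up diagnostic code
    Diagnostic {
        range: Range::new(Position::new(3, 8), Position::new(3, 9)),
        severity: Some(DiagnosticSeverity::WARNING),
        code: Some(NumberOrString::String(code.to_string())),
        // Every code links into the manual, keyed by the code itself.
        code_description: Some(CodeDescription {
            href: Url::parse(&format!("https://rust-analyzer.github.io/manual.html#{code}"))
                .unwrap(),
        }),
        source: Some("rust-analyzer".to_string()),
        message: "unused variable: `x`".to_string(),
        related_information: None,
        // Unused-code diagnostics are tagged so editors can fade them out.
        tags: Some(vec![DiagnosticTag::UNNECESSARY]),
        data: None,
    }
}

fn main() {
    println!("{:?}", sample_diagnostic());
}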
@ -28,7 +28,7 @@ pub(crate) fn format_docs(src: &str) -> String {
if in_code_block {
let trimmed = line.trim_start();
if trimmed.starts_with("##") {
if is_rust && trimmed.starts_with("##") {
line = &trimmed[1..];
}
}
@ -154,4 +154,12 @@ let s = "foo
assert_eq!(format_docs(comment), "```rust\nlet s = \"foo\n# bar # baz\";\n```");
}
#[test]
fn test_format_docs_handles_double_hashes_non_rust() {
let comment = r#"```markdown
## A second-level heading
```"#;
assert_eq!(format_docs(comment), "```markdown\n## A second-level heading\n```");
}
}
@ -3,23 +3,23 @@
pub(crate) type Cause = String;
pub(crate) struct OpQueue<Output> {
op_requested: Option<Cause>,
pub(crate) struct OpQueue<Args = (), Output = ()> {
op_requested: Option<(Cause, Args)>,
op_in_progress: bool,
last_op_result: Output,
}
impl<Output: Default> Default for OpQueue<Output> {
impl<Args, Output: Default> Default for OpQueue<Args, Output> {
fn default() -> Self {
Self { op_requested: None, op_in_progress: false, last_op_result: Default::default() }
}
}
impl<Output> OpQueue<Output> {
pub(crate) fn request_op(&mut self, reason: Cause) {
self.op_requested = Some(reason);
impl<Args, Output> OpQueue<Args, Output> {
pub(crate) fn request_op(&mut self, reason: Cause, args: Args) {
self.op_requested = Some((reason, args));
}
pub(crate) fn should_start_op(&mut self) -> Option<Cause> {
pub(crate) fn should_start_op(&mut self) -> Option<(Cause, Args)> {
if self.op_in_progress {
return None;
}
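To make the queue's protocol concrete, here is a minimal, self-contained sketch; the rest of should_start_op and the completion hook are not part of this hunk and are filled in as assumptions:

type Cause = String;

struct OpQueue<Args = (), Output = ()> {
    op_requested: Option<(Cause, Args)>,
    op_in_progress: bool,
    last_op_result: Output,
}

impl<Args, Output: Default> Default for OpQueue<Args, Output> {
    fn default() -> Self {
        Self { op_requested: None, op_in_progress: false, last_op_result: Default::default() }
    }
}

impl<Args, Output> OpQueue<Args, Output> {
    fn request_op(&mut self, reason: Cause, args: Args) {
        self.op_requested = Some((reason, args));
    }
    fn should_start_op(&mut self) -> Option<(Cause, Args)> {
        if self.op_in_progress {
            return None;
        }
        self.op_in_progress = self.op_requested.is_some();
        self.op_requested.take()
    }
    // Assumed completion hook: record the result and let the next request start.
    fn op_completed(&mut self, result: Output) {
        self.op_in_progress = false;
        self.last_op_result = result;
    }
    fn last_op_result(&self) -> &Output {
        &self.last_op_result
    }
}

fn main() {
    let mut queue: OpQueue<&str, u32> = OpQueue::default();
    queue.request_op("linked projects changed".to_string(), "first");
    // A newer request before the op starts simply replaces the pending one.
    queue.request_op("config changed".to_string(), "second");
    let (cause, args) = queue.should_start_op().unwrap();
    assert_eq!((cause.as_str(), args), ("config changed", "second"));
    // While the op is in flight, nothing else is allowed to start.
    assert!(queue.should_start_op().is_none());
    queue.op_completed(42);
    assert_eq!(*queue.last_op_result(), 42);
}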
@ -12,22 +12,24 @@
//! correct. Instead, we try to provide a best-effort service. Even if the
//! project is currently loading and we don't have a full project model, we
//! still want to respond to various requests.
use std::{collections::hash_map::Entry, mem, sync::Arc};
use std::{collections::hash_map::Entry, iter, mem, sync};
use flycheck::{FlycheckConfig, FlycheckHandle};
use hir::db::DefDatabase;
use ide::Change;
use ide_db::{
base_db::{
CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
ProcMacroLoadResult, SourceRoot, VfsPath,
salsa::Durability, CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros, SourceRoot, VfsPath,
},
FxHashMap,
};
use itertools::Itertools;
use proc_macro_api::{MacroDylib, ProcMacroServer};
use project_model::{PackageRoot, ProjectWorkspace, WorkspaceBuildScripts};
use stdx::{format_to, thread::ThreadIntent};
use syntax::SmolStr;
use triomphe::Arc;
use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};
use crate::{
@ -54,11 +56,19 @@ pub(crate) enum BuildDataProgress {
End((Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)),
}
#[derive(Debug)]
pub(crate) enum ProcMacroProgress {
Begin,
Report(String),
End(ProcMacros),
}
impl GlobalState {
pub(crate) fn is_quiescent(&self) -> bool {
!(self.last_reported_status.is_none()
|| self.fetch_workspaces_queue.op_in_progress()
|| self.fetch_build_data_queue.op_in_progress()
|| self.fetch_proc_macros_queue.op_in_progress()
|| self.vfs_progress_config_version < self.vfs_config_version
|| self.vfs_progress_n_done < self.vfs_progress_n_total)
}
@ -66,21 +76,27 @@ impl GlobalState {
pub(crate) fn update_configuration(&mut self, config: Config) {
let _p = profile::span("GlobalState::update_configuration");
let old_config = mem::replace(&mut self.config, Arc::new(config));
if self.config.lru_capacity() != old_config.lru_capacity() {
self.analysis_host.update_lru_capacity(self.config.lru_capacity());
if self.config.lru_parse_query_capacity() != old_config.lru_parse_query_capacity() {
self.analysis_host.update_lru_capacity(self.config.lru_parse_query_capacity());
}
if self.config.lru_query_capacities() != old_config.lru_query_capacities() {
self.analysis_host.update_lru_capacities(
&self.config.lru_query_capacities().cloned().unwrap_or_default(),
);
}
if self.config.linked_projects() != old_config.linked_projects() {
self.fetch_workspaces_queue.request_op("linked projects changed".to_string())
self.fetch_workspaces_queue.request_op("linked projects changed".to_string(), ())
} else if self.config.flycheck() != old_config.flycheck() {
self.reload_flycheck();
}
if self.analysis_host.raw_database().enable_proc_attr_macros()
if self.analysis_host.raw_database().expand_proc_attr_macros()
!= self.config.expand_proc_attr_macros()
{
self.analysis_host
.raw_database_mut()
.set_enable_proc_attr_macros(self.config.expand_proc_attr_macros());
self.analysis_host.raw_database_mut().set_expand_proc_attr_macros_with_durability(
self.config.expand_proc_attr_macros(),
Durability::HIGH,
);
}
}
@ -94,12 +110,16 @@ impl GlobalState {
if self.proc_macro_changed {
status.health = lsp_ext::Health::Warning;
message.push_str("Reload required due to source changes of a procedural macro.\n\n");
message.push_str("Proc-macros have changed and need to be rebuild.\n\n");
}
if let Err(_) = self.fetch_build_data_error() {
status.health = lsp_ext::Health::Warning;
message.push_str("Failed to run build scripts of some packages.\n\n");
}
if self.proc_macro_clients.iter().any(|it| it.is_err()) {
status.health = lsp_ext::Health::Warning;
message.push_str("Failed to spawn one or more proc-macro servers.\n\n");
}
if !self.config.cargo_autoreload()
&& self.is_quiescent()
&& self.fetch_workspaces_queue.op_requested()
@ -112,17 +132,37 @@ impl GlobalState {
&& self.config.notifications().cargo_toml_not_found
{
status.health = lsp_ext::Health::Warning;
message.push_str("Failed to discover workspace.\n\n");
message.push_str("Failed to discover workspace.\n");
message.push_str("Consider adding the `Cargo.toml` of the workspace to the [`linkedProjects`](https://rust-analyzer.github.io/manual.html#rust-analyzer.linkedProjects) setting.\n\n");
}
if let Some(err) = &self.config_errors {
status.health = lsp_ext::Health::Warning;
format_to!(message, "{err}\n");
}
if let Some(err) = &self.last_flycheck_error {
status.health = lsp_ext::Health::Warning;
message.push_str(err);
message.push('\n');
}
for ws in self.workspaces.iter() {
let (ProjectWorkspace::Cargo { sysroot, .. }
| ProjectWorkspace::Json { sysroot, .. }
| ProjectWorkspace::DetachedFiles { sysroot, .. }) = ws;
if let Err(Some(e)) = sysroot {
status.health = lsp_ext::Health::Warning;
message.push_str(e);
message.push_str("\n\n");
match sysroot {
Err(None) => (),
Err(Some(e)) => {
status.health = lsp_ext::Health::Warning;
message.push_str(e);
message.push_str("\n\n");
}
Ok(s) => {
if let Some(e) = s.loading_warning() {
status.health = lsp_ext::Health::Warning;
message.push_str(&e);
message.push_str("\n\n");
}
}
}
if let ProjectWorkspace::Cargo { rustc: Err(Some(e)), .. } = ws {
status.health = lsp_ext::Health::Warning;
@ -145,7 +185,7 @@ impl GlobalState {
pub(crate) fn fetch_workspaces(&mut self, cause: Cause) {
tracing::info!(%cause, "will fetch workspaces");
self.task_pool.handle.spawn_with_sender({
self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, {
let linked_projects = self.config.linked_projects();
let detached_files = self.config.detached_files().to_vec();
let cargo_config = self.config.cargo();
@ -177,11 +217,30 @@ impl GlobalState {
it.clone(),
cargo_config.target.as_deref(),
&cargo_config.extra_env,
None,
))
}
})
.collect::<Vec<_>>();
let mut i = 0;
while i < workspaces.len() {
if let Ok(w) = &workspaces[i] {
let dupes: Vec<_> = workspaces
.iter()
.enumerate()
.skip(i + 1)
.filter_map(|(i, it)| {
it.as_ref().ok().filter(|ws| ws.eq_ignore_build_data(w)).map(|_| i)
})
.collect();
dupes.into_iter().rev().for_each(|d| {
_ = workspaces.remove(d);
});
}
i += 1;
}
if !detached_files.is_empty() {
workspaces.push(project_model::ProjectWorkspace::load_detached_files(
detached_files,
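The index-collecting loop above deduplicates successfully-loaded workspaces while leaving errors in place. A self-contained model of the same pass, with plain equality standing in for eq_ignore_build_data:

fn dedup_ok<T: PartialEq>(items: &mut Vec<Result<T, String>>) {
    let mut i = 0;
    while i < items.len() {
        if let Ok(w) = &items[i] {
            // Indices of later entries equal to the current one are collected first...
            let dupes: Vec<usize> = items
                .iter()
                .enumerate()
                .skip(i + 1)
                .filter_map(|(idx, it)| it.as_ref().ok().filter(|ws| *ws == w).map(|_| idx))
                .collect();
            // ...then removed back-to-front so the remaining indices stay valid.
            dupes.into_iter().rev().for_each(|d| {
                items.remove(d);
            });
        }
        i += 1;
    }
}

fn main() {
    let mut items: Vec<Result<u32, String>> = vec![Ok(1), Err("load failed".into()), Ok(1), Ok(2)];
    dedup_ok(&mut items);
    assert_eq!(items, vec![Ok(1), Err("load failed".into()), Ok(2)]);
}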
@ -201,7 +260,7 @@ impl GlobalState {
tracing::info!(%cause, "will fetch build data");
let workspaces = Arc::clone(&self.workspaces);
let config = self.config.cargo();
self.task_pool.handle.spawn_with_sender(move |sender| {
self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| {
sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();
let progress = {
@ -216,6 +275,63 @@ impl GlobalState {
});
}
pub(crate) fn fetch_proc_macros(&mut self, cause: Cause, paths: Vec<ProcMacroPaths>) {
tracing::info!(%cause, "will load proc macros");
let dummy_replacements = self.config.dummy_replacements().clone();
let proc_macro_clients = self.proc_macro_clients.clone();
self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| {
sender.send(Task::LoadProcMacros(ProcMacroProgress::Begin)).unwrap();
let dummy_replacements = &dummy_replacements;
let progress = {
let sender = sender.clone();
&move |msg| {
sender.send(Task::LoadProcMacros(ProcMacroProgress::Report(msg))).unwrap()
}
};
let mut res = FxHashMap::default();
let chain = proc_macro_clients
.iter()
.map(|res| res.as_ref().map_err(|e| e.to_string()))
.chain(iter::repeat_with(|| Err("Proc macros servers are not running".into())));
for (client, paths) in chain.zip(paths) {
res.extend(paths.into_iter().map(move |(crate_id, res)| {
(
crate_id,
res.map_or_else(
|_| Err("proc macro crate is missing dylib".to_owned()),
|(crate_name, path)| {
progress(path.display().to_string());
client.as_ref().map_err(Clone::clone).and_then(|client| {
load_proc_macro(
client,
&path,
crate_name
.as_deref()
.and_then(|crate_name| {
dummy_replacements.get(crate_name).map(|v| &**v)
})
.unwrap_or_default(),
)
})
},
),
)
}));
}
sender.send(Task::LoadProcMacros(ProcMacroProgress::End(res))).unwrap();
});
}
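The iterator chain above pairs each workspace's proc-macro paths with its server, padding with an error value so workspaces beyond the spawned servers degrade gracefully instead of being dropped by zip. A self-contained sketch of the pattern, with invented stand-in values:

use std::iter;

fn main() {
    // One spawned server, but two workspaces worth of paths.
    let clients: Vec<Result<&str, String>> = vec![Ok("proc-macro-srv for ws 0")];
    let paths_per_workspace: Vec<Vec<&str>> = vec![vec!["a.so"], vec!["b.so"]];

    let chain = clients
        .iter()
        .map(|res| res.as_ref().map_err(|e| e.to_string()))
        .chain(iter::repeat_with(|| Err("Proc macros servers are not running".into())));

    for (client, paths) in chain.zip(paths_per_workspace) {
        for path in paths {
            match &client {
                Ok(srv) => println!("load {path} with {srv}"),
                Err(e) => println!("skip {path}: {e}"),
            }
        }
    }
}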
pub(crate) fn set_proc_macros(&mut self, proc_macros: ProcMacros) {
let mut change = Change::new();
change.set_proc_macros(proc_macros);
self.analysis_host.apply_change(change);
}
pub(crate) fn switch_workspaces(&mut self, cause: Cause) {
let _p = profile::span("GlobalState::switch_workspaces");
tracing::info!(%cause, "will switch workspaces");
@ -303,43 +419,39 @@ impl GlobalState {
);
}
let mut change = Change::new();
let files_config = self.config.files();
let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);
if self.proc_macro_clients.is_empty() {
if let Some((path, path_manually_set)) = self.config.proc_macro_srv() {
if self.proc_macro_clients.is_empty() || !same_workspaces {
if self.config.expand_proc_macros() {
tracing::info!("Spawning proc-macro servers");
self.proc_macro_clients = self
.workspaces
.iter()
.map(|ws| {
let (path, args): (_, &[_]) = if path_manually_set {
tracing::debug!(
"Pro-macro server path explicitly set: {}",
path.display()
);
(path.clone(), &[])
} else {
match ws.find_sysroot_proc_macro_srv() {
Some(server_path) => (server_path, &[]),
None => (path.clone(), &["proc-macro"]),
}
};
tracing::info!(?args, "Using proc-macro server at {}", path.display(),);
ProcMacroServer::spawn(path.clone(), args).map_err(|err| {
let error = format!(
"Failed to run proc-macro server from path {}, error: {:?}",
path.display(),
err
);
tracing::error!(error);
error
// FIXME: use `Arc::from_iter` when it becomes available
self.proc_macro_clients = Arc::from(
self.workspaces
.iter()
.map(|ws| {
let path = match self.config.proc_macro_srv() {
Some(path) => path,
None => ws.find_sysroot_proc_macro_srv()?,
};
tracing::info!("Using proc-macro server at {}", path.display(),);
ProcMacroServer::spawn(path.clone()).map_err(|err| {
tracing::error!(
"Failed to run proc-macro server from path {}, error: {:?}",
path.display(),
err
);
anyhow::anyhow!(
"Failed to run proc-macro server from path {}, error: {:?}",
path.display(),
err
)
})
})
})
.collect()
.collect::<Vec<_>>(),
)
};
}
@ -353,56 +465,48 @@ impl GlobalState {
watch,
version: self.vfs_config_version,
});
self.source_root_config = project_folders.source_root_config;
// Create crate graph from all the workspaces
let crate_graph = {
let dummy_replacements = self.config.dummy_replacements();
let (crate_graph, proc_macro_paths) = {
let vfs = &mut self.vfs.write().0;
let loader = &mut self.loader;
let mem_docs = &self.mem_docs;
let mut load = move |path: &AbsPath| {
let mut load = |path: &AbsPath| {
let _p = profile::span("switch_workspaces::load");
let vfs_path = vfs::VfsPath::from(path.to_path_buf());
if !mem_docs.contains(&vfs_path) {
let contents = loader.handle.load_sync(path);
vfs.set_file_contents(vfs_path.clone(), contents);
match vfs.file_id(&vfs_path) {
Some(file_id) => Some(file_id),
None => {
if !self.mem_docs.contains(&vfs_path) {
let contents = loader.handle.load_sync(path);
vfs.set_file_contents(vfs_path.clone(), contents);
}
vfs.file_id(&vfs_path)
}
}
let res = vfs.file_id(&vfs_path);
if res.is_none() {
tracing::warn!("failed to load {}", path.display())
}
res
};
let mut crate_graph = CrateGraph::default();
for (idx, ws) in self.workspaces.iter().enumerate() {
let proc_macro_client = match self.proc_macro_clients.get(idx) {
Some(res) => res.as_ref().map_err(|e| &**e),
None => Err("Proc macros are disabled"),
};
let mut load_proc_macro = move |crate_name: &str, path: &AbsPath| {
load_proc_macro(
proc_macro_client,
path,
dummy_replacements.get(crate_name).map(|v| &**v).unwrap_or_default(),
)
};
crate_graph.extend(ws.to_crate_graph(
&mut load_proc_macro,
&mut load,
&self.config.cargo().extra_env,
));
let mut proc_macros = Vec::default();
for ws in &**self.workspaces {
let (other, mut crate_proc_macros) =
ws.to_crate_graph(&mut load, &self.config.extra_env());
crate_graph.extend(other, &mut crate_proc_macros);
proc_macros.push(crate_proc_macros);
}
crate_graph
(crate_graph, proc_macros)
};
let mut change = Change::new();
if self.config.expand_proc_macros() {
self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths);
}
change.set_crate_graph(crate_graph);
self.source_root_config = project_folders.source_root_config;
self.analysis_host.apply_change(change);
self.process_changes();
self.reload_flycheck();
tracing::info!("did switch workspaces");
}
@ -642,14 +746,12 @@ impl SourceRootConfig {
/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace`
/// with an identity dummy expander.
pub(crate) fn load_proc_macro(
server: Result<&ProcMacroServer, &str>,
server: &ProcMacroServer,
path: &AbsPath,
dummy_replace: &[Box<str>],
) -> ProcMacroLoadResult {
let server = server.map_err(ToOwned::to_owned)?;
let res: Result<Vec<_>, String> = (|| {
let dylib = MacroDylib::new(path.to_path_buf())
.map_err(|io| format!("Proc-macro dylib loading failed: {io}"))?;
let dylib = MacroDylib::new(path.to_path_buf());
let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
if vec.is_empty() {
return Err("proc macro library returned no proc macros".to_string());
@ -684,14 +786,14 @@ pub(crate) fn load_proc_macro(
proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
};
let expander: Arc<dyn ProcMacroExpander> =
let expander: sync::Arc<dyn ProcMacroExpander> =
if dummy_replace.iter().any(|replace| &**replace == name) {
match kind {
ProcMacroKind::Attr => Arc::new(IdentityExpander),
_ => Arc::new(EmptyExpander),
ProcMacroKind::Attr => sync::Arc::new(IdentityExpander),
_ => sync::Arc::new(EmptyExpander),
}
} else {
Arc::new(Expander(expander))
sync::Arc::new(Expander(expander))
};
ProcMacro { name, kind, expander }
}
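A reduced sketch of the expander-selection rule above: names listed in dummy_replace are neutralized, attribute macros falling back to an identity expansion (so the annotated item survives) and everything else to an empty one. Strings stand in for the expander trait objects, and the macro names are invented:

enum ProcMacroKind {
    Attr,
    FuncLike,
    Derive,
}

fn pick_expander(name: &str, kind: ProcMacroKind, dummy_replace: &[Box<str>]) -> &'static str {
    if dummy_replace.iter().any(|replace| &**replace == name) {
        match kind {
            // The annotated item must survive, so attributes pass it through.
            ProcMacroKind::Attr => "identity expander",
            // Function-like and derive output is simply dropped.
            _ => "empty expander",
        }
    } else {
        "server-backed expander"
    }
}

fn main() {
    let dummy: Vec<Box<str>> = vec!["noisy_macro".into()];
    assert_eq!(pick_expander("noisy_macro", ProcMacroKind::Attr, &dummy), "identity expander");
    assert_eq!(pick_expander("noisy_macro", ProcMacroKind::FuncLike, &dummy), "empty expander");
    assert_eq!(pick_expander("other", ProcMacroKind::Derive, &dummy), "server-backed expander");
}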
@ -13,7 +13,7 @@ macro_rules! define_semantic_token_types {
$($standard:ident),*$(,)?
}
custom {
$(($custom:ident, $string:literal)),*$(,)?
$(($custom:ident, $string:literal) $(=> $fallback:ident)?),*$(,)?
}
) => {
@ -24,6 +24,15 @@ macro_rules! define_semantic_token_types {
$(SemanticTokenType::$standard,)*
$($custom),*
];
pub(crate) fn standard_fallback_type(token: SemanticTokenType) -> Option<SemanticTokenType> {
$(
if token == $custom {
None $(.or(Some(SemanticTokenType::$fallback)))?
} else
)*
{ Some(token) }
}
};
}
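The `None $(.or(Some(SemanticTokenType::$fallback)))?` transcription is the interesting part: the optional repetition expands to nothing when no `=> fallback` was written, leaving plain None, and to None.or(Some(fallback)) otherwise. A reduced, self-contained model using strings instead of SemanticTokenType:

const OPERATOR: &str = "operator";
const KEYWORD: &str = "keyword";

macro_rules! define_tokens {
    ($(($custom:ident, $string:literal) $(=> $fallback:ident)?),* $(,)?) => {
        $(const $custom: &str = $string;)*

        fn standard_fallback_type(token: &str) -> Option<&str> {
            $(
                if token == $custom {
                    None $(.or(Some($fallback)))?
                } else
            )*
            { Some(token) }
        }
    };
}

define_tokens![
    (ARITHMETIC, "arithmetic") => OPERATOR,
    (SELF_KEYWORD, "selfKeyword") => KEYWORD,
    (LABEL, "label"),
];

fn main() {
    assert_eq!(standard_fallback_type("arithmetic"), Some("operator"));
    assert_eq!(standard_fallback_type("selfKeyword"), Some("keyword"));
    // No fallback was declared, so the token is dropped for such clients.
    assert_eq!(standard_fallback_type("label"), None);
    // Anything not listed as custom is assumed to already be standard.
    assert_eq!(standard_fallback_type("keyword"), Some("keyword"));
}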
@ -51,42 +60,46 @@ define_semantic_token_types![
custom {
(ANGLE, "angle"),
(ARITHMETIC, "arithmetic"),
(ATTRIBUTE, "attribute"),
(ATTRIBUTE_BRACKET, "attributeBracket"),
(BITWISE, "bitwise"),
(ARITHMETIC, "arithmetic") => OPERATOR,
(ATTRIBUTE, "attribute") => DECORATOR,
(ATTRIBUTE_BRACKET, "attributeBracket") => DECORATOR,
(BITWISE, "bitwise") => OPERATOR,
(BOOLEAN, "boolean"),
(BRACE, "brace"),
(BRACKET, "bracket"),
(BUILTIN_ATTRIBUTE, "builtinAttribute"),
(BUILTIN_ATTRIBUTE, "builtinAttribute") => DECORATOR,
(BUILTIN_TYPE, "builtinType"),
(CHAR, "character"),
(CHAR, "character") => STRING,
(COLON, "colon"),
(COMMA, "comma"),
(COMPARISON, "comparison"),
(COMPARISON, "comparison") => OPERATOR,
(CONST_PARAMETER, "constParameter"),
(DERIVE, "derive"),
(DERIVE_HELPER, "deriveHelper"),
(DERIVE, "derive") => DECORATOR,
(DERIVE_HELPER, "deriveHelper") => DECORATOR,
(DOT, "dot"),
(ESCAPE_SEQUENCE, "escapeSequence"),
(FORMAT_SPECIFIER, "formatSpecifier"),
(GENERIC, "generic"),
(ESCAPE_SEQUENCE, "escapeSequence") => STRING,
(FORMAT_SPECIFIER, "formatSpecifier") => STRING,
(GENERIC, "generic") => TYPE_PARAMETER,
(LABEL, "label"),
(LIFETIME, "lifetime"),
(LOGICAL, "logical"),
(MACRO_BANG, "macroBang"),
(LOGICAL, "logical") => OPERATOR,
(MACRO_BANG, "macroBang") => MACRO,
(PARENTHESIS, "parenthesis"),
(PUNCTUATION, "punctuation"),
(SELF_KEYWORD, "selfKeyword"),
(SELF_TYPE_KEYWORD, "selfTypeKeyword"),
(SELF_KEYWORD, "selfKeyword") => KEYWORD,
(SELF_TYPE_KEYWORD, "selfTypeKeyword") => KEYWORD,
(SEMICOLON, "semicolon"),
(TYPE_ALIAS, "typeAlias"),
(TOOL_MODULE, "toolModule"),
(TOOL_MODULE, "toolModule") => DECORATOR,
(UNION, "union"),
(UNRESOLVED_REFERENCE, "unresolvedReference"),
}
];
macro_rules! count_tts {
() => {0usize};
($_head:tt $($tail:tt)*) => {1usize + count_tts!($($tail)*)};
}
macro_rules! define_semantic_token_modifiers {
(
standard {
@ -105,6 +118,8 @@ macro_rules! define_semantic_token_modifiers {
$(SemanticTokenModifier::$standard,)*
$($custom),*
];
const LAST_STANDARD_MOD: usize = count_tts!($($standard)*);
};
}
@ -126,6 +141,7 @@ define_semantic_token_modifiers![
(INJECTED, "injected"),
(INTRA_DOC_LINK, "intraDocLink"),
(LIBRARY, "library"),
(MACRO_MODIFIER, "macro"),
(MUTABLE, "mutable"),
(PUBLIC, "public"),
(REFERENCE, "reference"),
@ -137,6 +153,13 @@ define_semantic_token_modifiers![
#[derive(Default)]
pub(crate) struct ModifierSet(pub(crate) u32);
impl ModifierSet {
pub(crate) fn standard_fallback(&mut self) {
// Remove all non-standard modifiers
self.0 = self.0 & !(!0u32 << LAST_STANDARD_MOD)
}
}
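A worked, self-contained check of the mask arithmetic above: with the standard modifiers occupying the low LAST_STANDARD_MOD bits, `!(!0u32 << LAST_STANDARD_MOD)` has exactly those bits set, so the AND clears every custom modifier. The concrete value of LAST_STANDARD_MOD here is invented:

const LAST_STANDARD_MOD: usize = 3; // invented count of standard modifiers

struct ModifierSet(u32);

impl ModifierSet {
    fn standard_fallback(&mut self) {
        // Keep only the low LAST_STANDARD_MOD bits, i.e. the standard modifiers.
        self.0 = self.0 & !(!0u32 << LAST_STANDARD_MOD)
    }
}

fn main() {
    // Bits 0..3 model standard modifiers; bit 5 models a custom one.
    let mut mods = ModifierSet(0b10_0101);
    mods.standard_fallback();
    assert_eq!(mods.0, 0b101); // only the standard bits survive
}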
impl ops::BitOrAssign<SemanticTokenModifier> for ModifierSet {
fn bitor_assign(&mut self, rhs: SemanticTokenModifier) {
let idx = SUPPORTED_MODIFIERS.iter().position(|it| it == &rhs).unwrap();
@ -1,53 +1,42 @@
//! A thin wrapper around `ThreadPool` to make sure that we join all things
//! properly.
//! A thin wrapper around [`stdx::thread::Pool`] which threads a sender through spawned jobs.
//! It is used in [`crate::global_state::GlobalState`] throughout the main loop.
use crossbeam_channel::Sender;
use stdx::thread::{Pool, ThreadIntent};
pub(crate) struct TaskPool<T> {
sender: Sender<T>,
inner: threadpool::ThreadPool,
pool: Pool,
}
impl<T> TaskPool<T> {
pub(crate) fn new_with_threads(sender: Sender<T>, threads: usize) -> TaskPool<T> {
const STACK_SIZE: usize = 8 * 1024 * 1024;
let inner = threadpool::Builder::new()
.thread_name("Worker".into())
.thread_stack_size(STACK_SIZE)
.num_threads(threads)
.build();
TaskPool { sender, inner }
TaskPool { sender, pool: Pool::new(threads) }
}
pub(crate) fn spawn<F>(&mut self, task: F)
pub(crate) fn spawn<F>(&mut self, intent: ThreadIntent, task: F)
where
F: FnOnce() -> T + Send + 'static,
T: Send + 'static,
{
self.inner.execute({
self.pool.spawn(intent, {
let sender = self.sender.clone();
move || sender.send(task()).unwrap()
})
}
pub(crate) fn spawn_with_sender<F>(&mut self, task: F)
pub(crate) fn spawn_with_sender<F>(&mut self, intent: ThreadIntent, task: F)
where
F: FnOnce(Sender<T>) + Send + 'static,
T: Send + 'static,
{
self.inner.execute({
self.pool.spawn(intent, {
let sender = self.sender.clone();
move || task(sender)
})
}
pub(crate) fn len(&self) -> usize {
self.inner.queued_count()
}
}
impl<T> Drop for TaskPool<T> {
fn drop(&mut self) {
self.inner.join()
self.pool.len()
}
}
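A self-contained model of the sender-threading idea, with std::sync::mpsc standing in for crossbeam_channel and a bare std::thread::spawn standing in for the intent-aware stdx pool:

use std::sync::mpsc::{channel, Sender};
use std::thread;

struct TaskPool<T> {
    sender: Sender<T>,
}

impl<T: Send + 'static> TaskPool<T> {
    fn spawn(&self, task: impl FnOnce() -> T + Send + 'static) {
        let sender = self.sender.clone();
        // The real pool reuses a fixed set of workers and tags them with a
        // ThreadIntent; a bare spawn keeps this sketch minimal.
        thread::spawn(move || sender.send(task()).unwrap());
    }
}

fn main() {
    let (sender, receiver) = channel();
    let pool = TaskPool { sender };
    pool.spawn(|| 1 + 1);
    // The result funnels back to the main loop over the shared channel.
    assert_eq!(receiver.recv().unwrap(), 2);
}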
@ -24,7 +24,7 @@ use crate::{
line_index::{LineEndings, LineIndex, PositionEncoding},
lsp_ext,
lsp_utils::invalid_params_error,
semantic_tokens,
semantic_tokens::{self, standard_fallback_type},
};
pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
@ -32,7 +32,7 @@ pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::P
match line_index.encoding {
PositionEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
PositionEncoding::Wide(enc) => {
let line_col = line_index.index.to_wide(enc, line_col);
let line_col = line_index.index.to_wide(enc, line_col).unwrap();
lsp_types::Position::new(line_col.line, line_col.col)
}
}
@ -279,7 +279,7 @@ fn completion_item(
let mut lsp_item = lsp_types::CompletionItem {
label: item.label.to_string(),
detail: item.detail.map(|it| it.to_string()),
detail: item.detail,
filter_text: Some(lookup),
kind: Some(completion_item_kind(item.kind)),
text_edit: Some(text_edit),
@ -306,12 +306,10 @@ fn completion_item(
let imports: Vec<_> = item
.import_to_add
.into_iter()
.filter_map(|import_edit| {
let import_path = &import_edit.import_path;
let import_name = import_path.segments().last()?;
.filter_map(|(import_path, import_name)| {
Some(lsp_ext::CompletionImport {
full_import_path: import_path.to_string(),
imported_name: import_name.to_string(),
full_import_path: import_path,
imported_name: import_name,
})
})
.collect();
@ -434,83 +432,23 @@ pub(crate) fn signature_help(
pub(crate) fn inlay_hint(
snap: &GlobalStateSnapshot,
line_index: &LineIndex,
render_colons: bool,
mut inlay_hint: InlayHint,
inlay_hint: InlayHint,
) -> Cancellable<lsp_types::InlayHint> {
match inlay_hint.kind {
InlayKind::Parameter if render_colons => inlay_hint.label.append_str(":"),
InlayKind::Type if render_colons => inlay_hint.label.prepend_str(": "),
InlayKind::ClosureReturnType => inlay_hint.label.prepend_str(" -> "),
InlayKind::Discriminant => inlay_hint.label.prepend_str(" = "),
_ => {}
}
let (label, tooltip) = inlay_hint_label(snap, inlay_hint.label)?;
Ok(lsp_types::InlayHint {
position: match inlay_hint.kind {
// before annotated thing
InlayKind::OpeningParenthesis
| InlayKind::Parameter
| InlayKind::Adjustment
| InlayKind::BindingMode => position(line_index, inlay_hint.range.start()),
// after annotated thing
InlayKind::ClosureReturnType
| InlayKind::Type
| InlayKind::Discriminant
| InlayKind::Chaining
| InlayKind::GenericParamList
| InlayKind::ClosingParenthesis
| InlayKind::AdjustmentPostfix
| InlayKind::Lifetime
| InlayKind::ClosingBrace => position(line_index, inlay_hint.range.end()),
position: match inlay_hint.position {
ide::InlayHintPosition::Before => position(line_index, inlay_hint.range.start()),
ide::InlayHintPosition::After => position(line_index, inlay_hint.range.end()),
},
padding_left: Some(match inlay_hint.kind {
InlayKind::Type => !render_colons,
InlayKind::Chaining | InlayKind::ClosingBrace => true,
InlayKind::ClosingParenthesis
| InlayKind::Discriminant
| InlayKind::OpeningParenthesis
| InlayKind::BindingMode
| InlayKind::ClosureReturnType
| InlayKind::GenericParamList
| InlayKind::Adjustment
| InlayKind::AdjustmentPostfix
| InlayKind::Lifetime
| InlayKind::Parameter => false,
}),
padding_right: Some(match inlay_hint.kind {
InlayKind::ClosingParenthesis
| InlayKind::OpeningParenthesis
| InlayKind::Chaining
| InlayKind::ClosureReturnType
| InlayKind::GenericParamList
| InlayKind::Adjustment
| InlayKind::AdjustmentPostfix
| InlayKind::Type
| InlayKind::Discriminant
| InlayKind::ClosingBrace => false,
InlayKind::BindingMode => {
matches!(&label, lsp_types::InlayHintLabel::String(s) if s != "&")
}
InlayKind::Parameter | InlayKind::Lifetime => true,
}),
padding_left: Some(inlay_hint.pad_left),
padding_right: Some(inlay_hint.pad_right),
kind: match inlay_hint.kind {
InlayKind::Parameter => Some(lsp_types::InlayHintKind::PARAMETER),
InlayKind::ClosureReturnType | InlayKind::Type | InlayKind::Chaining => {
Some(lsp_types::InlayHintKind::TYPE)
}
InlayKind::ClosingParenthesis
| InlayKind::Discriminant
| InlayKind::OpeningParenthesis
| InlayKind::BindingMode
| InlayKind::GenericParamList
| InlayKind::Lifetime
| InlayKind::Adjustment
| InlayKind::AdjustmentPostfix
| InlayKind::ClosingBrace => None,
InlayKind::Type | InlayKind::Chaining => Some(lsp_types::InlayHintKind::TYPE),
_ => None,
},
text_edits: None,
text_edits: inlay_hint.text_edit.map(|it| text_edit_vec(line_index, it)),
data: None,
tooltip,
label,
@ -580,6 +518,8 @@ pub(crate) fn semantic_tokens(
text: &str,
line_index: &LineIndex,
highlights: Vec<HlRange>,
semantics_tokens_augments_syntax_tokens: bool,
non_standard_tokens: bool,
) -> lsp_types::SemanticTokens {
let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
@ -589,7 +529,35 @@ pub(crate) fn semantic_tokens(
continue;
}
let (ty, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
if semantics_tokens_augments_syntax_tokens {
match highlight_range.highlight.tag {
HlTag::BoolLiteral
| HlTag::ByteLiteral
| HlTag::CharLiteral
| HlTag::Comment
| HlTag::Keyword
| HlTag::NumericLiteral
| HlTag::Operator(_)
| HlTag::Punctuation(_)
| HlTag::StringLiteral
| HlTag::None
if highlight_range.highlight.mods.is_empty() =>
{
continue
}
_ => (),
}
}
let (mut ty, mut mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
if !non_standard_tokens {
ty = match standard_fallback_type(ty) {
Some(ty) => ty,
None => continue,
};
mods.standard_fallback();
}
let token_index = semantic_tokens::type_index(ty);
let modifier_bitset = mods.0;
@ -710,6 +678,7 @@ fn semantic_token_type_and_modifiers(
HlMod::Injected => semantic_tokens::INJECTED,
HlMod::IntraDocLink => semantic_tokens::INTRA_DOC_LINK,
HlMod::Library => semantic_tokens::LIBRARY,
HlMod::Macro => semantic_tokens::MACRO_MODIFIER,
HlMod::Mutable => semantic_tokens::MUTABLE,
HlMod::Public => semantic_tokens::PUBLIC,
HlMod::Reference => semantic_tokens::REFERENCE,
@ -1215,6 +1184,14 @@ pub(crate) fn code_lens(
data: None,
})
}
if lens_config.interpret {
let command = command::interpret_single(&r);
acc.push(lsp_types::CodeLens {
range: annotation_range,
command: Some(command),
data: None,
})
}
}
AnnotationKind::HasImpls { pos: file_range, data } => {
if !client_commands_config.show_reference {
@ -1257,7 +1234,16 @@ pub(crate) fn code_lens(
acc.push(lsp_types::CodeLens {
range: annotation_range,
command,
data: Some(to_value(lsp_ext::CodeLensResolveData::Impls(goto_params)).unwrap()),
data: (|| {
let version = snap.url_file_version(&url)?;
Some(
to_value(lsp_ext::CodeLensResolveData {
version,
kind: lsp_ext::CodeLensResolveDataKind::Impls(goto_params),
})
.unwrap(),
)
})(),
})
}
AnnotationKind::HasReferences { pos: file_range, data } => {
@ -1287,7 +1273,16 @@ pub(crate) fn code_lens(
acc.push(lsp_types::CodeLens {
range: annotation_range,
command,
data: Some(to_value(lsp_ext::CodeLensResolveData::References(doc_pos)).unwrap()),
data: (|| {
let version = snap.url_file_version(&url)?;
Some(
to_value(lsp_ext::CodeLensResolveData {
version,
kind: lsp_ext::CodeLensResolveDataKind::References(doc_pos),
})
.unwrap(),
)
})(),
})
}
}
@ -1341,6 +1336,15 @@ pub(crate) mod command {
}
}
pub(crate) fn interpret_single(_runnable: &lsp_ext::Runnable) -> lsp_types::Command {
lsp_types::Command {
title: "Interpret".into(),
command: "rust-analyzer.interpretFunction".into(),
// FIXME: use the `_runnable` here.
arguments: Some(vec![]),
}
}
pub(crate) fn goto_location(
snap: &GlobalStateSnapshot,
nav: &NavigationTarget,
@ -1406,9 +1410,8 @@ pub(crate) fn rename_error(err: RenameError) -> crate::LspError {
#[cfg(test)]
mod tests {
use std::sync::Arc;
use ide::Analysis;
use triomphe::Arc;
use super::*;
@ -59,7 +59,7 @@ use std::collections::Spam;
"#,
)
.with_config(serde_json::json!({
"cargo": { "sysroot": "discover" }
"cargo": { "sysroot": "discover" },
}))
.server()
.wait_until_workspace_is_loaded();
@ -508,7 +508,7 @@ fn main() {}
#[test]
fn test_missing_module_code_action_in_json_project() {
if skip_slow_tests() {
// return;
return;
}
let tmp_dir = TestDir::new();
@ -612,7 +612,7 @@ fn main() {{}}
"#
))
.with_config(serde_json::json!({
"cargo": { "sysroot": "discover" }
"cargo": { "sysroot": "discover" },
}))
.server()
.wait_until_workspace_is_loaded();
@ -685,7 +685,7 @@ version = \"0.0.0\"
#[test]
fn out_dirs_check() {
if skip_slow_tests() {
// return;
return;
}
let server = Project::with_fixture(
@ -711,10 +711,21 @@ fn main() {
println!("cargo:rerun-if-changed=build.rs");
}
//- /src/main.rs
#[rustc_builtin_macro] macro_rules! include {}
#[rustc_builtin_macro] macro_rules! include_str {}
#[rustc_builtin_macro] macro_rules! concat {}
#[rustc_builtin_macro] macro_rules! env {}
#![allow(warnings)]
#![feature(rustc_attrs)]
#[rustc_builtin_macro] macro_rules! include {
($file:expr $(,)?) => {{ /* compiler built-in */ }};
}
#[rustc_builtin_macro] macro_rules! include_str {
($file:expr $(,)?) => {{ /* compiler built-in */ }};
}
#[rustc_builtin_macro] macro_rules! concat {
($($e:ident),+ $(,)?) => {{ /* compiler built-in */ }};
}
#[rustc_builtin_macro] macro_rules! env {
($name:expr $(,)?) => {{ /* compiler built-in */ }};
($name:expr, $error_msg:expr $(,)?) => {{ /* compiler built-in */ }};
}
include!(concat!(env!("OUT_DIR"), "/hello.rs"));
@ -741,6 +752,9 @@ fn main() {
"enable": true
},
"sysroot": null,
"extraEnv": {
"RUSTC_BOOTSTRAP": "1"
}
}
}))
.server()
@ -749,7 +763,7 @@ fn main() {
let res = server.send_request::<HoverRequest>(HoverParams {
text_document_position_params: TextDocumentPositionParams::new(
server.doc_id("src/main.rs"),
Position::new(19, 10),
Position::new(30, 10),
),
work_done_progress_params: Default::default(),
});
@ -758,7 +772,7 @@ fn main() {
let res = server.send_request::<HoverRequest>(HoverParams {
text_document_position_params: TextDocumentPositionParams::new(
server.doc_id("src/main.rs"),
Position::new(20, 10),
Position::new(31, 10),
),
work_done_progress_params: Default::default(),
});
@ -768,23 +782,23 @@ fn main() {
GotoDefinitionParams {
text_document_position_params: TextDocumentPositionParams::new(
server.doc_id("src/main.rs"),
Position::new(17, 9),
Position::new(28, 9),
),
work_done_progress_params: Default::default(),
partial_result_params: Default::default(),
},
json!([{
"originSelectionRange": {
"end": { "character": 10, "line": 17 },
"start": { "character": 8, "line": 17 }
"end": { "character": 10, "line": 28 },
"start": { "character": 8, "line": 28 }
},
"targetRange": {
"end": { "character": 9, "line": 8 },
"start": { "character": 0, "line": 7 }
"end": { "character": 9, "line": 19 },
"start": { "character": 0, "line": 18 }
},
"targetSelectionRange": {
"end": { "character": 8, "line": 8 },
"start": { "character": 7, "line": 8 }
"end": { "character": 8, "line": 19 },
"start": { "character": 7, "line": 19 }
},
"targetUri": "file:///[..]src/main.rs"
}]),
@ -794,23 +808,23 @@ fn main() {
GotoDefinitionParams {
text_document_position_params: TextDocumentPositionParams::new(
server.doc_id("src/main.rs"),
Position::new(18, 9),
Position::new(29, 9),
),
work_done_progress_params: Default::default(),
partial_result_params: Default::default(),
},
json!([{
"originSelectionRange": {
"end": { "character": 10, "line": 18 },
"start": { "character": 8, "line": 18 }
"end": { "character": 10, "line": 29 },
"start": { "character": 8, "line": 29 }
},
"targetRange": {
"end": { "character": 9, "line": 12 },
"start": { "character": 0, "line":11 }
"end": { "character": 9, "line": 23 },
"start": { "character": 0, "line": 22 }
},
"targetSelectionRange": {
"end": { "character": 8, "line": 12 },
"start": { "character": 7, "line": 12 }
"end": { "character": 8, "line": 23 },
"start": { "character": 7, "line": 23 }
},
"targetUri": "file:///[..]src/main.rs"
}]),
@ -818,8 +832,7 @@ fn main() {
}
#[test]
// FIXME: Re-enable once we can run proc-macro tests on rust-lang/rust-analyzer again
#[cfg(any())]
#[cfg(feature = "sysroot-abi")]
fn resolve_proc_macro() {
use expect_test::expect;
if skip_slow_tests() {
@ -837,6 +850,7 @@ edition = "2021"
bar = {path = "../bar"}
//- /foo/src/main.rs
#![feature(rustc_attrs, decl_macro)]
use bar::Bar;
#[rustc_builtin_macro]
@ -913,7 +927,7 @@ pub fn foo(_input: TokenStream) -> TokenStream {
let res = server.send_request::<HoverRequest>(HoverParams {
text_document_position_params: TextDocumentPositionParams::new(
server.doc_id("foo/src/main.rs"),
Position::new(10, 9),
Position::new(11, 9),
),
work_done_progress_params: Default::default(),
});
@ -1083,10 +1097,18 @@ version = "0.0.0"
//- /bar/src/lib.rs
pub fn bar() {}
//- /baz/Cargo.toml
[package]
name = "baz"
version = "0.0.0"
//- /baz/src/lib.rs
"#,
)
.root("foo")
.root("bar")
.root("baz")
.with_config(json!({
"files": {
"excludeDirs": ["foo", "bar"]
@ -9,11 +9,10 @@ use std::{
use crossbeam_channel::{after, select, Receiver};
use lsp_server::{Connection, Message, Notification, Request};
use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url};
use project_model::ProjectManifest;
use rust_analyzer::{config::Config, lsp_ext, main_loop};
use serde::Serialize;
use serde_json::{json, to_string_pretty, Value};
use test_utils::Fixture;
use test_utils::FixtureWithProjectMeta;
use vfs::AbsPathBuf;
use crate::testdir::TestDir;
@ -37,8 +36,12 @@ impl<'a> Project<'a> {
"sysroot": null,
// Can't use test binary as rustc wrapper.
"buildScripts": {
"useRustcWrapper": false
"useRustcWrapper": false,
"enable": false,
},
},
"procMacro": {
"enable": false,
}
}),
}
@ -80,10 +83,12 @@ impl<'a> Project<'a> {
profile::init_from(crate::PROFILE);
});
let (mini_core, proc_macros, fixtures) = Fixture::parse(self.fixture);
assert!(proc_macros.is_empty());
let FixtureWithProjectMeta { fixture, mini_core, proc_macro_names, toolchain } =
FixtureWithProjectMeta::parse(self.fixture);
assert!(proc_macro_names.is_empty());
assert!(mini_core.is_none());
for entry in fixtures {
assert!(toolchain.is_none());
for entry in fixture {
let path = tmp_dir.path().join(&entry.path['/'.len_utf8()..]);
fs::create_dir_all(path.parent().unwrap()).unwrap();
fs::write(path.as_path(), entry.text.as_bytes()).unwrap();
@ -95,10 +100,6 @@ impl<'a> Project<'a> {
if roots.is_empty() {
roots.push(tmp_dir_path.clone());
}
let discovered_projects = roots
.into_iter()
.map(|it| ProjectManifest::discover_single(&it).unwrap())
.collect::<Vec<_>>();
let mut config = Config::new(
tmp_dir_path,
@ -138,10 +139,10 @@ impl<'a> Project<'a> {
})),
..Default::default()
},
Vec::new(),
roots,
);
config.discovered_projects = Some(discovered_projects);
config.update(self.config).expect("invalid config");
config.rediscover_workspaces();
Server::new(tmp_dir, config)
}
@ -154,7 +155,7 @@ pub(crate) fn project(fixture: &str) -> Server {
pub(crate) struct Server {
req_id: Cell<i32>,
messages: RefCell<Vec<Message>>,
_thread: jod_thread::JoinHandle<()>,
_thread: stdx::thread::JoinHandle,
client: Connection,
/// XXX: remove the tempdir last
dir: TestDir,
@ -164,7 +165,7 @@ impl Server {
fn new(dir: TestDir, config: Config) -> Server {
let (connection, client) = Connection::memory();
let _thread = jod_thread::Builder::new()
let _thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
.name("test server".to_string())
.spawn(move || main_loop(config, connection).unwrap())
.expect("failed to spawn a thread");
@ -251,6 +252,9 @@ impl Server {
.clone()
.extract::<lsp_ext::ServerStatusParams>("experimental/serverStatus")
.unwrap();
if status.health != lsp_ext::Health::Ok {
panic!("server errored/warned while loading workspace: {:?}", status.message);
}
status.quiescent
}
_ => false,
@ -257,6 +257,8 @@ fn check_dbg(path: &Path, text: &str) {
"ide-db/src/generated/lints.rs",
// test for doc test for remove_dbg
"src/tests/generated.rs",
// `expect!` string can contain `dbg!` (due to .dbg postfix)
"ide-completion/src/tests/special.rs",
];
if need_dbg.iter().any(|p| path.ends_with(p)) {
return;