Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-29 13:25:09 +00:00

⬆️ rust-analyzer

parent 3a57388d13
commit 4f55ebbd4f
122 changed files with 2885 additions and 1093 deletions
@@ -25,7 +25,7 @@ itertools = "0.10.3"
 scip = "0.1.1"
 lsp-types = { version = "0.93.1", features = ["proposed"] }
 parking_lot = "0.12.1"
-xflags = "0.2.4"
+xflags = "0.3.0"
 oorandom = "11.1.3"
 rustc-hash = "1.1.0"
 serde = { version = "1.0.137", features = ["derive"] }
@@ -34,7 +34,7 @@ threadpool = "1.8.1"
 rayon = "1.5.3"
 num_cpus = "1.13.1"
 mimalloc = { version = "0.1.29", default-features = false, optional = true }
-lsp-server = { version = "0.6.0", path = "../../lib/lsp-server" }
+lsp-server = { version = "0.7.0", path = "../../lib/lsp-server" }
 tracing = "0.1.35"
 tracing-subscriber = { version = "0.3.14", default-features = false, features = [
     "env-filter",
@@ -87,7 +87,6 @@ jemalloc = ["jemallocator", "profile/jemalloc"]
 force-always-assert = ["always-assert/force"]
 in-rust-tree = [
     "proc-macro-srv/sysroot-abi",
     "sourcegen/in-rust-tree",
     "ide/in-rust-tree",
     "syntax/in-rust-tree",
 ]
@@ -37,16 +37,15 @@ fn main() {
         process::exit(code);
     }

-    if let Err(err) = try_main() {
+    let flags = flags::RustAnalyzer::from_env_or_exit();
+    if let Err(err) = try_main(flags) {
         tracing::error!("Unexpected error: {}", err);
         eprintln!("{}", err);
         process::exit(101);
     }
 }

-fn try_main() -> Result<()> {
-    let flags = flags::RustAnalyzer::from_env()?;
-
+fn try_main(flags: flags::RustAnalyzer) -> Result<()> {
     #[cfg(debug_assertions)]
     if flags.wait_dbg || env::var("RA_WAIT_DBG").is_ok() {
         #[allow(unused_mut)]
@@ -76,10 +75,6 @@ fn try_main() -> Result<()> {
                 println!("rust-analyzer {}", rust_analyzer::version());
                 return Ok(());
             }
-            if cmd.help {
-                println!("{}", flags::RustAnalyzer::HELP);
-                return Ok(());
-            }
             with_extra_thread("LspServer", run_server)?;
         }
         flags::RustAnalyzerCmd::ProcMacro(flags::ProcMacro) => {
@@ -4,7 +4,7 @@ use std::mem;

 use cfg::{CfgAtom, CfgExpr};
 use ide::{FileId, RunnableKind, TestId};
-use project_model::{self, ManifestPath, TargetKind};
+use project_model::{self, CargoFeatures, ManifestPath, TargetKind};
 use vfs::AbsPathBuf;

 use crate::{global_state::GlobalStateSnapshot, Result};
@@ -35,41 +35,41 @@ impl CargoTargetSpec {

         match kind {
             RunnableKind::Test { test_id, attr } => {
-                args.push("test".to_string());
+                args.push("test".to_owned());
                 extra_args.push(test_id.to_string());
                 if let TestId::Path(_) = test_id {
-                    extra_args.push("--exact".to_string());
+                    extra_args.push("--exact".to_owned());
                 }
-                extra_args.push("--nocapture".to_string());
+                extra_args.push("--nocapture".to_owned());
                 if attr.ignore {
-                    extra_args.push("--ignored".to_string());
+                    extra_args.push("--ignored".to_owned());
                 }
             }
             RunnableKind::TestMod { path } => {
-                args.push("test".to_string());
-                extra_args.push(path.to_string());
-                extra_args.push("--nocapture".to_string());
+                args.push("test".to_owned());
+                extra_args.push(path.clone());
+                extra_args.push("--nocapture".to_owned());
             }
             RunnableKind::Bench { test_id } => {
-                args.push("bench".to_string());
+                args.push("bench".to_owned());
                 extra_args.push(test_id.to_string());
                 if let TestId::Path(_) = test_id {
-                    extra_args.push("--exact".to_string());
+                    extra_args.push("--exact".to_owned());
                 }
-                extra_args.push("--nocapture".to_string());
+                extra_args.push("--nocapture".to_owned());
             }
             RunnableKind::DocTest { test_id } => {
-                args.push("test".to_string());
-                args.push("--doc".to_string());
+                args.push("test".to_owned());
+                args.push("--doc".to_owned());
                 extra_args.push(test_id.to_string());
-                extra_args.push("--nocapture".to_string());
+                extra_args.push("--nocapture".to_owned());
             }
             RunnableKind::Bin => {
                 let subcommand = match spec {
                     Some(CargoTargetSpec { target_kind: TargetKind::Test, .. }) => "test",
                     _ => "run",
                 };
-                args.push(subcommand.to_string());
+                args.push(subcommand.to_owned());
             }
         }

@@ -82,29 +82,35 @@ impl CargoTargetSpec {
         };

         let cargo_config = snap.config.cargo();
-        if cargo_config.all_features {
-            args.push("--all-features".to_string());
-
-            for feature in target_required_features {
-                args.push("--features".to_string());
-                args.push(feature);
-            }
-        } else {
-            let mut features = Vec::new();
-            if let Some(cfg) = cfg.as_ref() {
-                required_features(cfg, &mut features);
+        match &cargo_config.features {
+            CargoFeatures::All => {
+                args.push("--all-features".to_owned());
+                for feature in target_required_features {
+                    args.push("--features".to_owned());
+                    args.push(feature);
+                }
+            }
+            CargoFeatures::Selected { features, no_default_features } => {
+                let mut feats = Vec::new();
+                if let Some(cfg) = cfg.as_ref() {
+                    required_features(cfg, &mut feats);
                 }

-            features.extend(cargo_config.features);
-            features.extend(target_required_features);
+                feats.extend(features.iter().cloned());
+                feats.extend(target_required_features);

-            features.dedup();
-            for feature in features {
-                args.push("--features".to_string());
-                args.push(feature);
+                feats.dedup();
+                for feature in feats {
+                    args.push("--features".to_owned());
+                    args.push(feature);
                 }
+
+                if *no_default_features {
+                    args.push("--no-default-features".to_owned());
+                }
+            }
         }

         Ok((args, extra_args))
     }

@@ -136,7 +142,7 @@ impl CargoTargetSpec {
     }

     pub(crate) fn push_to(self, buf: &mut Vec<String>, kind: &RunnableKind) {
-        buf.push("--package".to_string());
+        buf.push("--package".to_owned());
         buf.push(self.package);

         // Can't mix --doc with other target flags
@@ -145,23 +151,23 @@
         }
         match self.target_kind {
             TargetKind::Bin => {
-                buf.push("--bin".to_string());
+                buf.push("--bin".to_owned());
                 buf.push(self.target);
             }
             TargetKind::Test => {
-                buf.push("--test".to_string());
+                buf.push("--test".to_owned());
                 buf.push(self.target);
             }
             TargetKind::Bench => {
-                buf.push("--bench".to_string());
+                buf.push("--bench".to_owned());
                 buf.push(self.target);
             }
             TargetKind::Example => {
-                buf.push("--example".to_string());
+                buf.push("--example".to_owned());
                 buf.push(self.target);
             }
             TargetKind::Lib => {
-                buf.push("--lib".to_string());
+                buf.push("--lib".to_owned());
             }
             TargetKind::Other | TargetKind::BuildScript => (),
         }
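Note: the feature handling above matches on project_model's CargoFeatures, whose definition is not part of this diff. A minimal sketch of its shape, inferred only from how it is constructed and matched here (the real enum lives in the project-model crate and may carry different derives):

    // Shape inferred from usage in this diff; not copied from project-model.
    #[derive(Clone, Debug)]
    pub enum CargoFeatures {
        /// Pass `--all-features` to cargo.
        All,
        /// Pass an explicit `--features` list, optionally with `--no-default-features`.
        Selected {
            features: Vec<String>,
            no_default_features: bool,
        },
    }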
@@ -24,7 +24,7 @@ use ide_db::base_db::{
 use itertools::Itertools;
 use oorandom::Rand32;
 use profile::{Bytes, StopWatch};
-use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
+use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustcSource};
 use rayon::prelude::*;
 use rustc_hash::FxHashSet;
 use stdx::format_to;
@@ -55,7 +55,10 @@ impl flags::AnalysisStats {
         };

         let mut cargo_config = CargoConfig::default();
-        cargo_config.no_sysroot = self.no_sysroot;
+        cargo_config.sysroot = match self.no_sysroot {
+            true => None,
+            false => Some(RustcSource::Discover),
+        };
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: !self.disable_build_scripts,
             with_proc_macro: !self.disable_proc_macros,
@@ -81,7 +84,7 @@ impl flags::AnalysisStats {
         };

         let (host, vfs, _proc_macro) =
-            load_workspace(workspace, &cargo_config, &load_cargo_config)?;
+            load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
         let db = host.raw_database();
         eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());
         eprint!(" (metadata {}", metadata_time);
@@ -31,8 +31,6 @@ xflags::xflags! {
         default cmd lsp-server {
             /// Print version.
             optional --version
-            /// Print help.
-            optional -h, --help

             /// Dump a LSP config JSON schema.
             optional --print-config-schema
@@ -54,10 +52,10 @@
         }

         /// Batch typecheck project and print summary statistics
-        cmd analysis-stats
+        cmd analysis-stats {
             /// Directory with Cargo.toml.
             required path: PathBuf
-        {
+
             optional --output format: OutputFormat

             /// Randomize order in which crates, modules, and items are processed.
@@ -84,38 +82,37 @@ xflags::xflags! {
             optional --skip-inference
         }

-        cmd diagnostics
+        cmd diagnostics {
             /// Directory with Cargo.toml.
             required path: PathBuf
-        {
+
             /// Don't run build scripts or load `OUT_DIR` values by running `cargo check` before analysis.
             optional --disable-build-scripts
             /// Don't use expand proc macros.
             optional --disable-proc-macros
         }

-        cmd ssr
+        cmd ssr {
             /// A structured search replace rule (`$a.foo($b) ==> bar($a, $b)`)
             repeated rule: SsrRule
-        {}
+        }

-        cmd search
+        cmd search {
             /// A structured search replace pattern (`$a.foo($b)`)
             repeated pattern: SsrPattern
-        {
             /// Prints debug information for any nodes with source exactly equal to snippet.
             optional --debug snippet: String
         }

         cmd proc-macro {}

-        cmd lsif
+        cmd lsif {
             required path: PathBuf
-        {}
+        }

-        cmd scip
+        cmd scip {
             required path: PathBuf
-        {}
+        }
     }
 }

@@ -150,7 +147,6 @@ pub enum RustAnalyzerCmd {
 #[derive(Debug)]
 pub struct LspServer {
     pub version: bool,
-    pub help: bool,
     pub print_config_schema: bool,
 }

@@ -218,7 +214,10 @@ pub struct Scip {
 }

 impl RustAnalyzer {
-    pub const HELP: &'static str = Self::HELP_;
     #[allow(dead_code)]
+    pub fn from_env_or_exit() -> Self {
+        Self::from_env_or_exit_()
+    }

+    #[allow(dead_code)]
     pub fn from_env() -> xflags::Result<Self> {
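Note: the flags changes above track xflags 0.3, which moves a command's arguments and switches inside braces on the `cmd` line and supplies a `from_env_or_exit` constructor (wrapped by the hand-written method above and called from main.rs). A minimal standalone sketch of that pattern, under those assumptions — the command and flags here are invented for illustration:

    use std::path::PathBuf;

    xflags::xflags! {
        // xflags 0.3 syntax: the body sits in braces directly after the command name.
        cmd demo {
            optional --verbose
            required path: PathBuf
        }
    }

    fn main() {
        // The generated constructor prints usage and exits on `--help` by itself,
        // which is why main.rs no longer needs its own `cmd.help` branch.
        let flags = Demo::from_env_or_exit();
        println!("verbose = {}, path = {}", flags.verbose, flags.path.display());
    }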
@@ -6,7 +6,7 @@ use anyhow::Result;
 use crossbeam_channel::{unbounded, Receiver};
 use hir::db::DefDatabase;
 use ide::{AnalysisHost, Change};
-use ide_db::base_db::CrateGraph;
+use ide_db::{base_db::CrateGraph, FxHashMap};
 use proc_macro_api::ProcMacroServer;
 use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
 use vfs::{loader::Handle, AbsPath, AbsPathBuf};
@@ -38,7 +38,7 @@ pub fn load_workspace_at(
         workspace.set_build_scripts(build_scripts)
     }

-    load_workspace(workspace, cargo_config, load_config)
+    load_workspace(workspace, &cargo_config.extra_env, load_config)
 }

 // Note: Since this function is used by external tools that use rust-analyzer as a library
@@ -48,7 +48,7 @@ pub fn load_workspace_at(
 // these tools need access to `ProjectWorkspace`, too, which `load_workspace_at` hides.
 pub fn load_workspace(
     ws: ProjectWorkspace,
-    cargo_config: &CargoConfig,
+    extra_env: &FxHashMap<String, String>,
     load_config: &LoadCargoConfig,
 ) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
     let (sender, receiver) = unbounded();
@@ -60,10 +60,26 @@ pub fn load_workspace(
     };

     let proc_macro_client = if load_config.with_proc_macro {
-        let path = AbsPathBuf::assert(std::env::current_exe()?);
-        Ok(ProcMacroServer::spawn(path, &["proc-macro"]).unwrap())
+        let mut path = AbsPathBuf::assert(std::env::current_exe()?);
+        let mut args = vec!["proc-macro"];
+
+        if let ProjectWorkspace::Cargo { sysroot, .. } | ProjectWorkspace::Json { sysroot, .. } =
+            &ws
+        {
+            if let Some(sysroot) = sysroot.as_ref() {
+                let standalone_server_name =
+                    format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
+                let server_path = sysroot.root().join("libexec").join(&standalone_server_name);
+                if std::fs::metadata(&server_path).is_ok() {
+                    path = server_path;
+                    args = vec![];
+                }
+            }
+        }
+
+        ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|e| e.to_string())
     } else {
-        Err("proc macro server not started".to_owned())
+        Err("proc macro server disabled".to_owned())
     };

     let crate_graph = ws.to_crate_graph(
@@ -76,7 +92,7 @@ pub fn load_workspace(
             vfs.set_file_contents(path.clone(), contents);
             vfs.file_id(&path)
         },
-        cargo_config,
+        extra_env,
     );

     let project_folders = ProjectFolders::new(&[ws], &[]);
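Note for library consumers of this module: `load_workspace` now takes the extra-environment map instead of the whole `CargoConfig`. A hedged sketch of the new call shape — the import path is assumed from this diff's context and error handling is elided:

    use project_model::{CargoConfig, ProjectWorkspace};
    // Path assumed: the cli::load_cargo module this diff touches.
    use rust_analyzer::cli::load_cargo::{load_workspace, LoadCargoConfig};

    fn load(
        workspace: ProjectWorkspace,
        cargo_config: &CargoConfig,
        load_cargo_config: &LoadCargoConfig,
    ) -> anyhow::Result<()> {
        // Before this change the second argument was `cargo_config` itself.
        let (host, _vfs, _proc_macro) =
            load_workspace(workspace, &cargo_config.extra_env, load_cargo_config)?;
        let _analysis = host.analysis();
        Ok(())
    }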
@@ -300,7 +300,7 @@ impl flags::Lsif {
         let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;

         let (host, vfs, _proc_macro) =
-            load_workspace(workspace, &cargo_config, &load_cargo_config)?;
+            load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
         let db = host.raw_database();
         let analysis = host.analysis();

@@ -40,7 +40,8 @@ impl flags::Scip {

         let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;

-        let (host, vfs, _) = load_workspace(workspace, &cargo_config, &load_cargo_config)?;
+        let (host, vfs, _) =
+            load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
         let db = host.raw_database();
         let analysis = host.analysis();

@@ -7,7 +7,7 @@
 //! configure the server itself, feature flags are passed into analysis, and
 //! tweak things like automatic insertion of `()` in completions.

-use std::{ffi::OsString, fmt, iter, path::PathBuf};
+use std::{fmt, iter, path::PathBuf};

 use flycheck::FlycheckConfig;
 use ide::{
@@ -22,7 +22,8 @@ use ide_db::{
 use itertools::Itertools;
 use lsp_types::{ClientCapabilities, MarkupKind};
 use project_model::{
-    CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource, UnsetTestCrates,
+    CargoConfig, CargoFeatures, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource,
+    UnsetTestCrates,
 };
 use rustc_hash::{FxHashMap, FxHashSet};
 use serde::{de::DeserializeOwned, Deserialize};
@@ -90,11 +91,16 @@ config_data! {
         /// List of features to activate.
         ///
         /// Set this to `"all"` to pass `--all-features` to cargo.
-        cargo_features: CargoFeatures = "[]",
+        cargo_features: CargoFeaturesDef = "[]",
         /// Whether to pass `--no-default-features` to cargo.
         cargo_noDefaultFeatures: bool = "false",
-        /// Internal config for debugging, disables loading of sysroot crates.
-        cargo_noSysroot: bool = "false",
+        /// Relative path to the sysroot, or "discover" to try to automatically find it via
+        /// "rustc --print sysroot".
+        ///
+        /// Unsetting this disables sysroot loading.
+        ///
+        /// This option does not take effect until rust-analyzer is restarted.
+        cargo_sysroot: Option<String> = "\"discover\"",
         /// Compilation target override (target triple).
         cargo_target: Option<String> = "null",
         /// Unsets `#[cfg(test)]` for the specified crates.
@@ -109,12 +115,13 @@ config_data! {
         /// Extra arguments for `cargo check`.
         checkOnSave_extraArgs: Vec<String> = "[]",
         /// Extra environment variables that will be set when running `cargo check`.
         /// Extends `#rust-analyzer.cargo.extraEnv#`.
         checkOnSave_extraEnv: FxHashMap<String, String> = "{}",
         /// List of features to activate. Defaults to
         /// `#rust-analyzer.cargo.features#`.
+        ///
         /// Set to `"all"` to pass `--all-features` to Cargo.
-        checkOnSave_features: Option<CargoFeatures> = "null",
+        checkOnSave_features: Option<CargoFeaturesDef> = "null",
         /// Whether to pass `--no-default-features` to Cargo. Defaults to
         /// `#rust-analyzer.cargo.noDefaultFeatures#`.
         checkOnSave_noDefaultFeatures: Option<bool> = "null",
@@ -975,15 +982,17 @@ impl Config {
         self.data.lru_capacity
     }

-    pub fn proc_macro_srv(&self) -> Option<(AbsPathBuf, Vec<OsString>)> {
+    pub fn proc_macro_srv(&self) -> Option<(AbsPathBuf, /* is path explicitly set */ bool)> {
         if !self.data.procMacro_enable {
             return None;
         }
-        let path = match &self.data.procMacro_server {
-            Some(it) => self.root_path.join(it),
-            None => AbsPathBuf::assert(std::env::current_exe().ok()?),
-        };
-        Some((path, vec!["proc-macro".into()]))
+        Some(match &self.data.procMacro_server {
+            Some(it) => (
+                AbsPathBuf::try_from(it.clone()).unwrap_or_else(|path| self.root_path.join(path)),
+                true,
+            ),
+            None => (AbsPathBuf::assert(std::env::current_exe().ok()?), false),
+        })
     }

     pub fn dummy_replacements(&self) -> &FxHashMap<Box<str>, Box<[Box<str>]>> {
@@ -1026,16 +1035,24 @@ impl Config {
                 RustcSource::Path(self.root_path.join(rustc_src))
             }
         });
+        let sysroot = self.data.cargo_sysroot.as_ref().map(|sysroot| {
+            if sysroot == "discover" {
+                RustcSource::Discover
+            } else {
+                RustcSource::Path(self.root_path.join(sysroot))
+            }
+        });

         CargoConfig {
-            no_default_features: self.data.cargo_noDefaultFeatures,
-            all_features: matches!(self.data.cargo_features, CargoFeatures::All),
             features: match &self.data.cargo_features {
-                CargoFeatures::All => vec![],
-                CargoFeatures::Listed(it) => it.clone(),
+                CargoFeaturesDef::All => CargoFeatures::All,
+                CargoFeaturesDef::Selected(features) => CargoFeatures::Selected {
+                    features: features.clone(),
+                    no_default_features: self.data.cargo_noDefaultFeatures,
+                },
             },
             target: self.data.cargo_target.clone(),
-            no_sysroot: self.data.cargo_noSysroot,
+            sysroot,
             rustc_source,
             unset_test_crates: UnsetTestCrates::Only(self.data.cargo_unsetTest.clone()),
             wrap_rustc_in_build_scripts: self.data.cargo_buildScripts_useRustcWrapper,
@@ -1086,7 +1103,7 @@ impl Config {
                 .unwrap_or(self.data.cargo_noDefaultFeatures),
             all_features: matches!(
                 self.data.checkOnSave_features.as_ref().unwrap_or(&self.data.cargo_features),
-                CargoFeatures::All
+                CargoFeaturesDef::All
             ),
             features: match self
                 .data
@@ -1094,8 +1111,8 @@ impl Config {
                 .clone()
                 .unwrap_or_else(|| self.data.cargo_features.clone())
             {
-                CargoFeatures::All => vec![],
-                CargoFeatures::Listed(it) => it,
+                CargoFeaturesDef::All => vec![],
+                CargoFeaturesDef::Selected(it) => it,
             },
             extra_args: self.data.checkOnSave_extraArgs.clone(),
             extra_env: self.check_on_save_extra_env(),
@@ -1564,10 +1581,10 @@ enum CallableCompletionDef {

 #[derive(Deserialize, Debug, Clone)]
 #[serde(untagged)]
-enum CargoFeatures {
+enum CargoFeaturesDef {
     #[serde(deserialize_with = "de_unit_v::all")]
     All,
-    Listed(Vec<String>),
+    Selected(Vec<String>),
 }

 #[derive(Deserialize, Debug, Clone)]
@@ -1912,7 +1929,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
                     "Only show mutable reborrow hints."
                 ]
             },
-            "CargoFeatures" => set! {
+            "CargoFeaturesDef" => set! {
                 "anyOf": [
                     {
                         "type": "string",
@@ -1929,7 +1946,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
                     }
                 ],
             },
-            "Option<CargoFeatures>" => set! {
+            "Option<CargoFeaturesDef>" => set! {
                 "anyOf": [
                     {
                         "type": "string",
@@ -95,22 +95,22 @@ pub(crate) fn annotation(

     match resolve {
         lsp_ext::CodeLensResolveData::Impls(params) => {
-            let file_id =
-                snap.url_to_file_id(&params.text_document_position_params.text_document.uri)?;
+            let pos @ FilePosition { file_id, .. } =
+                file_position(snap, params.text_document_position_params)?;
             let line_index = snap.file_line_index(file_id)?;

             Ok(Annotation {
                 range: text_range(&line_index, code_lens.range)?,
-                kind: AnnotationKind::HasImpls { file_id, data: None },
+                kind: AnnotationKind::HasImpls { pos, data: None },
             })
         }
         lsp_ext::CodeLensResolveData::References(params) => {
-            let file_id = snap.url_to_file_id(&params.text_document.uri)?;
+            let pos @ FilePosition { file_id, .. } = file_position(snap, params)?;
             let line_index = snap.file_line_index(file_id)?;

             Ok(Annotation {
                 range: text_range(&line_index, code_lens.range)?,
-                kind: AnnotationKind::HasReferences { file_id, data: None },
+                kind: AnnotationKind::HasReferences { pos, data: None },
             })
         }
     }
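Note: `pos @ FilePosition { file_id, .. }` binds the whole position as `pos` while also pulling out `file_id` in the same pattern. A self-contained illustration with a stand-in struct (not ide's real `FilePosition`):

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct FilePosition {
        file_id: u32,
        offset: u32,
    }

    fn main() {
        let value = FilePosition { file_id: 7, offset: 42 };
        // `pos` keeps the whole value, `file_id` is destructured from it.
        let pos @ FilePosition { file_id, .. } = value;
        assert_eq!(file_id, 7);
        assert_eq!(pos, value);
    }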
@@ -185,11 +185,48 @@ impl GlobalState {
         let (change, changed_files) = {
             let mut change = Change::new();
             let (vfs, line_endings_map) = &mut *self.vfs.write();
-            let changed_files = vfs.take_changes();
+            let mut changed_files = vfs.take_changes();
             if changed_files.is_empty() {
                 return false;
             }

+            // important: this needs to be a stable sort, the order between changes is relevant
+            // for the same file ids
+            changed_files.sort_by_key(|file| file.file_id);
+            // We need to fix up the changed events a bit, if we have a create or modify for a file
+            // id that is followed by a delete we actually no longer observe the file text from the
+            // create or modify which may cause problems later on
+            changed_files.dedup_by(|a, b| {
+                use vfs::ChangeKind::*;
+
+                if a.file_id != b.file_id {
+                    return false;
+                }
+
+                match (a.change_kind, b.change_kind) {
+                    // duplicate can be merged
+                    (Create, Create) | (Modify, Modify) | (Delete, Delete) => true,
+                    // just leave the create, modify is irrelevant
+                    (Create, Modify) => {
+                        std::mem::swap(a, b);
+                        true
+                    }
+                    // modify becomes irrelevant if the file is deleted
+                    (Modify, Delete) => true,
+                    // we should fully remove this occurrence,
+                    // but leaving just a delete works as well
+                    (Create, Delete) => true,
+                    // this is equivalent to a modify
+                    (Delete, Create) => {
+                        a.change_kind = Modify;
+                        true
+                    }
+                    // can't really occur
+                    (Modify, Create) => false,
+                    (Delete, Modify) => false,
+                }
+            });
+
             for file in &changed_files {
                 if let Some(path) = vfs.file_path(file.file_id).as_path() {
                     let path = path.to_path_buf();
@@ -317,6 +354,10 @@ impl GlobalState {
         }
     }

+    pub(crate) fn is_completed(&self, request: &lsp_server::Request) -> bool {
+        self.req_queue.incoming.is_completed(&request.id)
+    }
+
     fn send(&mut self, message: lsp_server::Message) {
         self.sender.send(message).unwrap()
     }
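Note: the compaction above is built on `Vec::dedup_by`, which walks consecutive pairs and passes the element that will be dropped as the first closure argument and the already-kept element as the second; returning `true` removes the first argument. A small self-contained example of that behaviour:

    fn main() {
        // Consecutive events for the same id collapse to the first one kept.
        let mut events = vec![(1, "create"), (1, "modify"), (2, "modify"), (2, "modify")];
        events.dedup_by(|dropped, kept| dropped.0 == kept.0);
        assert_eq!(events, [(1, "create"), (2, "modify")]);
    }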
@@ -425,7 +425,9 @@
     fn handle_task(&mut self, prime_caches_progress: &mut Vec<PrimeCachesProgress>, task: Task) {
         match task {
             Task::Response(response) => self.respond(response),
-            Task::Retry(req) => self.on_request(req),
+            // Only retry requests that haven't been cancelled. Otherwise we do unnecessary work.
+            Task::Retry(req) if !self.is_completed(&req) => self.on_request(req),
+            Task::Retry(_) => (),
             Task::Diagnostics(diagnostics_per_file) => {
                 for (file_id, diagnostics) in diagnostics_per_file {
                     self.diagnostics.set_native_diagnostics(file_id, diagnostics)
@@ -143,7 +143,7 @@
                     project_model::ProjectWorkspace::load_inline(
                         it.clone(),
                         cargo_config.target.as_deref(),
-                        &cargo_config,
+                        &cargo_config.extra_env,
                     )
                 }
             })
@@ -306,41 +306,50 @@
             format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);

         if self.proc_macro_clients.is_empty() {
-            if let Some((path, args)) = self.config.proc_macro_srv() {
+            if let Some((path, path_manually_set)) = self.config.proc_macro_srv() {
                 tracing::info!("Spawning proc-macro servers");
                 self.proc_macro_clients = self
                     .workspaces
                     .iter()
                     .map(|ws| {
-                        let mut args = args.clone();
-                        let mut path = path.clone();
-
-                        if let ProjectWorkspace::Cargo { sysroot, .. }
-                        | ProjectWorkspace::Json { sysroot, .. } = ws
-                        {
-                            tracing::debug!("Found a cargo workspace...");
-                            if let Some(sysroot) = sysroot.as_ref() {
-                                tracing::debug!("Found a cargo workspace with a sysroot...");
-                                let server_path =
-                                    sysroot.root().join("libexec").join(&standalone_server_name);
-                                if std::fs::metadata(&server_path).is_ok() {
-                                    tracing::debug!(
-                                        "And the server exists at {}",
-                                        server_path.display()
-                                    );
-                                    path = server_path;
-                                    args = vec![];
-                                } else {
-                                    tracing::debug!(
-                                        "And the server does not exist at {}",
-                                        server_path.display()
-                                    );
+                        let (path, args) = if path_manually_set {
+                            tracing::debug!(
+                                "Pro-macro server path explicitly set: {}",
+                                path.display()
+                            );
+                            (path.clone(), vec![])
+                        } else {
+                            let mut sysroot_server = None;
+                            if let ProjectWorkspace::Cargo { sysroot, .. }
+                            | ProjectWorkspace::Json { sysroot, .. } = ws
+                            {
+                                if let Some(sysroot) = sysroot.as_ref() {
+                                    let server_path = sysroot
+                                        .root()
+                                        .join("libexec")
+                                        .join(&standalone_server_name);
+                                    if std::fs::metadata(&server_path).is_ok() {
+                                        tracing::debug!(
+                                            "Sysroot proc-macro server exists at {}",
+                                            server_path.display()
+                                        );
+                                        sysroot_server = Some(server_path);
+                                    } else {
+                                        tracing::debug!(
+                                            "Sysroot proc-macro server does not exist at {}",
+                                            server_path.display()
+                                        );
                                     }
                                 }
                             }
                         }
+                            sysroot_server.map_or_else(
+                                || (path.clone(), vec!["proc-macro".to_owned()]),
+                                |path| (path, vec![]),
+                            )
+                        };

                         tracing::info!(?args, "Using proc-macro server at {}", path.display(),);
-                        ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|err| {
+                        ProcMacroServer::spawn(path.clone(), args).map_err(|err| {
                             let error = format!(
                                 "Failed to run proc-macro server from path {}, error: {:?}",
                                 path.display(),
@@ -402,7 +411,7 @@
                     crate_graph.extend(ws.to_crate_graph(
                         &mut load_proc_macro,
                         &mut load,
-                        &self.config.cargo(),
+                        &self.config.cargo().extra_env,
                     ));
                 }
                 crate_graph
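Note: load_cargo.rs and reload.rs now resolve the proc-macro server the same way. A condensed sketch of the lookup order implemented above; `resolve_proc_macro_server` is a hypothetical helper for illustration, not a function in the codebase:

    use std::path::PathBuf;

    fn resolve_proc_macro_server(
        explicitly_configured: Option<PathBuf>, // `rust-analyzer.procMacro.server`
        sysroot_root: Option<PathBuf>,
        own_exe: PathBuf,
    ) -> (PathBuf, Vec<String>) {
        // 1. An explicitly configured server path is used as-is, with no extra arguments.
        if let Some(path) = explicitly_configured {
            return (path, vec![]);
        }
        // 2. Otherwise prefer the standalone server shipped in the sysroot, if present.
        if let Some(root) = sysroot_root {
            let name = format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
            let candidate = root.join("libexec").join(name);
            if candidate.exists() {
                return (candidate, vec![]);
            }
        }
        // 3. Fall back to re-running rust-analyzer itself in `proc-macro` mode.
        (own_exe, vec!["proc-macro".to_owned()])
    }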
@@ -1177,13 +1177,13 @@ pub(crate) fn code_lens(
                 })
             }
         }
-        AnnotationKind::HasImpls { file_id, data } => {
+        AnnotationKind::HasImpls { pos: file_range, data } => {
             if !client_commands_config.show_reference {
                 return Ok(());
             }
-            let line_index = snap.file_line_index(file_id)?;
+            let line_index = snap.file_line_index(file_range.file_id)?;
             let annotation_range = range(&line_index, annotation.range);
-            let url = url(snap, file_id);
+            let url = url(snap, file_range.file_id);

             let id = lsp_types::TextDocumentIdentifier { uri: url.clone() };

@@ -1221,13 +1221,13 @@ pub(crate) fn code_lens(
                 data: Some(to_value(lsp_ext::CodeLensResolveData::Impls(goto_params)).unwrap()),
             })
         }
-        AnnotationKind::HasReferences { file_id, data } => {
+        AnnotationKind::HasReferences { pos: file_range, data } => {
             if !client_commands_config.show_reference {
                 return Ok(());
             }
-            let line_index = snap.file_line_index(file_id)?;
+            let line_index = snap.file_line_index(file_range.file_id)?;
             let annotation_range = range(&line_index, annotation.range);
-            let url = url(snap, file_id);
+            let url = url(snap, file_range.file_id);

             let id = lsp_types::TextDocumentIdentifier { uri: url.clone() };

@@ -18,7 +18,6 @@ mod tidy;

 use std::{collections::HashMap, path::PathBuf, time::Instant};

-use expect_test::expect;
 use lsp_types::{
     notification::DidOpenTextDocument,
     request::{
@@ -60,7 +59,7 @@ use std::collections::Spam;
 "#,
     )
     .with_config(serde_json::json!({
-        "cargo": { "noSysroot": false }
+        "cargo": { "sysroot": "discover" }
    }))
    .server()
    .wait_until_workspace_is_loaded();
@@ -615,7 +614,7 @@ fn main() {{}}
         librs, libs
     ))
    .with_config(serde_json::json!({
-        "cargo": { "noSysroot": false }
+        "cargo": { "sysroot": "discover" }
    }))
    .server()
    .wait_until_workspace_is_loaded();
@@ -743,7 +742,7 @@ fn main() {
             "buildScripts": {
                 "enable": true
             },
-            "noSysroot": true,
+            "sysroot": null,
         }
     }))
     .server()
@@ -821,7 +820,10 @@ fn main() {
 }

 #[test]
+// FIXME: Re-enable once we can run proc-macro tests on rust-lang/rust-analyzer again
+#[cfg(any())]
 fn resolve_proc_macro() {
+    use expect_test::expect;
     if skip_slow_tests() {
         return;
     }
@@ -898,7 +900,7 @@ pub fn foo(_input: TokenStream) -> TokenStream {
             "buildScripts": {
                 "enable": true
             },
-            "noSysroot": true,
+            "sysroot": null,
         },
         "procMacro": {
             "enable": true,
@@ -34,7 +34,7 @@ impl<'a> Project<'a> {
             config: serde_json::json!({
                 "cargo": {
                     // Loading standard library is costly, let's ignore it by default
-                    "noSysroot": true,
+                    "sysroot": null,
                     // Can't use test binary as rustc wrapper.
                     "buildScripts": {
                         "useRustcWrapper": false