mirror of https://github.com/rust-lang/rust-analyzer.git
synced 2025-10-02 22:54:58 +00:00

Apply suggested changes

This commit is contained in:
parent 29e5cdfb05
commit 23a5f31ff4

18 changed files with 1878 additions and 299 deletions

Cargo.lock (generated): 10 lines changed
@@ -328,6 +328,15 @@ dependencies = [
  "dirs-sys",
 ]
 
+[[package]]
+name = "dirs"
+version = "5.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
+dependencies = [
+ "dirs-sys",
+]
+
 [[package]]
 name = "dirs-sys"
 version = "0.4.1"

@@ -1665,6 +1674,7 @@ dependencies = [
  "anyhow",
  "cfg",
  "crossbeam-channel",
+ "dirs",
  "dissimilar",
  "expect-test",
  "flycheck",
@@ -273,10 +273,17 @@ impl Analysis {
         self.with_db(|db| status::status(db, file_id))
     }
 
-    pub fn source_root(&self, file_id: FileId) -> Cancellable<SourceRootId> {
+    pub fn source_root_id(&self, file_id: FileId) -> Cancellable<SourceRootId> {
         self.with_db(|db| db.file_source_root(file_id))
     }
 
+    pub fn is_local_source_root(&self, source_root_id: SourceRootId) -> Cancellable<bool> {
+        self.with_db(|db| {
+            let sr = db.source_root(source_root_id);
+            !sr.is_library
+        })
+    }
+
     pub fn parallel_prime_caches<F>(&self, num_worker_threads: u8, cb: F) -> Cancellable<()>
     where
         F: Fn(ParallelPrimeCachesProgress) + Sync + std::panic::UnwindSafe,
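Note: a hedged sketch (not part of the commit) of how the renamed `source_root_id` and the new `is_local_source_root` query compose; the `ide::` re-export paths are an assumption:

    // Ask whether a file lives in a workspace-local (non-library) source root.
    fn is_file_local(analysis: &ide::Analysis, file_id: ide::FileId) -> ide::Cancellable<bool> {
        let source_root_id = analysis.source_root_id(file_id)?;
        analysis.is_local_source_root(source_root_id)
    }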
@@ -135,6 +135,24 @@ impl AbsPathBuf {
     pub fn pop(&mut self) -> bool {
         self.0.pop()
     }
 
+    /// Equivalent of [`PathBuf::push`] for `AbsPathBuf`.
+    ///
+    /// Extends `self` with `path`.
+    ///
+    /// If `path` is absolute, it replaces the current path.
+    ///
+    /// On Windows:
+    ///
+    /// * if `path` has a root but no prefix (e.g., `\windows`), it
+    ///   replaces everything except for the prefix (if any) of `self`.
+    /// * if `path` has a prefix but no root, it replaces `self`.
+    /// * if `self` has a verbatim prefix (e.g. `\\?\C:\windows`)
+    ///   and `path` is not empty, the new path is normalized: all references
+    ///   to `.` and `..` are removed.
+    pub fn push(&mut self, suffix: &str) {
+        self.0.push(suffix)
+    }
 }
 
 impl fmt::Display for AbsPathBuf {
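Note: a brief usage sketch of the new `push` (not from the commit); the concrete path and the `paths::AbsPathBuf`/`camino::Utf8PathBuf` type pairing are illustrative assumptions:

    // Extend an absolute root with a relative segment, mirroring `PathBuf::push`.
    let mut config_path = AbsPathBuf::assert(Utf8PathBuf::from("/home/alice/project"));
    config_path.push("rust-analyzer.toml");
    assert_eq!(config_path.to_string(), "/home/alice/project/rust-analyzer.toml");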
@@ -22,6 +22,7 @@ path = "src/bin/main.rs"
 [dependencies]
 anyhow.workspace = true
 crossbeam-channel = "0.5.5"
+dirs = "5.0.1"
 dissimilar.workspace = true
 itertools.workspace = true
 scip = "0.3.3"

@@ -15,7 +15,11 @@ use std::{env, fs, path::PathBuf, process::ExitCode, sync::Arc};
 
 use anyhow::Context;
 use lsp_server::Connection;
-use rust_analyzer::{cli::flags, config::Config, from_json};
+use rust_analyzer::{
+    cli::flags,
+    config::{Config, ConfigChange, ConfigError},
+    from_json,
+};
 use semver::Version;
 use tracing_subscriber::fmt::writer::BoxMakeWriter;
 use vfs::AbsPathBuf;
@@ -220,16 +224,20 @@ fn run_server() -> anyhow::Result<()> {
         .filter(|workspaces| !workspaces.is_empty())
         .unwrap_or_else(|| vec![root_path.clone()]);
     let mut config =
-        Config::new(root_path, capabilities, workspace_roots, visual_studio_code_version);
+        Config::new(root_path, capabilities, workspace_roots, visual_studio_code_version, None);
     if let Some(json) = initialization_options {
-        if let Err(e) = config.update(json) {
+        let mut change = ConfigChange::default();
+        change.change_client_config(json);
+        let mut error_sink = ConfigError::default();
+        config = config.apply_change(change, &mut error_sink);
+        if !error_sink.is_empty() {
             use lsp_types::{
                 notification::{Notification, ShowMessage},
                 MessageType, ShowMessageParams,
             };
             let not = lsp_server::Notification::new(
                 ShowMessage::METHOD.to_owned(),
-                ShowMessageParams { typ: MessageType::WARNING, message: e.to_string() },
+                ShowMessageParams { typ: MessageType::WARNING, message: error_sink.to_string() },
             );
             connection.sender.send(lsp_server::Message::Notification(not)).unwrap();
         }
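Note: the hunk above replaces the old in-place `config.update(json)` with a change-set that is applied functionally while problems accumulate in an error sink. A hedged sketch of that pattern in isolation (the helper name and logging are assumptions, not part of the commit):

    fn apply_client_json(config: Config, json: serde_json::Value) -> Config {
        let mut change = ConfigChange::default();
        change.change_client_config(json);
        let mut error_sink = ConfigError::default();
        let config = config.apply_change(change, &mut error_sink);
        if !error_sink.is_empty() {
            // Invalid values no longer abort the update; they are reported afterwards.
            tracing::warn!("config errors: {error_sink}");
        }
        config
    }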
@@ -10,9 +10,11 @@ use ide_db::LineIndexDatabase;
 use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
 use rustc_hash::{FxHashMap, FxHashSet};
 use scip::types as scip_types;
+use tracing::error;
 
 use crate::{
     cli::flags,
+    config::{ConfigChange, ConfigError},
     line_index::{LineEndings, LineIndex, PositionEncoding},
 };
 

@@ -35,12 +37,18 @@ impl flags::Scip {
             lsp_types::ClientCapabilities::default(),
             vec![],
             None,
+            None,
         );
 
         if let Some(p) = self.config_path {
             let mut file = std::io::BufReader::new(std::fs::File::open(p)?);
             let json = serde_json::from_reader(&mut file)?;
-            config.update(json)?;
+            let mut change = ConfigChange::default();
+            change.change_client_config(json);
+            let mut error_sink = ConfigError::default();
+            config = config.apply_change(change, &mut error_sink);
+            // FIXME @alibektas : What happens to errors without logging?
+            error!(?error_sink, "Config Error(s)");
         }
         let cargo_config = config.cargo();
         let (db, vfs, _) = load_workspace_at(
@@ -1,14 +1,12 @@
 //! Config used by the language server.
 //!
-//! We currently get this config from `initialize` LSP request, which is not the
-//! best way to do it, but was the simplest thing we could implement.
-//!
 //! Of particular interest is the `feature_flags` hash map: while other fields
 //! configure the server itself, feature flags are passed into analysis, and
 //! tweak things like automatic insertion of `()` in completions.
 use std::{fmt, iter, ops::Not};
 
 use cfg::{CfgAtom, CfgDiff};
+use dirs::config_dir;
 use flycheck::{CargoOptions, FlycheckConfig};
 use ide::{
     AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode,

@@ -29,9 +27,14 @@ use project_model::{
 };
 use rustc_hash::{FxHashMap, FxHashSet};
 use semver::Version;
-use serde::{de::DeserializeOwned, Deserialize, Serialize};
+use serde::{
+    de::{DeserializeOwned, Error},
+    ser::SerializeStruct,
+    Deserialize, Serialize,
+};
 use stdx::format_to_acc;
-use vfs::{AbsPath, AbsPathBuf};
+use triomphe::Arc;
+use vfs::{AbsPath, AbsPathBuf, FileId, VfsPath};
 
 use crate::{
     caps::completion_item_edit_resolve,

@@ -67,12 +70,6 @@ config_data! {
     ///
     /// A config is searched for by traversing a "config tree" in a bottom up fashion. It is chosen by the nearest first principle.
     global: struct GlobalDefaultConfigData <- GlobalConfigInput -> {
-        /// Whether to insert #[must_use] when generating `as_` methods
-        /// for enum variants.
-        assist_emitMustUse: bool = false,
-        /// Placeholder expression to use for missing expressions in assists.
-        assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo,
-
         /// Warm up caches on project load.
         cachePriming_enable: bool = true,
         /// How many worker threads to handle priming caches. The default `0` means to pick automatically.

@@ -250,6 +247,71 @@ config_data! {
         /// If false, `-p <package>` will be passed instead.
         check_workspace: bool = true,
 
+        /// Toggles the additional completions that automatically add imports when completed.
+        /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
+        completion_autoimport_enable: bool = true,
+        /// Toggles the additional completions that automatically show method calls and field accesses
+        /// with `self` prefixed to them when inside a method.
+        completion_autoself_enable: bool = true,
+        /// Whether to add parenthesis and argument snippets when completing function.
+        completion_callable_snippets: CallableCompletionDef = CallableCompletionDef::FillArguments,
+        /// Whether to show full function/method signatures in completion docs.
+        completion_fullFunctionSignatures_enable: bool = false,
+        /// Maximum number of completions to return. If `None`, the limit is infinite.
+        completion_limit: Option<usize> = None,
+        /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
+        completion_postfix_enable: bool = true,
+        /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
+        completion_privateEditable_enable: bool = false,
+        /// Custom completion snippets.
+        // NOTE: we use IndexMap for deterministic serialization ordering
+        completion_snippets_custom: IndexMap<String, SnippetDef> = serde_json::from_str(r#"{
+            "Arc::new": {
+                "postfix": "arc",
+                "body": "Arc::new(${receiver})",
+                "requires": "std::sync::Arc",
+                "description": "Put the expression into an `Arc`",
+                "scope": "expr"
+            },
+            "Rc::new": {
+                "postfix": "rc",
+                "body": "Rc::new(${receiver})",
+                "requires": "std::rc::Rc",
+                "description": "Put the expression into an `Rc`",
+                "scope": "expr"
+            },
+            "Box::pin": {
+                "postfix": "pinbox",
+                "body": "Box::pin(${receiver})",
+                "requires": "std::boxed::Box",
+                "description": "Put the expression into a pinned `Box`",
+                "scope": "expr"
+            },
+            "Ok": {
+                "postfix": "ok",
+                "body": "Ok(${receiver})",
+                "description": "Wrap the expression in a `Result::Ok`",
+                "scope": "expr"
+            },
+            "Err": {
+                "postfix": "err",
+                "body": "Err(${receiver})",
+                "description": "Wrap the expression in a `Result::Err`",
+                "scope": "expr"
+            },
+            "Some": {
+                "postfix": "some",
+                "body": "Some(${receiver})",
+                "description": "Wrap the expression in an `Option::Some`",
+                "scope": "expr"
+            }
+        }"#).unwrap(),
+        /// Whether to enable term search based snippets like `Some(foo.bar().baz())`.
+        completion_termSearch_enable: bool = false,
+        /// Term search fuel in "units of work" for autocompletion (Defaults to 200).
+        completion_termSearch_fuel: usize = 200,
+
         /// List of rust-analyzer diagnostics to disable.
         diagnostics_disabled: FxHashSet<String> = FxHashSet::default(),
         /// Whether to show native rust-analyzer diagnostics.

@@ -451,76 +513,16 @@ config_data! {
 }
 
 config_data! {
-    /// Local configurations can be overridden for every crate by placing a `rust-analyzer.toml` on crate root.
-    /// A config is searched for by traversing a "config tree" in a bottom up fashion. It is chosen by the nearest first principle.
+    /// Local configurations can be defined per `SourceRoot`. This almost always corresponds to a `Crate`.
     local: struct LocalDefaultConfigData <- LocalConfigInput -> {
+        /// Whether to insert #[must_use] when generating `as_` methods
+        /// for enum variants.
+        assist_emitMustUse: bool = false,
+        /// Placeholder expression to use for missing expressions in assists.
+        assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo,
         /// Term search fuel in "units of work" for assists (Defaults to 400).
         assist_termSearch_fuel: usize = 400,
 
-        /// Toggles the additional completions that automatically add imports when completed.
-        /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
-        completion_autoimport_enable: bool = true,
-        /// Toggles the additional completions that automatically show method calls and field accesses
-        /// with `self` prefixed to them when inside a method.
-        completion_autoself_enable: bool = true,
-        /// Whether to add parenthesis and argument snippets when completing function.
-        completion_callable_snippets: CallableCompletionDef = CallableCompletionDef::FillArguments,
-        /// Whether to show full function/method signatures in completion docs.
-        completion_fullFunctionSignatures_enable: bool = false,
-        /// Maximum number of completions to return. If `None`, the limit is infinite.
-        completion_limit: Option<usize> = None,
-        /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
-        completion_postfix_enable: bool = true,
-        /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
-        completion_privateEditable_enable: bool = false,
-        /// Custom completion snippets.
-        // NOTE: we use IndexMap for deterministic serialization ordering
-        completion_snippets_custom: IndexMap<String, SnippetDef> = serde_json::from_str(r#"{
-            "Arc::new": {
-                "postfix": "arc",
-                "body": "Arc::new(${receiver})",
-                "requires": "std::sync::Arc",
-                "description": "Put the expression into an `Arc`",
-                "scope": "expr"
-            },
-            "Rc::new": {
-                "postfix": "rc",
-                "body": "Rc::new(${receiver})",
-                "requires": "std::rc::Rc",
-                "description": "Put the expression into an `Rc`",
-                "scope": "expr"
-            },
-            "Box::pin": {
-                "postfix": "pinbox",
-                "body": "Box::pin(${receiver})",
-                "requires": "std::boxed::Box",
-                "description": "Put the expression into a pinned `Box`",
-                "scope": "expr"
-            },
-            "Ok": {
-                "postfix": "ok",
-                "body": "Ok(${receiver})",
-                "description": "Wrap the expression in a `Result::Ok`",
-                "scope": "expr"
-            },
-            "Err": {
-                "postfix": "err",
-                "body": "Err(${receiver})",
-                "description": "Wrap the expression in a `Result::Err`",
-                "scope": "expr"
-            },
-            "Some": {
-                "postfix": "some",
-                "body": "Some(${receiver})",
-                "description": "Wrap the expression in an `Option::Some`",
-                "scope": "expr"
-            }
-        }"#).unwrap(),
-        /// Whether to enable term search based snippets like `Some(foo.bar().baz())`.
-        completion_termSearch_enable: bool = false,
-        /// Term search fuel in "units of work" for autocompletion (Defaults to 200).
-        completion_termSearch_fuel: usize = 200,
-
         /// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
         highlightRelated_breakPoints_enable: bool = true,
         /// Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure.
@@ -659,23 +661,304 @@ pub struct Config {
     workspace_roots: Vec<AbsPathBuf>,
     caps: lsp_types::ClientCapabilities,
     root_path: AbsPathBuf,
-    detached_files: Vec<AbsPathBuf>,
     snippets: Vec<Snippet>,
     visual_studio_code_version: Option<Version>,
 
     default_config: DefaultConfigData,
-    client_config: FullConfigInput,
-    user_config: GlobalLocalConfigInput,
-    #[allow(dead_code)]
+    /// Config node that obtains its initial value during the server initialization and
+    /// by receiving a `lsp_types::notification::DidChangeConfiguration`.
+    client_config: ClientConfig,
+
+    /// Path to the root configuration file. This can be seen as a generic way to define what would be `$XDG_CONFIG_HOME/rust-analyzer/rust-analyzer.toml` in Linux.
+    /// If not specified by init of a `Config` object this value defaults to :
+    ///
+    /// |Platform | Value                                 | Example                                  |
+    /// | ------- | ------------------------------------- | ---------------------------------------- |
+    /// | Linux   | `$XDG_CONFIG_HOME` or `$HOME`/.config | /home/alice/.config                      |
+    /// | macOS   | `$HOME`/Library/Application Support   | /Users/Alice/Library/Application Support |
+    /// | Windows | `{FOLDERID_RoamingAppData}`           | C:\Users\Alice\AppData\Roaming           |
+    user_config_path: VfsPath,
+
+    /// FIXME @alibektas : Change this to sth better.
+    /// Config node whose values apply to **every** Rust project.
+    user_config: Option<RatomlNode>,
+
+    /// A special file for this session whose path is set to `self.root_path.join("rust-analyzer.toml")`
+    root_ratoml_path: VfsPath,
+
+    /// This file can be used to make global changes while having only a workspace-wide scope.
+    root_ratoml: Option<RatomlNode>,
+
+    /// For every `SourceRoot` there can be at most one RATOML file.
     ratoml_files: FxHashMap<SourceRootId, RatomlNode>,
+
+    /// Clone of the value that is stored inside a `GlobalState`.
+    source_root_parent_map: Arc<FxHashMap<SourceRootId, SourceRootId>>,
+
+    /// Changes made to client and global configurations will partially not be reflected even after `.apply_change()` was called.
+    /// This field signals that the `GlobalState` should call its `update_configuration()` method.
+    should_update: bool,
 }
 
 #[derive(Clone, Debug)]
 struct RatomlNode {
-    #[allow(dead_code)]
     node: GlobalLocalConfigInput,
+    file_id: FileId,
+}
+
+#[derive(Debug, Clone, Default)]
+struct ClientConfig {
+    node: FullConfigInput,
+    detached_files: Vec<AbsPathBuf>,
+}
+
+impl Serialize for RatomlNode {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        let mut s = serializer.serialize_struct("RatomlNode", 2)?;
+        s.serialize_field("file_id", &self.file_id.index())?;
+        s.serialize_field("config", &self.node)?;
+        s.end()
+    }
+}
+
+#[derive(Debug, Hash, Eq, PartialEq)]
+pub(crate) enum ConfigNodeKey {
+    Ratoml(SourceRootId),
+    Client,
+    User,
+}
+
+impl Serialize for ConfigNodeKey {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        match self {
+            ConfigNodeKey::Ratoml(source_root_id) => serializer.serialize_u32(source_root_id.0),
+            ConfigNodeKey::Client => serializer.serialize_str("client"),
+            ConfigNodeKey::User => serializer.serialize_str("user"),
+        }
+    }
+}
+
+#[derive(Debug, Serialize)]
+enum ConfigNodeValue<'a> {
+    /// `rust-analyzer::config` module works by setting
+    /// a mapping between `SourceRootId` and `ConfigInput`.
+    /// Storing a `FileId` is mostly for debugging purposes.
+    Ratoml(&'a RatomlNode),
+    Client(&'a FullConfigInput),
+}
+
+impl Config {
+    /// FIXME @alibektas : Before integration tests, I thought I would
+    /// get the debug output of the config tree and do assertions based on it.
+    /// The reason why I didn't delete this is that we may want to have a lsp_ext
+    /// like "DebugConfigTree" so that it is easier for users to get a snapshot of
+    /// the config state for us to debug.
     #[allow(dead_code)]
-    parent: Option<SourceRootId>,
+    /// Walk towards the root starting from a specified `ConfigNode`
+    fn traverse(
+        &self,
+        start: ConfigNodeKey,
+    ) -> impl Iterator<Item = (ConfigNodeKey, ConfigNodeValue<'_>)> {
+        let mut v = vec![];
+
+        if let ConfigNodeKey::Ratoml(start) = start {
+            let mut par: Option<SourceRootId> = Some(start);
+            while let Some(source_root_id) = par {
+                par = self.source_root_parent_map.get(&start).copied();
+                if let Some(config) = self.ratoml_files.get(&source_root_id) {
+                    v.push((
+                        ConfigNodeKey::Ratoml(source_root_id),
+                        ConfigNodeValue::Ratoml(config),
+                    ));
+                }
+            }
+        }
+
+        v.push((ConfigNodeKey::Client, ConfigNodeValue::Client(&self.client_config.node)));
+
+        if let Some(user_config) = self.user_config.as_ref() {
+            v.push((ConfigNodeKey::User, ConfigNodeValue::Ratoml(user_config)));
+        }
+
+        v.into_iter()
+    }
+
+    pub fn user_config_path(&self) -> &VfsPath {
+        &self.user_config_path
+    }
+
+    pub fn should_update(&self) -> bool {
+        self.should_update
+    }
+
+    // FIXME @alibektas : Server's health uses error sink but in other places it is not used atm.
+    pub fn apply_change(&self, change: ConfigChange, error_sink: &mut ConfigError) -> Config {
+        let mut config = self.clone();
+        let mut toml_errors = vec![];
+        let mut json_errors = vec![];
+
+        config.should_update = false;
+
+        if let Some((file_id, change)) = change.user_config_change {
+            config.user_config = Some(RatomlNode {
+                file_id,
+                node: GlobalLocalConfigInput::from_toml(
+                    toml::from_str(change.to_string().as_str()).unwrap(),
+                    &mut toml_errors,
+                ),
+            });
+            config.should_update = true;
+        }
+
+        if let Some(mut json) = change.client_config_change {
+            tracing::info!("updating config from JSON: {:#}", json);
+            if !(json.is_null() || json.as_object().map_or(false, |it| it.is_empty())) {
+                let detached_files = get_field::<Vec<Utf8PathBuf>>(
+                    &mut json,
+                    &mut json_errors,
+                    "detachedFiles",
+                    None,
+                )
+                .unwrap_or_default()
+                .into_iter()
+                .map(AbsPathBuf::assert)
+                .collect();
+
+                patch_old_style::patch_json_for_outdated_configs(&mut json);
+
+                config.client_config = ClientConfig {
+                    node: FullConfigInput::from_json(json, &mut json_errors),
+                    detached_files,
+                }
+            }
+            config.should_update = true;
+        }
+
+        if let Some((file_id, change)) = change.root_ratoml_change {
+            config.root_ratoml = Some(RatomlNode {
+                file_id,
+                node: GlobalLocalConfigInput::from_toml(
+                    toml::from_str(change.to_string().as_str()).unwrap(),
+                    &mut toml_errors,
+                ),
+            });
+            config.should_update = true;
+        }
+
+        if let Some(change) = change.ratoml_file_change {
+            for (source_root_id, (file_id, _, text)) in change {
+                if let Some(text) = text {
+                    config.ratoml_files.insert(
+                        source_root_id,
+                        RatomlNode {
+                            file_id,
+                            node: GlobalLocalConfigInput::from_toml(
+                                toml::from_str(&text).unwrap(),
+                                &mut toml_errors,
+                            ),
+                        },
+                    );
+                }
+            }
+        }
+
+        if let Some(source_root_map) = change.source_map_change {
+            config.source_root_parent_map = source_root_map;
+        }
+
+        let snips = self.completion_snippets_custom().to_owned();
+
+        for (name, def) in snips.iter() {
+            if def.prefix.is_empty() && def.postfix.is_empty() {
+                continue;
+            }
+            let scope = match def.scope {
+                SnippetScopeDef::Expr => SnippetScope::Expr,
+                SnippetScopeDef::Type => SnippetScope::Type,
+                SnippetScopeDef::Item => SnippetScope::Item,
+            };
+            match Snippet::new(
+                &def.prefix,
+                &def.postfix,
+                &def.body,
+                def.description.as_ref().unwrap_or(name),
+                &def.requires,
+                scope,
+            ) {
+                Some(snippet) => config.snippets.push(snippet),
+                None => error_sink.0.push(ConfigErrorInner::JsonError(
+                    format!("snippet {name} is invalid"),
+                    <serde_json::Error as serde::de::Error>::custom(
+                        "snippet path is invalid or triggers are missing",
+                    ),
+                )),
+            }
+        }
+
+        if config.check_command().is_empty() {
+            error_sink.0.push(ConfigErrorInner::JsonError(
+                "/check/command".to_owned(),
+                serde_json::Error::custom("expected a non-empty string"),
+            ));
+        }
+        config
+    }
+}
+
+#[derive(Default, Debug)]
+pub struct ConfigChange {
+    user_config_change: Option<(FileId, String)>,
+    root_ratoml_change: Option<(FileId, String)>,
+    client_config_change: Option<serde_json::Value>,
+    ratoml_file_change: Option<FxHashMap<SourceRootId, (FileId, VfsPath, Option<String>)>>,
+    source_map_change: Option<Arc<FxHashMap<SourceRootId, SourceRootId>>>,
+}
+
+impl ConfigChange {
+    pub fn change_ratoml(
+        &mut self,
+        source_root: SourceRootId,
+        file_id: FileId,
+        vfs_path: VfsPath,
+        content: Option<String>,
+    ) -> Option<(FileId, VfsPath, Option<String>)> {
+        if let Some(changes) = self.ratoml_file_change.as_mut() {
+            changes.insert(source_root, (file_id, vfs_path, content))
+        } else {
+            let mut map = FxHashMap::default();
+            map.insert(source_root, (file_id, vfs_path, content));
+            self.ratoml_file_change = Some(map);
+            None
+        }
+    }
+
+    pub fn change_user_config(&mut self, content: Option<(FileId, String)>) {
+        assert!(self.user_config_change.is_none()); // Otherwise it is a double write.
+        self.user_config_change = content;
+    }
+
+    pub fn change_root_ratoml(&mut self, content: Option<(FileId, String)>) {
+        assert!(self.user_config_change.is_none()); // Otherwise it is a double write.
+        self.root_ratoml_change = content;
+    }
+
+    pub fn change_client_config(&mut self, change: serde_json::Value) {
+        self.client_config_change = Some(change);
+    }
+
+    pub fn change_source_root_parent_map(
+        &mut self,
+        source_root_map: Arc<FxHashMap<SourceRootId, SourceRootId>>,
+    ) {
+        assert!(self.source_map_change.is_none());
+        self.source_map_change = Some(source_root_map.clone());
+    }
 }
 
 macro_rules! try_ {
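Note: a hedged usage sketch of the new change-set API for a single edited rust-analyzer.toml (the helper name and logging are assumptions; `source_root_id`, `file_id`, `vfs_path`, and `text` are expected to come from the VFS plumbing shown later in this commit):

    fn apply_ratoml_edit(
        config: &Config,
        source_root_id: SourceRootId,
        file_id: FileId,
        vfs_path: VfsPath,
        text: String,
    ) -> Config {
        let mut change = ConfigChange::default();
        change.change_ratoml(source_root_id, file_id, vfs_path, Some(text));
        let mut error_sink = ConfigError::default();
        let new_config = config.apply_change(change, &mut error_sink);
        if !error_sink.is_empty() {
            // TOML problems are accumulated in the sink rather than failing the whole update.
            tracing::error!("config errors: {error_sink}");
        }
        new_config
    }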
@@ -866,23 +1149,37 @@ pub struct ClientCommandsConfig {
 }
 
 #[derive(Debug)]
-pub struct ConfigError {
-    errors: Vec<(String, serde_json::Error)>,
+pub enum ConfigErrorInner {
+    JsonError(String, serde_json::Error),
+    Toml(String, toml::de::Error),
 }
 
+#[derive(Debug, Default)]
+pub struct ConfigError(Vec<ConfigErrorInner>);
+
+impl ConfigError {
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+}
+
+impl ConfigError {}
+
 impl fmt::Display for ConfigError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let errors = self.errors.iter().format_with("\n", |(key, e), f| {
-            f(key)?;
-            f(&": ")?;
-            f(e)
+        let errors = self.0.iter().format_with("\n", |inner, f| match inner {
+            ConfigErrorInner::JsonError(key, e) => {
+                f(key)?;
+                f(&": ")?;
+                f(e)
+            }
+            ConfigErrorInner::Toml(key, e) => {
+                f(key)?;
+                f(&": ")?;
+                f(e)
+            }
         });
-        write!(
-            f,
-            "invalid config value{}:\n{}",
-            if self.errors.len() == 1 { "" } else { "s" },
-            errors
-        )
+        write!(f, "invalid config value{}:\n{}", if self.0.len() == 1 { "" } else { "s" }, errors)
     }
 }
@@ -894,19 +1191,45 @@ impl Config {
         caps: ClientCapabilities,
         workspace_roots: Vec<AbsPathBuf>,
         visual_studio_code_version: Option<Version>,
+        user_config_path: Option<Utf8PathBuf>,
     ) -> Self {
+        let user_config_path = if let Some(user_config_path) = user_config_path {
+            user_config_path.join("rust-analyzer").join("rust-analyzer.toml")
+        } else {
+            let p = config_dir()
+                .expect("A config dir is expected to existed on all platforms ra supports.")
+                .join("rust-analyzer")
+                .join("rust-analyzer.toml");
+            Utf8PathBuf::from_path_buf(p).expect("Config dir expected to be abs.")
+        };
+
+        // A user config cannot be a virtual path as rust-analyzer cannot support watching changes in virtual paths.
+        // See `GlobalState::process_changes` to get more info.
+        // FIXME @alibektas : Temporary solution. I don't think this is right as at some point we may allow users to specify
+        // custom USER_CONFIG_PATHs which may also be relative.
+        let user_config_path = VfsPath::from(AbsPathBuf::assert(user_config_path));
+        let root_ratoml_path = {
+            let mut p = root_path.clone();
+            p.push("rust-analyzer.toml");
+            VfsPath::new_real_path(p.to_string())
+        };
+
         Config {
             caps,
-            detached_files: Vec::new(),
             discovered_projects: Vec::new(),
             root_path,
             snippets: Default::default(),
             workspace_roots,
             visual_studio_code_version,
-            client_config: FullConfigInput::default(),
-            user_config: GlobalLocalConfigInput::default(),
+            client_config: ClientConfig::default(),
+            user_config: None,
             ratoml_files: FxHashMap::default(),
             default_config: DefaultConfigData::default(),
+            source_root_parent_map: Arc::new(FxHashMap::default()),
+            user_config_path,
+            root_ratoml: None,
+            root_ratoml_path,
+            should_update: false,
         }
     }
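Note: a hedged sketch of the fallback path computed above when `Config::new` receives `None`, using `dirs::config_dir` from the dependency added in this commit (the helper name is an assumption):

    use dirs::config_dir;

    // E.g. ~/.config/rust-analyzer/rust-analyzer.toml on Linux,
    // ~/Library/Application Support/rust-analyzer/rust-analyzer.toml on macOS.
    fn default_user_config_path() -> Option<std::path::PathBuf> {
        Some(config_dir()?.join("rust-analyzer").join("rust-analyzer.toml"))
    }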
@@ -929,71 +1252,6 @@ impl Config {
         self.workspace_roots.extend(paths);
     }
 
-    pub fn update(&mut self, mut json: serde_json::Value) -> Result<(), ConfigError> {
-        tracing::info!("updating config from JSON: {:#}", json);
-        if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) {
-            return Ok(());
-        }
-        let mut errors = Vec::new();
-        self.detached_files =
-            get_field::<Vec<Utf8PathBuf>>(&mut json, &mut errors, "detachedFiles", None)
-                .unwrap_or_default()
-                .into_iter()
-                .map(AbsPathBuf::assert)
-                .collect();
-        patch_old_style::patch_json_for_outdated_configs(&mut json);
-        self.client_config = FullConfigInput::from_json(json, &mut errors);
-        tracing::debug!(?self.client_config, "deserialized config data");
-        self.snippets.clear();
-
-        let snips = self.completion_snippets_custom(None).to_owned();
-
-        for (name, def) in snips.iter() {
-            if def.prefix.is_empty() && def.postfix.is_empty() {
-                continue;
-            }
-            let scope = match def.scope {
-                SnippetScopeDef::Expr => SnippetScope::Expr,
-                SnippetScopeDef::Type => SnippetScope::Type,
-                SnippetScopeDef::Item => SnippetScope::Item,
-            };
-            match Snippet::new(
-                &def.prefix,
-                &def.postfix,
-                &def.body,
-                def.description.as_ref().unwrap_or(name),
-                &def.requires,
-                scope,
-            ) {
-                Some(snippet) => self.snippets.push(snippet),
-                None => errors.push((
-                    format!("snippet {name} is invalid"),
-                    <serde_json::Error as serde::de::Error>::custom(
-                        "snippet path is invalid or triggers are missing",
-                    ),
-                )),
-            }
-        }
-
-        self.validate(&mut errors);
-
-        if errors.is_empty() {
-            Ok(())
-        } else {
-            Err(ConfigError { errors })
-        }
-    }
-
-    fn validate(&self, error_sink: &mut Vec<(String, serde_json::Error)>) {
-        use serde::de::Error;
-        if self.check_command().is_empty() {
-            error_sink.push((
-                "/check/command".to_owned(),
-                serde_json::Error::custom("expected a non-empty string"),
-            ));
-        }
-    }
-
     pub fn json_schema() -> serde_json::Value {
         FullConfigInput::json_schema()
     }

@@ -1002,12 +1260,12 @@ impl Config {
         &self.root_path
     }
 
-    pub fn caps(&self) -> &lsp_types::ClientCapabilities {
-        &self.caps
+    pub fn root_ratoml_path(&self) -> &VfsPath {
+        &self.root_ratoml_path
     }
 
-    pub fn detached_files(&self) -> &[AbsPathBuf] {
-        &self.detached_files
+    pub fn caps(&self) -> &lsp_types::ClientCapabilities {
+        &self.caps
     }
 }

@@ -1018,7 +1276,7 @@ impl Config {
             allowed: None,
             insert_use: self.insert_use_config(source_root),
             prefer_no_std: self.imports_preferNoStd(source_root).to_owned(),
-            assist_emit_must_use: self.assist_emitMustUse().to_owned(),
+            assist_emit_must_use: self.assist_emitMustUse(source_root).to_owned(),
             prefer_prelude: self.imports_preferPrelude(source_root).to_owned(),
             term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64,
         }

@@ -1026,17 +1284,13 @@ impl Config {
 
     pub fn completion(&self, source_root: Option<SourceRootId>) -> CompletionConfig {
         CompletionConfig {
-            enable_postfix_completions: self.completion_postfix_enable(source_root).to_owned(),
-            enable_imports_on_the_fly: self.completion_autoimport_enable(source_root).to_owned()
+            enable_postfix_completions: self.completion_postfix_enable().to_owned(),
+            enable_imports_on_the_fly: self.completion_autoimport_enable().to_owned()
                 && completion_item_edit_resolve(&self.caps),
-            enable_self_on_the_fly: self.completion_autoself_enable(source_root).to_owned(),
-            enable_private_editable: self.completion_privateEditable_enable(source_root).to_owned(),
-            enable_term_search: self.completion_termSearch_enable(source_root).to_owned(),
-            term_search_fuel: self.completion_termSearch_fuel(source_root).to_owned() as u64,
-            full_function_signatures: self
-                .completion_fullFunctionSignatures_enable(source_root)
-                .to_owned(),
-            callable: match self.completion_callable_snippets(source_root) {
+            enable_self_on_the_fly: self.completion_autoself_enable().to_owned(),
+            enable_private_editable: self.completion_privateEditable_enable().to_owned(),
+            full_function_signatures: self.completion_fullFunctionSignatures_enable().to_owned(),
+            callable: match self.completion_callable_snippets() {
                 CallableCompletionDef::FillArguments => Some(CallableSnippets::FillArguments),
                 CallableCompletionDef::AddParentheses => Some(CallableSnippets::AddParentheses),
                 CallableCompletionDef::None => None,

@@ -1055,10 +1309,18 @@ impl Config {
             prefer_no_std: self.imports_preferNoStd(source_root).to_owned(),
             prefer_prelude: self.imports_preferPrelude(source_root).to_owned(),
             snippets: self.snippets.clone().to_vec(),
-            limit: self.completion_limit(source_root).to_owned(),
+            limit: self.completion_limit().to_owned(),
+            enable_term_search: self.completion_termSearch_enable().to_owned(),
+            term_search_fuel: self.completion_termSearch_fuel().to_owned() as u64,
         }
     }
 
+    pub fn detached_files(&self) -> &Vec<AbsPathBuf> {
+        // FIXME @alibektas : This is the only config that is confusing. If it's a proper configuration
+        // why is it not among the others? If it's client only which I doubt it is current state should be alright
+        &self.client_config.detached_files
+    }
+
     pub fn diagnostics(&self, source_root: Option<SourceRootId>) -> DiagnosticsConfig {
         DiagnosticsConfig {
             enabled: *self.diagnostics_enable(),

@@ -1066,7 +1328,7 @@ impl Config {
             proc_macros_enabled: *self.procMacro_enable(),
             disable_experimental: !self.diagnostics_experimental_enable(),
             disabled: self.diagnostics_disabled().clone(),
-            expr_fill_default: match self.assist_expressionFillDefault() {
+            expr_fill_default: match self.assist_expressionFillDefault(source_root) {
                 ExprFillDefaultDef::Todo => ExprFillDefaultMode::Todo,
                 ExprFillDefaultDef::Default => ExprFillDefaultMode::Default,
             },

@@ -2016,7 +2278,7 @@ enum SnippetScopeDef {
 
 #[derive(Serialize, Deserialize, Debug, Clone, Default)]
 #[serde(default)]
-struct SnippetDef {
+pub(crate) struct SnippetDef {
     #[serde(with = "single_or_array")]
     #[serde(skip_serializing_if = "Vec::is_empty")]
     prefix: Vec<String>,

@@ -2111,7 +2373,7 @@ enum ImportGranularityDef {
 
 #[derive(Serialize, Deserialize, Debug, Copy, Clone)]
 #[serde(rename_all = "snake_case")]
-enum CallableCompletionDef {
+pub(crate) enum CallableCompletionDef {
     FillArguments,
     AddParentheses,
     None,
@@ -2318,13 +2580,28 @@ macro_rules! _impl_for_config_data {
             $(
                 $($doc)*
                 #[allow(non_snake_case)]
-                $vis fn $field(&self, _source_root: Option<SourceRootId>) -> &$ty {
-                    if let Some(v) = self.client_config.local.$field.as_ref() {
+                $vis fn $field(&self, source_root: Option<SourceRootId>) -> &$ty {
+                    if source_root.is_some() {
+                        let mut par: Option<SourceRootId> = source_root;
+                        while let Some(source_root_id) = par {
+                            par = self.source_root_parent_map.get(&source_root_id).copied();
+                            if let Some(config) = self.ratoml_files.get(&source_root_id) {
+                                if let Some(value) = config.node.local.$field.as_ref() {
+                                    return value;
+                                }
+                            }
+                        }
+                    }
+
+                    if let Some(v) = self.client_config.node.local.$field.as_ref() {
                         return &v;
                     }
 
-                    if let Some(v) = self.user_config.local.$field.as_ref() {
-                        return &v;
+                    if let Some(user_config) = self.user_config.as_ref() {
+                        if let Some(v) = user_config.node.local.$field.as_ref() {
+                            return &v;
+                        }
                     }
 
                     &self.default_config.local.$field
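Note: a hedged sketch of roughly what `config_data!` now generates for one local option (`assist_emitMustUse` is picked from the block above; the real accessor is produced by the macro inside `impl Config`, not hand-written). The lookup order is: nearest rust-analyzer.toml walking up the source-root tree, then the client (LSP) config, then the user-level config file, then the built-in default:

    #[allow(non_snake_case)]
    pub fn assist_emitMustUse(&self, source_root: Option<SourceRootId>) -> &bool {
        if source_root.is_some() {
            let mut par: Option<SourceRootId> = source_root;
            while let Some(source_root_id) = par {
                par = self.source_root_parent_map.get(&source_root_id).copied();
                if let Some(config) = self.ratoml_files.get(&source_root_id) {
                    if let Some(value) = config.node.local.assist_emitMustUse.as_ref() {
                        return value;
                    }
                }
            }
        }
        if let Some(v) = self.client_config.node.local.assist_emitMustUse.as_ref() {
            return v;
        }
        if let Some(user_config) = self.user_config.as_ref() {
            if let Some(v) = user_config.node.local.assist_emitMustUse.as_ref() {
                return v;
            }
        }
        &self.default_config.local.assist_emitMustUse
    }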
@@ -2342,12 +2619,21 @@ macro_rules! _impl_for_config_data {
                 $($doc)*
                 #[allow(non_snake_case)]
                 $vis fn $field(&self) -> &$ty {
-                    if let Some(v) = self.client_config.global.$field.as_ref() {
+                    if let Some(root_path_ratoml) = self.root_ratoml.as_ref() {
+                        if let Some(v) = root_path_ratoml.node.global.$field.as_ref() {
+                            return &v;
+                        }
+                    }
+
+                    if let Some(v) = self.client_config.node.global.$field.as_ref() {
                         return &v;
                     }
 
-                    if let Some(v) = self.user_config.global.$field.as_ref() {
-                        return &v;
+                    if let Some(user_config) = self.user_config.as_ref() {
+                        if let Some(v) = user_config.node.global.$field.as_ref() {
+                            return &v;
+                        }
                     }
 
                     &self.default_config.global.$field

@@ -2502,11 +2788,10 @@ struct DefaultConfigData {
 /// All of the config levels, all fields `Option<T>`, to describe fields that are actually set by
 /// some rust-analyzer.toml file or JSON blob. An empty rust-analyzer.toml corresponds to
 /// all fields being None.
-#[derive(Debug, Clone, Default)]
+#[derive(Debug, Clone, Default, Serialize)]
 struct FullConfigInput {
     global: GlobalConfigInput,
     local: LocalConfigInput,
-    #[allow(dead_code)]
     client: ClientConfigInput,
 }
 

@@ -2545,7 +2830,7 @@ impl FullConfigInput {
 /// All of the config levels, all fields `Option<T>`, to describe fields that are actually set by
 /// some rust-analyzer.toml file or JSON blob. An empty rust-analyzer.toml corresponds to
 /// all fields being None.
-#[derive(Debug, Clone, Default)]
+#[derive(Debug, Clone, Default, Serialize)]
 struct GlobalLocalConfigInput {
     global: GlobalConfigInput,
     local: LocalConfigInput,
@@ -3104,12 +3389,17 @@ mod tests {
             Default::default(),
             vec![],
             None,
+            None,
         );
-        config
-            .update(serde_json::json!({
-                "procMacro_server": null,
-            }))
-            .unwrap();
+        let mut change = ConfigChange::default();
+        change.change_client_config(serde_json::json!({
+            "procMacro" : {
+                "server": null,
+        }}));
+
+        let mut error_sink = ConfigError::default();
+        config = config.apply_change(change, &mut error_sink);
         assert_eq!(config.proc_macro_srv(), None);
     }
 

@@ -3120,12 +3410,16 @@ mod tests {
             Default::default(),
             vec![],
             None,
+            None,
         );
-        config
-            .update(serde_json::json!({
-                "procMacro": {"server": project_root().display().to_string()}
-            }))
-            .unwrap();
+        let mut change = ConfigChange::default();
+        change.change_client_config(serde_json::json!({
+            "procMacro" : {
+                "server": project_root().display().to_string(),
+        }}));
+
+        let mut error_sink = ConfigError::default();
+        config = config.apply_change(change, &mut error_sink);
         assert_eq!(config.proc_macro_srv(), Some(AbsPathBuf::try_from(project_root()).unwrap()));
     }
 

@@ -3136,12 +3430,19 @@ mod tests {
             Default::default(),
             vec![],
             None,
+            None,
        );
-        config
-            .update(serde_json::json!({
-                "procMacro": {"server": "./server"}
-            }))
-            .unwrap();
+        let mut change = ConfigChange::default();
+        change.change_client_config(serde_json::json!({
+            "procMacro" : {
+                "server": "./server"
+        }}));
+
+        let mut error_sink = ConfigError::default();
+        config = config.apply_change(change, &mut error_sink);
+
         assert_eq!(
             config.proc_macro_srv(),
             Some(AbsPathBuf::try_from(project_root().join("./server")).unwrap())

@@ -3155,12 +3456,17 @@ mod tests {
             Default::default(),
             vec![],
             None,
+            None,
         );
-        config
-            .update(serde_json::json!({
-                "rust": { "analyzerTargetDir": null }
-            }))
-            .unwrap();
+        let mut change = ConfigChange::default();
+        change.change_client_config(serde_json::json!({
+            "rust" : { "analyzerTargetDir" : null }
+        }));
+
+        let mut error_sink = ConfigError::default();
+        config = config.apply_change(change, &mut error_sink);
         assert_eq!(config.cargo_targetDir(), &None);
         assert!(
             matches!(config.flycheck(), FlycheckConfig::CargoCommand { options, .. } if options.target_dir.is_none())

@@ -3174,12 +3480,17 @@ mod tests {
             Default::default(),
             vec![],
             None,
+            None,
         );
-        config
-            .update(serde_json::json!({
-                "rust": { "analyzerTargetDir": true }
-            }))
-            .unwrap();
+        let mut change = ConfigChange::default();
+        change.change_client_config(serde_json::json!({
+            "rust" : { "analyzerTargetDir" : true }
+        }));
+
+        let mut error_sink = ConfigError::default();
+        config = config.apply_change(change, &mut error_sink);
+
         assert_eq!(config.cargo_targetDir(), &Some(TargetDirectory::UseSubdirectory(true)));
         assert!(
             matches!(config.flycheck(), FlycheckConfig::CargoCommand { options, .. } if options.target_dir == Some(Utf8PathBuf::from("target/rust-analyzer")))

@@ -3193,12 +3504,17 @@ mod tests {
             Default::default(),
             vec![],
             None,
+            None,
         );
-        config
-            .update(serde_json::json!({
-                "rust": { "analyzerTargetDir": "other_folder" }
-            }))
-            .unwrap();
+        let mut change = ConfigChange::default();
+        change.change_client_config(serde_json::json!({
+            "rust" : { "analyzerTargetDir" : "other_folder" }
+        }));
+
+        let mut error_sink = ConfigError::default();
+        config = config.apply_change(change, &mut error_sink);
+
         assert_eq!(
             config.cargo_targetDir(),
             &Some(TargetDirectory::Directory(Utf8PathBuf::from("other_folder")))
@ -154,7 +154,7 @@ pub(crate) fn fetch_native_diagnostics(
|
||||||
.copied()
|
.copied()
|
||||||
.filter_map(|file_id| {
|
.filter_map(|file_id| {
|
||||||
let line_index = snapshot.file_line_index(file_id).ok()?;
|
let line_index = snapshot.file_line_index(file_id).ok()?;
|
||||||
let source_root = snapshot.analysis.source_root(file_id).ok()?;
|
let source_root = snapshot.analysis.source_root_id(file_id).ok()?;
|
||||||
|
|
||||||
let diagnostics = snapshot
|
let diagnostics = snapshot
|
||||||
.analysis
|
.analysis
|
||||||
|
|
|
@ -547,6 +547,7 @@ mod tests {
|
||||||
ClientCapabilities::default(),
|
ClientCapabilities::default(),
|
||||||
Vec::new(),
|
Vec::new(),
|
||||||
None,
|
None,
|
||||||
|
None,
|
||||||
),
|
),
|
||||||
);
|
);
|
||||||
let snap = state.snapshot();
|
let snap = state.snapshot();
|
||||||
|
|
|
@ -25,13 +25,16 @@ use project_model::{
|
||||||
use rustc_hash::{FxHashMap, FxHashSet};
|
use rustc_hash::{FxHashMap, FxHashSet};
|
||||||
use tracing::{span, Level};
|
use tracing::{span, Level};
|
||||||
use triomphe::Arc;
|
use triomphe::Arc;
|
||||||
use vfs::{AnchoredPathBuf, Vfs};
|
use vfs::{AnchoredPathBuf, Vfs, VfsPath};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
config::{Config, ConfigError},
|
config::{Config, ConfigChange, ConfigError},
|
||||||
diagnostics::{CheckFixes, DiagnosticCollection},
|
diagnostics::{CheckFixes, DiagnosticCollection},
|
||||||
line_index::{LineEndings, LineIndex},
|
line_index::{LineEndings, LineIndex},
|
||||||
lsp::{from_proto, to_proto::url_from_abs_path},
|
lsp::{
|
||||||
|
from_proto::{self},
|
||||||
|
to_proto::url_from_abs_path,
|
||||||
|
},
|
||||||
lsp_ext,
|
lsp_ext,
|
||||||
main_loop::Task,
|
main_loop::Task,
|
||||||
mem_docs::MemDocs,
|
mem_docs::MemDocs,
|
||||||
|
@ -71,7 +74,7 @@ pub(crate) struct GlobalState {
|
||||||
pub(crate) mem_docs: MemDocs,
|
pub(crate) mem_docs: MemDocs,
|
||||||
pub(crate) source_root_config: SourceRootConfig,
|
pub(crate) source_root_config: SourceRootConfig,
|
||||||
/// A mapping that maps a local source root's `SourceRootId` to its parent's `SourceRootId`, if it has one.
|
/// A mapping that maps a local source root's `SourceRootId` to its parent's `SourceRootId`, if it has one.
|
||||||
pub(crate) local_roots_parent_map: FxHashMap<SourceRootId, SourceRootId>,
|
pub(crate) local_roots_parent_map: Arc<FxHashMap<SourceRootId, SourceRootId>>,
|
||||||
pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
|
pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
|
||||||
|
|
||||||
// status
|
// status
|
||||||
|
@ -213,7 +216,7 @@ impl GlobalState {
|
||||||
shutdown_requested: false,
|
shutdown_requested: false,
|
||||||
last_reported_status: None,
|
last_reported_status: None,
|
||||||
source_root_config: SourceRootConfig::default(),
|
source_root_config: SourceRootConfig::default(),
|
||||||
local_roots_parent_map: FxHashMap::default(),
|
local_roots_parent_map: Arc::new(FxHashMap::default()),
|
||||||
config_errors: Default::default(),
|
config_errors: Default::default(),
|
||||||
|
|
||||||
proc_macro_clients: Arc::from_iter([]),
|
proc_macro_clients: Arc::from_iter([]),
|
||||||
|
@ -254,6 +257,24 @@ impl GlobalState {
|
||||||
|
|
||||||
pub(crate) fn process_changes(&mut self) -> bool {
|
pub(crate) fn process_changes(&mut self) -> bool {
|
||||||
let _p = span!(Level::INFO, "GlobalState::process_changes").entered();
|
let _p = span!(Level::INFO, "GlobalState::process_changes").entered();
|
||||||
|
|
||||||
|
// We cannot directly resolve a change in a ratoml file to a format
|
||||||
|
// that can be used by the config module because config talks
|
||||||
|
// in `SourceRootId`s instead of `FileId`s and `FileId` -> `SourceRootId`
|
||||||
|
// mapping is not ready until `AnalysisHost::apply_changes` has been called.
|
||||||
|
let mut modified_ratoml_files: FxHashMap<FileId, vfs::VfsPath> = FxHashMap::default();
|
||||||
|
let mut ratoml_text_map: FxHashMap<FileId, (vfs::VfsPath, Option<String>)> =
|
||||||
|
FxHashMap::default();
|
||||||
|
|
||||||
|
let mut user_config_file: Option<(FileId, Option<String>)> = None;
|
||||||
|
let mut root_path_ratoml: Option<(FileId, Option<String>)> = None;
|
||||||
|
|
||||||
|
let root_vfs_path = {
|
||||||
|
let mut root_vfs_path = self.config.root_path().to_path_buf();
|
||||||
|
root_vfs_path.push("rust-analyzer.toml");
|
||||||
|
VfsPath::new_real_path(root_vfs_path.to_string())
|
||||||
|
};
|
||||||
|
|
||||||
let (change, modified_rust_files, workspace_structure_change) = {
|
let (change, modified_rust_files, workspace_structure_change) = {
|
||||||
let mut change = ChangeWithProcMacros::new();
|
let mut change = ChangeWithProcMacros::new();
|
||||||
let mut guard = self.vfs.write();
|
let mut guard = self.vfs.write();
|
||||||
|
@ -273,6 +294,11 @@ impl GlobalState {
|
||||||
let mut modified_rust_files = vec![];
|
let mut modified_rust_files = vec![];
|
||||||
for file in changed_files.into_values() {
|
for file in changed_files.into_values() {
|
||||||
let vfs_path = vfs.file_path(file.file_id);
|
let vfs_path = vfs.file_path(file.file_id);
|
||||||
|
if let Some(("rust-analyzer", Some("toml"))) = vfs_path.name_and_extension() {
|
||||||
|
// Remember ids to use them after `apply_changes`
|
||||||
|
modified_ratoml_files.insert(file.file_id, vfs_path.clone());
|
||||||
|
}
|
||||||
|
|
||||||
if let Some(path) = vfs_path.as_path() {
|
if let Some(path) = vfs_path.as_path() {
|
||||||
has_structure_changes |= file.is_created_or_deleted();
|
has_structure_changes |= file.is_created_or_deleted();
|
||||||
|
|
||||||
|
@ -311,10 +337,30 @@ impl GlobalState {
|
||||||
}
|
}
|
||||||
let (vfs, line_endings_map) = &mut *RwLockUpgradableReadGuard::upgrade(guard);
|
let (vfs, line_endings_map) = &mut *RwLockUpgradableReadGuard::upgrade(guard);
|
||||||
bytes.into_iter().for_each(|(file_id, text)| match text {
|
bytes.into_iter().for_each(|(file_id, text)| match text {
|
||||||
None => change.change_file(file_id, None),
|
None => {
|
||||||
|
change.change_file(file_id, None);
|
||||||
|
if let Some(vfs_path) = modified_ratoml_files.get(&file_id) {
|
||||||
|
if vfs_path == self.config.user_config_path() {
|
||||||
|
user_config_file = Some((file_id, None));
|
||||||
|
} else if vfs_path == &root_vfs_path {
|
||||||
|
root_path_ratoml = Some((file_id, None));
|
||||||
|
} else {
|
||||||
|
ratoml_text_map.insert(file_id, (vfs_path.clone(), None));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
Some((text, line_endings)) => {
|
Some((text, line_endings)) => {
|
||||||
line_endings_map.insert(file_id, line_endings);
|
line_endings_map.insert(file_id, line_endings);
|
||||||
change.change_file(file_id, Some(text));
|
change.change_file(file_id, Some(text.clone()));
|
||||||
|
if let Some(vfs_path) = modified_ratoml_files.get(&file_id) {
|
||||||
|
if vfs_path == self.config.user_config_path() {
|
||||||
|
user_config_file = Some((file_id, Some(text.clone())));
|
||||||
|
} else if vfs_path == &root_vfs_path {
|
||||||
|
root_path_ratoml = Some((file_id, Some(text.clone())));
|
||||||
|
} else {
|
||||||
|
ratoml_text_map.insert(file_id, (vfs_path.clone(), Some(text.clone())));
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
if has_structure_changes {
|
if has_structure_changes {
|
||||||
|
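The match above routes each changed `rust-analyzer.toml` into one of three buckets. A hypothetical helper condensing that decision (`RatomlKind` and `classify_ratoml` do not exist in the codebase; they are illustrative only):

use vfs::VfsPath;

enum RatomlKind { UserConfig, WorkspaceRoot, CrateLocal }

fn classify_ratoml(path: &VfsPath, user_config: &VfsPath, root_ratoml: &VfsPath) -> RatomlKind {
    if path == user_config {
        // the global user config file
        RatomlKind::UserConfig
    } else if path == root_ratoml {
        // the rust-analyzer.toml sitting at the workspace root
        RatomlKind::WorkspaceRoot
    } else {
        // a crate-level rust-analyzer.toml, keyed by its source root later on
        RatomlKind::CrateLocal
    }
}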
@ -327,6 +373,54 @@ impl GlobalState {
|
||||||
let _p = span!(Level::INFO, "GlobalState::process_changes/apply_change").entered();
|
let _p = span!(Level::INFO, "GlobalState::process_changes/apply_change").entered();
|
||||||
self.analysis_host.apply_change(change);
|
self.analysis_host.apply_change(change);
|
||||||
|
|
||||||
|
let config_change = {
|
||||||
|
let mut change = ConfigChange::default();
|
||||||
|
let snap = self.analysis_host.analysis();
|
||||||
|
|
||||||
|
for (file_id, (vfs_path, text)) in ratoml_text_map {
|
||||||
|
// If a change has been made to a ratoml file that
|
||||||
|
// belongs to a non-local source root, we will ignore it,
|
||||||
|
// as it doesn't make sense for users to use external config files.
|
||||||
|
if let Ok(source_root) = snap.source_root_id(file_id) {
|
||||||
|
if let Ok(true) = snap.is_local_source_root(source_root) {
|
||||||
|
if let Some((old_file, old_path, old_text)) =
|
||||||
|
change.change_ratoml(source_root, file_id, vfs_path.clone(), text)
|
||||||
|
{
|
||||||
|
// SourceRoot has more than one ratoml file. In this case the lexicographically smaller path wins.
|
||||||
|
if old_path < vfs_path {
|
||||||
|
span!(Level::ERROR, "Two `rust-analyzer.toml` files were found inside the same crate. {vfs_path} has no effect.");
|
||||||
|
// Put the old one back in.
|
||||||
|
change.change_ratoml(source_root, old_file, old_path, old_text);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Mapping to a SourceRoot should always end up in `Ok`
|
||||||
|
span!(Level::ERROR, "Mapping to SourceRootId failed.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some((file_id, Some(txt))) = user_config_file {
|
||||||
|
change.change_user_config(Some((file_id, txt)));
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some((file_id, Some(txt))) = root_path_ratoml {
|
||||||
|
change.change_root_ratoml(Some((file_id, txt)));
|
||||||
|
}
|
||||||
|
|
||||||
|
change
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut error_sink = ConfigError::default();
|
||||||
|
let config = self.config.apply_change(config_change, &mut error_sink);
|
||||||
|
|
||||||
|
if config.should_update() {
|
||||||
|
self.update_configuration(config);
|
||||||
|
} else {
|
||||||
|
// No global or client level config was changed. So we can just naively replace config.
|
||||||
|
self.config = Arc::new(config);
|
||||||
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
if !matches!(&workspace_structure_change, Some((.., true))) {
|
if !matches!(&workspace_structure_change, Some((.., true))) {
|
||||||
_ = self
|
_ = self
|
||||||
|
|
|
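Two details of the hunk above are worth calling out. First, once all per-file changes are folded into the `ConfigChange`, `should_update()` decides whether a full `update_configuration` is needed or whether the new `Config` can simply replace the old one. Second, when a single source root ends up with two `rust-analyzer.toml` files, the lexicographically smaller path wins; a hypothetical helper expressing that tie-break, relying only on the `<` comparison the hunk itself uses:

fn ratoml_winner(a: vfs::VfsPath, b: vfs::VfsPath) -> (vfs::VfsPath, vfs::VfsPath) {
    // Returns (winner, ignored); the ignored file has no effect and is reported.
    if a < b { (a, b) } else { (b, a) }
}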
@ -13,7 +13,7 @@ use triomphe::Arc;
|
||||||
use vfs::{AbsPathBuf, ChangeKind, VfsPath};
|
use vfs::{AbsPathBuf, ChangeKind, VfsPath};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
config::Config,
|
config::{Config, ConfigChange, ConfigError},
|
||||||
global_state::GlobalState,
|
global_state::GlobalState,
|
||||||
lsp::{from_proto, utils::apply_document_changes},
|
lsp::{from_proto, utils::apply_document_changes},
|
||||||
lsp_ext::{self, RunFlycheckParams},
|
lsp_ext::{self, RunFlycheckParams},
|
||||||
|
@ -71,6 +71,7 @@ pub(crate) fn handle_did_open_text_document(
|
||||||
tracing::error!("duplicate DidOpenTextDocument: {}", path);
|
tracing::error!("duplicate DidOpenTextDocument: {}", path);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
tracing::info!("New file content set {:?}", params.text_document.text);
|
||||||
state.vfs.write().0.set_file_contents(path, Some(params.text_document.text.into_bytes()));
|
state.vfs.write().0.set_file_contents(path, Some(params.text_document.text.into_bytes()));
|
||||||
if state.config.notifications().unindexed_project {
|
if state.config.notifications().unindexed_project {
|
||||||
tracing::debug!("queuing task");
|
tracing::debug!("queuing task");
|
||||||
|
@ -196,10 +197,11 @@ pub(crate) fn handle_did_change_configuration(
|
||||||
}
|
}
|
||||||
(None, Some(mut configs)) => {
|
(None, Some(mut configs)) => {
|
||||||
if let Some(json) = configs.get_mut(0) {
|
if let Some(json) = configs.get_mut(0) {
|
||||||
// Note that json can be null according to the spec if the client can't
|
|
||||||
// provide a configuration. This is handled in Config::update below.
|
|
||||||
let mut config = Config::clone(&*this.config);
|
let mut config = Config::clone(&*this.config);
|
||||||
this.config_errors = config.update(json.take()).err();
|
let mut change = ConfigChange::default();
|
||||||
|
change.change_client_config(json.take());
|
||||||
|
let mut error_sink = ConfigError::default();
|
||||||
|
config = config.apply_change(change, &mut error_sink);
|
||||||
this.update_configuration(config);
|
this.update_configuration(config);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -367,7 +367,7 @@ pub(crate) fn handle_join_lines(
|
||||||
let _p = tracing::span!(tracing::Level::INFO, "handle_join_lines").entered();
|
let _p = tracing::span!(tracing::Level::INFO, "handle_join_lines").entered();
|
||||||
|
|
||||||
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
|
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
|
||||||
let source_root = snap.analysis.source_root(file_id)?;
|
let source_root = snap.analysis.source_root_id(file_id)?;
|
||||||
let config = snap.config.join_lines(Some(source_root));
|
let config = snap.config.join_lines(Some(source_root));
|
||||||
let line_index = snap.file_line_index(file_id)?;
|
let line_index = snap.file_line_index(file_id)?;
|
||||||
|
|
||||||
|
@ -949,7 +949,7 @@ pub(crate) fn handle_completion(
|
||||||
let completion_trigger_character =
|
let completion_trigger_character =
|
||||||
context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next());
|
context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next());
|
||||||
|
|
||||||
let source_root = snap.analysis.source_root(position.file_id)?;
|
let source_root = snap.analysis.source_root_id(position.file_id)?;
|
||||||
let completion_config = &snap.config.completion(Some(source_root));
|
let completion_config = &snap.config.completion(Some(source_root));
|
||||||
// FIXME: We should fix up the position when retrying the cancelled request instead
|
// FIXME: We should fix up the position when retrying the cancelled request instead
|
||||||
position.offset = position.offset.min(line_index.index.len());
|
position.offset = position.offset.min(line_index.index.len());
|
||||||
|
@ -997,7 +997,7 @@ pub(crate) fn handle_completion_resolve(
|
||||||
let Ok(offset) = from_proto::offset(&line_index, resolve_data.position.position) else {
|
let Ok(offset) = from_proto::offset(&line_index, resolve_data.position.position) else {
|
||||||
return Ok(original_completion);
|
return Ok(original_completion);
|
||||||
};
|
};
|
||||||
let source_root = snap.analysis.source_root(file_id)?;
|
let source_root = snap.analysis.source_root_id(file_id)?;
|
||||||
|
|
||||||
let additional_edits = snap
|
let additional_edits = snap
|
||||||
.analysis
|
.analysis
|
||||||
|
@ -1229,7 +1229,7 @@ pub(crate) fn handle_code_action(
|
||||||
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
|
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
|
||||||
let line_index = snap.file_line_index(file_id)?;
|
let line_index = snap.file_line_index(file_id)?;
|
||||||
let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
|
let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
|
||||||
let source_root = snap.analysis.source_root(file_id)?;
|
let source_root = snap.analysis.source_root_id(file_id)?;
|
||||||
|
|
||||||
let mut assists_config = snap.config.assist(Some(source_root));
|
let mut assists_config = snap.config.assist(Some(source_root));
|
||||||
assists_config.allowed = params
|
assists_config.allowed = params
|
||||||
|
@ -1307,7 +1307,7 @@ pub(crate) fn handle_code_action_resolve(
|
||||||
let line_index = snap.file_line_index(file_id)?;
|
let line_index = snap.file_line_index(file_id)?;
|
||||||
let range = from_proto::text_range(&line_index, params.code_action_params.range)?;
|
let range = from_proto::text_range(&line_index, params.code_action_params.range)?;
|
||||||
let frange = FileRange { file_id, range };
|
let frange = FileRange { file_id, range };
|
||||||
let source_root = snap.analysis.source_root(file_id)?;
|
let source_root = snap.analysis.source_root_id(file_id)?;
|
||||||
|
|
||||||
let mut assists_config = snap.config.assist(Some(source_root));
|
let mut assists_config = snap.config.assist(Some(source_root));
|
||||||
assists_config.allowed = params
|
assists_config.allowed = params
|
||||||
|
@ -1460,7 +1460,7 @@ pub(crate) fn handle_document_highlight(
|
||||||
let _p = tracing::span!(tracing::Level::INFO, "handle_document_highlight").entered();
|
let _p = tracing::span!(tracing::Level::INFO, "handle_document_highlight").entered();
|
||||||
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
|
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
|
||||||
let line_index = snap.file_line_index(position.file_id)?;
|
let line_index = snap.file_line_index(position.file_id)?;
|
||||||
let source_root = snap.analysis.source_root(position.file_id)?;
|
let source_root = snap.analysis.source_root_id(position.file_id)?;
|
||||||
|
|
||||||
let refs = match snap
|
let refs = match snap
|
||||||
.analysis
|
.analysis
|
||||||
|
@ -1511,7 +1511,7 @@ pub(crate) fn handle_inlay_hints(
|
||||||
params.range,
|
params.range,
|
||||||
)?;
|
)?;
|
||||||
let line_index = snap.file_line_index(file_id)?;
|
let line_index = snap.file_line_index(file_id)?;
|
||||||
let source_root = snap.analysis.source_root(file_id)?;
|
let source_root = snap.analysis.source_root_id(file_id)?;
|
||||||
let range = TextRange::new(
|
let range = TextRange::new(
|
||||||
range.start().min(line_index.index.len()),
|
range.start().min(line_index.index.len()),
|
||||||
range.end().min(line_index.index.len()),
|
range.end().min(line_index.index.len()),
|
||||||
|
@ -1553,7 +1553,7 @@ pub(crate) fn handle_inlay_hints_resolve(
|
||||||
|
|
||||||
let line_index = snap.file_line_index(file_id)?;
|
let line_index = snap.file_line_index(file_id)?;
|
||||||
let hint_position = from_proto::offset(&line_index, original_hint.position)?;
|
let hint_position = from_proto::offset(&line_index, original_hint.position)?;
|
||||||
let source_root = snap.analysis.source_root(file_id)?;
|
let source_root = snap.analysis.source_root_id(file_id)?;
|
||||||
|
|
||||||
let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints(Some(source_root));
|
let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints(Some(source_root));
|
||||||
forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty();
|
forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty();
|
||||||
|
@ -1687,7 +1687,7 @@ pub(crate) fn handle_semantic_tokens_full(
|
||||||
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
|
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
|
||||||
let text = snap.analysis.file_text(file_id)?;
|
let text = snap.analysis.file_text(file_id)?;
|
||||||
let line_index = snap.file_line_index(file_id)?;
|
let line_index = snap.file_line_index(file_id)?;
|
||||||
let source_root = snap.analysis.source_root(file_id)?;
|
let source_root = snap.analysis.source_root_id(file_id)?;
|
||||||
|
|
||||||
let mut highlight_config = snap.config.highlighting_config(Some(source_root));
|
let mut highlight_config = snap.config.highlighting_config(Some(source_root));
|
||||||
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
|
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
|
||||||
|
@ -1718,7 +1718,7 @@ pub(crate) fn handle_semantic_tokens_full_delta(
|
||||||
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
|
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
|
||||||
let text = snap.analysis.file_text(file_id)?;
|
let text = snap.analysis.file_text(file_id)?;
|
||||||
let line_index = snap.file_line_index(file_id)?;
|
let line_index = snap.file_line_index(file_id)?;
|
||||||
let source_root = snap.analysis.source_root(file_id)?;
|
let source_root = snap.analysis.source_root_id(file_id)?;
|
||||||
|
|
||||||
let mut highlight_config = snap.config.highlighting_config(Some(source_root));
|
let mut highlight_config = snap.config.highlighting_config(Some(source_root));
|
||||||
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
|
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
|
||||||
|
@ -1762,7 +1762,7 @@ pub(crate) fn handle_semantic_tokens_range(
|
||||||
let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
|
let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
|
||||||
let text = snap.analysis.file_text(frange.file_id)?;
|
let text = snap.analysis.file_text(frange.file_id)?;
|
||||||
let line_index = snap.file_line_index(frange.file_id)?;
|
let line_index = snap.file_line_index(frange.file_id)?;
|
||||||
let source_root = snap.analysis.source_root(frange.file_id)?;
|
let source_root = snap.analysis.source_root_id(frange.file_id)?;
|
||||||
|
|
||||||
let mut highlight_config = snap.config.highlighting_config(Some(source_root));
|
let mut highlight_config = snap.config.highlighting_config(Some(source_root));
|
||||||
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
|
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
|
||||||
|
@ -1991,8 +1991,8 @@ fn goto_type_action_links(
|
||||||
snap: &GlobalStateSnapshot,
|
snap: &GlobalStateSnapshot,
|
||||||
nav_targets: &[HoverGotoTypeData],
|
nav_targets: &[HoverGotoTypeData],
|
||||||
) -> Option<lsp_ext::CommandLinkGroup> {
|
) -> Option<lsp_ext::CommandLinkGroup> {
|
||||||
if nav_targets.is_empty()
|
if !snap.config.hover_actions().goto_type_def
|
||||||
|| !snap.config.hover_actions().goto_type_def
|
|| nav_targets.is_empty()
|
||||||
|| !snap.config.client_commands().goto_location
|
|| !snap.config.client_commands().goto_location
|
||||||
{
|
{
|
||||||
return None;
|
return None;
|
||||||
|
|
|
@ -18,7 +18,6 @@ mod cargo_target_spec;
|
||||||
mod diagnostics;
|
mod diagnostics;
|
||||||
mod diff;
|
mod diff;
|
||||||
mod dispatch;
|
mod dispatch;
|
||||||
mod global_state;
|
|
||||||
mod hack_recover_crate_name;
|
mod hack_recover_crate_name;
|
||||||
mod line_index;
|
mod line_index;
|
||||||
mod main_loop;
|
mod main_loop;
|
||||||
|
@ -40,6 +39,7 @@ pub mod tracing {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub mod config;
|
pub mod config;
|
||||||
|
pub mod global_state;
|
||||||
pub mod lsp;
|
pub mod lsp;
|
||||||
use self::lsp::ext as lsp_ext;
|
use self::lsp::ext as lsp_ext;
|
||||||
|
|
||||||
|
|
|
@ -186,6 +186,11 @@ impl GlobalState {
|
||||||
scheme: None,
|
scheme: None,
|
||||||
pattern: Some("**/Cargo.lock".into()),
|
pattern: Some("**/Cargo.lock".into()),
|
||||||
},
|
},
|
||||||
|
lsp_types::DocumentFilter {
|
||||||
|
language: None,
|
||||||
|
scheme: None,
|
||||||
|
pattern: Some("**/rust-analyzer.toml".into()),
|
||||||
|
},
|
||||||
]),
|
]),
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
|
@ -24,14 +24,16 @@ use ide_db::{
|
||||||
};
|
};
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use load_cargo::{load_proc_macro, ProjectFolders};
|
use load_cargo::{load_proc_macro, ProjectFolders};
|
||||||
|
use lsp_types::FileSystemWatcher;
|
||||||
use proc_macro_api::ProcMacroServer;
|
use proc_macro_api::ProcMacroServer;
|
||||||
use project_model::{ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, WorkspaceBuildScripts};
|
use project_model::{ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, WorkspaceBuildScripts};
|
||||||
use stdx::{format_to, thread::ThreadIntent};
|
use stdx::{format_to, thread::ThreadIntent};
|
||||||
|
use tracing::error;
|
||||||
use triomphe::Arc;
|
use triomphe::Arc;
|
||||||
use vfs::{AbsPath, AbsPathBuf, ChangeKind};
|
use vfs::{AbsPath, AbsPathBuf, ChangeKind};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
config::{Config, FilesWatcher, LinkedProject},
|
config::{Config, ConfigChange, ConfigError, FilesWatcher, LinkedProject},
|
||||||
global_state::GlobalState,
|
global_state::GlobalState,
|
||||||
lsp_ext,
|
lsp_ext,
|
||||||
main_loop::Task,
|
main_loop::Task,
|
||||||
|
@ -443,40 +445,61 @@ impl GlobalState {
|
||||||
let filter =
|
let filter =
|
||||||
self.workspaces.iter().flat_map(|ws| ws.to_roots()).filter(|it| it.is_local);
|
self.workspaces.iter().flat_map(|ws| ws.to_roots()).filter(|it| it.is_local);
|
||||||
|
|
||||||
let watchers = if self.config.did_change_watched_files_relative_pattern_support() {
|
let mut watchers: Vec<FileSystemWatcher> =
|
||||||
// When relative patterns are supported by the client, prefer using them
|
if self.config.did_change_watched_files_relative_pattern_support() {
|
||||||
filter
|
// When relative patterns are supported by the client, prefer using them
|
||||||
.flat_map(|root| {
|
filter
|
||||||
root.include.into_iter().flat_map(|base| {
|
.flat_map(|root| {
|
||||||
[(base.clone(), "**/*.rs"), (base, "**/Cargo.{lock,toml}")]
|
root.include.into_iter().flat_map(|base| {
|
||||||
|
[
|
||||||
|
(base.clone(), "**/*.rs"),
|
||||||
|
(base.clone(), "**/Cargo.{lock,toml}"),
|
||||||
|
(base, "**/rust-analyzer.toml"),
|
||||||
|
]
|
||||||
|
})
|
||||||
})
|
})
|
||||||
})
|
.map(|(base, pat)| lsp_types::FileSystemWatcher {
|
||||||
.map(|(base, pat)| lsp_types::FileSystemWatcher {
|
glob_pattern: lsp_types::GlobPattern::Relative(
|
||||||
glob_pattern: lsp_types::GlobPattern::Relative(
|
lsp_types::RelativePattern {
|
||||||
lsp_types::RelativePattern {
|
base_uri: lsp_types::OneOf::Right(
|
||||||
base_uri: lsp_types::OneOf::Right(
|
lsp_types::Url::from_file_path(base).unwrap(),
|
||||||
lsp_types::Url::from_file_path(base).unwrap(),
|
),
|
||||||
),
|
pattern: pat.to_owned(),
|
||||||
pattern: pat.to_owned(),
|
},
|
||||||
},
|
),
|
||||||
),
|
kind: None,
|
||||||
kind: None,
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
} else {
|
|
||||||
// When they're not, integrate the base to make them into absolute patterns
|
|
||||||
filter
|
|
||||||
.flat_map(|root| {
|
|
||||||
root.include.into_iter().flat_map(|base| {
|
|
||||||
[format!("{base}/**/*.rs"), format!("{base}/**/Cargo.{{lock,toml}}")]
|
|
||||||
})
|
})
|
||||||
})
|
.collect()
|
||||||
|
} else {
|
||||||
|
// When they're not, integrate the base to make them into absolute patterns
|
||||||
|
filter
|
||||||
|
.flat_map(|root| {
|
||||||
|
root.include.into_iter().flat_map(|it| {
|
||||||
|
[
|
||||||
|
format!("{it}/**/*.rs"),
|
||||||
|
// FIXME @alibektas : Following dbarsky's recommendation I merged toml and lock patterns into one.
|
||||||
|
// Is this correct?
|
||||||
|
format!("{it}/**/Cargo.{{toml,lock}}"),
|
||||||
|
format!("{it}/**/rust-analyzer.toml"),
|
||||||
|
]
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.map(|glob_pattern| lsp_types::FileSystemWatcher {
|
||||||
|
glob_pattern: lsp_types::GlobPattern::String(glob_pattern),
|
||||||
|
kind: None,
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
};
|
||||||
|
|
||||||
|
watchers.extend(
|
||||||
|
iter::once(self.config.user_config_path().to_string())
|
||||||
|
.chain(iter::once(self.config.root_ratoml_path().to_string()))
|
||||||
.map(|glob_pattern| lsp_types::FileSystemWatcher {
|
.map(|glob_pattern| lsp_types::FileSystemWatcher {
|
||||||
glob_pattern: lsp_types::GlobPattern::String(glob_pattern),
|
glob_pattern: lsp_types::GlobPattern::String(glob_pattern),
|
||||||
kind: None,
|
kind: None,
|
||||||
})
|
})
|
||||||
.collect()
|
.collect::<Vec<FileSystemWatcher>>(),
|
||||||
};
|
);
|
||||||
|
|
||||||
let registration_options =
|
let registration_options =
|
||||||
lsp_types::DidChangeWatchedFilesRegistrationOptions { watchers };
|
lsp_types::DidChangeWatchedFilesRegistrationOptions { watchers };
|
||||||
|
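To summarize the watcher hunk above: each workspace root is now watched for three glob suffixes, regardless of whether the client supports relative patterns. `watch_globs` below is a hypothetical helper showing only the absolute-pattern form; the pattern strings are taken from the hunk, the LSP registration plumbing is elided.

fn watch_globs(base: &str) -> Vec<String> {
    vec![
        format!("{base}/**/*.rs"),
        format!("{base}/**/Cargo.{{toml,lock}}"),
        format!("{base}/**/rust-analyzer.toml"),
    ]
}

On top of these, the user config path and the workspace-root `rust-analyzer.toml` path are appended as two additional string watchers.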
@ -548,7 +571,41 @@ impl GlobalState {
|
||||||
version: self.vfs_config_version,
|
version: self.vfs_config_version,
|
||||||
});
|
});
|
||||||
self.source_root_config = project_folders.source_root_config;
|
self.source_root_config = project_folders.source_root_config;
|
||||||
self.local_roots_parent_map = self.source_root_config.source_root_parent_map();
|
self.local_roots_parent_map = Arc::new(self.source_root_config.source_root_parent_map());
|
||||||
|
|
||||||
|
let user_config_path = self.config.user_config_path();
|
||||||
|
let root_ratoml_path = self.config.root_ratoml_path();
|
||||||
|
|
||||||
|
{
|
||||||
|
let vfs = &mut self.vfs.write().0;
|
||||||
|
let loader = &mut self.loader;
|
||||||
|
|
||||||
|
if vfs.file_id(user_config_path).is_none() {
|
||||||
|
if let Some(user_cfg_abs) = user_config_path.as_path() {
|
||||||
|
let contents = loader.handle.load_sync(user_cfg_abs);
|
||||||
|
vfs.set_file_contents(user_config_path.clone(), contents);
|
||||||
|
} else {
|
||||||
|
error!("Non-abs virtual path for user config.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if vfs.file_id(root_ratoml_path).is_none() {
|
||||||
|
// FIXME @alibektas : Sometimes root_path_ratoml collides with a regular ratoml.
|
||||||
|
// Although this shouldn't be a problem because everything is mapped to a `FileId`.
|
||||||
|
// We may want to further think about this.
|
||||||
|
if let Some(root_ratoml_abs) = root_ratoml_path.as_path() {
|
||||||
|
let contents = loader.handle.load_sync(root_ratoml_abs);
|
||||||
|
vfs.set_file_contents(root_ratoml_path.clone(), contents);
|
||||||
|
} else {
|
||||||
|
error!("Non-abs virtual path for user config.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut config_change = ConfigChange::default();
|
||||||
|
config_change.change_source_root_parent_map(self.local_roots_parent_map.clone());
|
||||||
|
let mut error_sink = ConfigError::default();
|
||||||
|
self.config = Arc::new(self.config.apply_change(config_change, &mut error_sink));
|
||||||
|
|
||||||
self.recreate_crate_graph(cause);
|
self.recreate_crate_graph(cause);
|
||||||
|
|
||||||
|
|
|
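A hedged sketch of the preload step above, using the vfs calls shown in the hunk (`Vfs::file_id`, `VfsPath::as_path`, `Vfs::set_file_contents`); the loader is abstracted behind a closure here because its concrete type is not visible in the diff, and `preload_config_file` is a hypothetical name.

fn preload_config_file(
    vfs: &mut vfs::Vfs,
    path: &vfs::VfsPath,
    load_sync: impl FnOnce(&vfs::AbsPath) -> Option<Vec<u8>>,
) {
    // Only load the file if the VFS has not seen it yet.
    if vfs.file_id(path).is_none() {
        match path.as_path() {
            Some(abs) => {
                vfs.set_file_contents(path.clone(), load_sync(abs));
            }
            None => tracing::error!("non-absolute virtual path for a config file: {}", path),
        }
    }
}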
@ -13,6 +13,7 @@ use tracing_tree::HierarchicalLayer;
|
||||||
|
|
||||||
use crate::tracing::hprof;
|
use crate::tracing::hprof;
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
pub struct Config<T> {
|
pub struct Config<T> {
|
||||||
pub writer: T,
|
pub writer: T,
|
||||||
pub filter: String,
|
pub filter: String,
|
||||||
|
|
File diff suppressed because it is too large
|
@ -9,7 +9,10 @@ use crossbeam_channel::{after, select, Receiver};
|
||||||
use lsp_server::{Connection, Message, Notification, Request};
|
use lsp_server::{Connection, Message, Notification, Request};
|
||||||
use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url};
|
use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url};
|
||||||
use paths::{Utf8Path, Utf8PathBuf};
|
use paths::{Utf8Path, Utf8PathBuf};
|
||||||
use rust_analyzer::{config::Config, lsp, main_loop};
|
use rust_analyzer::{
|
||||||
|
config::{Config, ConfigChange, ConfigError},
|
||||||
|
lsp, main_loop,
|
||||||
|
};
|
||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
use serde_json::{json, to_string_pretty, Value};
|
use serde_json::{json, to_string_pretty, Value};
|
||||||
use test_utils::FixtureWithProjectMeta;
|
use test_utils::FixtureWithProjectMeta;
|
||||||
|
@ -24,6 +27,7 @@ pub(crate) struct Project<'a> {
|
||||||
roots: Vec<Utf8PathBuf>,
|
roots: Vec<Utf8PathBuf>,
|
||||||
config: serde_json::Value,
|
config: serde_json::Value,
|
||||||
root_dir_contains_symlink: bool,
|
root_dir_contains_symlink: bool,
|
||||||
|
user_config_path: Option<Utf8PathBuf>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Project<'_> {
|
impl Project<'_> {
|
||||||
|
@ -47,9 +51,15 @@ impl Project<'_> {
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
root_dir_contains_symlink: false,
|
root_dir_contains_symlink: false,
|
||||||
|
user_config_path: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn user_config_dir(mut self, config_path_dir: TestDir) -> Self {
|
||||||
|
self.user_config_path = Some(config_path_dir.path().to_owned());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
pub(crate) fn tmp_dir(mut self, tmp_dir: TestDir) -> Self {
|
pub(crate) fn tmp_dir(mut self, tmp_dir: TestDir) -> Self {
|
||||||
self.tmp_dir = Some(tmp_dir);
|
self.tmp_dir = Some(tmp_dir);
|
||||||
self
|
self
|
||||||
|
@ -111,10 +121,17 @@ impl Project<'_> {
|
||||||
assert!(proc_macro_names.is_empty());
|
assert!(proc_macro_names.is_empty());
|
||||||
assert!(mini_core.is_none());
|
assert!(mini_core.is_none());
|
||||||
assert!(toolchain.is_none());
|
assert!(toolchain.is_none());
|
||||||
|
|
||||||
for entry in fixture {
|
for entry in fixture {
|
||||||
let path = tmp_dir.path().join(&entry.path['/'.len_utf8()..]);
|
if let Some(pth) = entry.path.strip_prefix("/$$CONFIG_DIR$$") {
|
||||||
fs::create_dir_all(path.parent().unwrap()).unwrap();
|
let path = self.user_config_path.clone().unwrap().join(&pth['/'.len_utf8()..]);
|
||||||
fs::write(path.as_path(), entry.text.as_bytes()).unwrap();
|
fs::create_dir_all(path.parent().unwrap()).unwrap();
|
||||||
|
fs::write(path.as_path(), entry.text.as_bytes()).unwrap();
|
||||||
|
} else {
|
||||||
|
let path = tmp_dir.path().join(&entry.path['/'.len_utf8()..]);
|
||||||
|
fs::create_dir_all(path.parent().unwrap()).unwrap();
|
||||||
|
fs::write(path.as_path(), entry.text.as_bytes()).unwrap();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let tmp_dir_path = AbsPathBuf::assert(tmp_dir.path().to_path_buf());
|
let tmp_dir_path = AbsPathBuf::assert(tmp_dir.path().to_path_buf());
|
||||||
|
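A hypothetical helper mirroring the fixture routing above: entries under the synthetic `/$$CONFIG_DIR$$` prefix are written into the user config directory, everything else into the test's temporary directory. `fixture_target` does not exist in the codebase; the prefix handling and the `'/'.len_utf8()` slicing are taken from the hunk.

use paths::{Utf8Path, Utf8PathBuf};

fn fixture_target(entry_path: &str, tmp_dir: &Utf8Path, user_config_dir: &Utf8Path) -> Utf8PathBuf {
    match entry_path.strip_prefix("/$$CONFIG_DIR$$") {
        // e.g. "/$$CONFIG_DIR$$/rust-analyzer.toml" -> "<user_config_dir>/rust-analyzer.toml"
        Some(rest) => user_config_dir.join(&rest['/'.len_utf8()..]),
        // everything else lands in the test's tmp dir, as before
        None => tmp_dir.join(&entry_path['/'.len_utf8()..]),
    }
}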
@ -184,8 +201,14 @@ impl Project<'_> {
|
||||||
},
|
},
|
||||||
roots,
|
roots,
|
||||||
None,
|
None,
|
||||||
|
self.user_config_path,
|
||||||
);
|
);
|
||||||
config.update(self.config).expect("invalid config");
|
let mut change = ConfigChange::default();
|
||||||
|
|
||||||
|
change.change_client_config(self.config);
|
||||||
|
let mut error_sink = ConfigError::default();
|
||||||
|
assert!(error_sink.is_empty());
|
||||||
|
config = config.apply_change(change, &mut error_sink);
|
||||||
config.rediscover_workspaces();
|
config.rediscover_workspaces();
|
||||||
|
|
||||||
Server::new(tmp_dir.keep(), config)
|
Server::new(tmp_dir.keep(), config)
|
||||||
|
|