Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-09-28 21:05:02 +00:00)

⬆️ rust-analyzer

commit a2a1d99545 (parent 61c744d4fd)
126 changed files with 2098 additions and 904 deletions
@@ -8,7 +8,7 @@ documentation = "https://rust-analyzer.github.io/manual.html"
 license = "MIT OR Apache-2.0"
 autobins = false
 edition = "2021"
-rust-version = "1.57"
+rust-version = "1.65"
 
 [lib]
 doctest = false
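The MSRV bump above is not cosmetic: later hunks in this same commit use `let ... else` (for example when reading the fetched-workspaces queue in reload.rs), and that syntax only stabilized in Rust 1.65. A minimal, self-contained sketch of the pattern (hypothetical names, not code from this diff):

// let-else: bind the happy path or bail out of the enclosing function,
// without an extra level of nesting. Requires rustc 1.65 or newer.
fn first_even(numbers: &[u32]) -> Option<u32> {
    let Some(&first) = numbers.iter().find(|n| **n % 2 == 0) else {
        // The else branch must diverge (return, continue, break, panic, ...).
        return None;
    };
    Some(first * 10)
}

fn main() {
    assert_eq!(first_even(&[1, 4, 5]), Some(40));
    assert_eq!(first_even(&[1, 3, 5]), None);
}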
@@ -3,11 +3,11 @@
 use std::mem;
 
 use cfg::{CfgAtom, CfgExpr};
-use ide::{FileId, RunnableKind, TestId};
+use ide::{Cancellable, FileId, RunnableKind, TestId};
 use project_model::{self, CargoFeatures, ManifestPath, TargetKind};
 use vfs::AbsPathBuf;
 
-use crate::{global_state::GlobalStateSnapshot, Result};
+use crate::global_state::GlobalStateSnapshot;
 
 /// Abstract representation of Cargo target.
 ///
@@ -29,7 +29,7 @@ impl CargoTargetSpec {
         spec: Option<CargoTargetSpec>,
         kind: &RunnableKind,
         cfg: &Option<CfgExpr>,
-    ) -> Result<(Vec<String>, Vec<String>)> {
+    ) -> (Vec<String>, Vec<String>) {
         let mut args = Vec::new();
         let mut extra_args = Vec::new();
 
@@ -111,13 +111,13 @@ impl CargoTargetSpec {
                 }
             }
         }
-        Ok((args, extra_args))
+        (args, extra_args)
     }
 
     pub(crate) fn for_file(
         global_state_snapshot: &GlobalStateSnapshot,
         file_id: FileId,
-    ) -> Result<Option<CargoTargetSpec>> {
+    ) -> Cancellable<Option<CargoTargetSpec>> {
         let crate_id = match &*global_state_snapshot.analysis.crates_for(file_id)? {
            &[crate_id, ..] => crate_id,
            _ => return Ok(None),
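Many hunks in this commit swap `Result<T>` for `Cancellable<T>` in return types (the import comes from the `ide` crate, as the handlers.rs hunk below shows). A rough, self-contained sketch of the distinction, using toy stand-in types rather than rust-analyzer's actual definitions: `Cancellable<T>` is essentially a `Result` whose only error is "the request was cancelled", so narrowing to it documents that no other failure can occur, and converting back into the broader server `Result` has to be explicit, like the `map_err(Into::into)` added in `handle_ssr`.

// Toy stand-ins for illustration only.
#[derive(Debug)]
struct Cancelled;

type Cancellable<T> = Result<T, Cancelled>;

// An analysis query can only fail by being cancelled, never with a "real" error.
fn crates_for(file_id: u32) -> Cancellable<Vec<u32>> {
    Ok(vec![file_id])
}

// An LSP handler still returns a broad error type, so cancellation must be
// converted explicitly at the boundary.
fn handler(file_id: u32) -> Result<Vec<u32>, String> {
    crates_for(file_id).map_err(|Cancelled| "request was cancelled".to_owned())
}

fn main() {
    assert_eq!(handler(1).unwrap(), vec![1]);
}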
@@ -60,24 +60,12 @@ pub fn load_workspace(
     };
 
     let proc_macro_client = if load_config.with_proc_macro {
-        let mut path = AbsPathBuf::assert(std::env::current_exe()?);
-        let mut args = vec!["proc-macro"];
+        let (server_path, args): (_, &[_]) = match ws.find_sysroot_proc_macro_srv() {
+            Some(server_path) => (server_path, &[]),
+            None => (AbsPathBuf::assert(std::env::current_exe()?), &["proc-macro"]),
+        };
 
-        if let ProjectWorkspace::Cargo { sysroot, .. } | ProjectWorkspace::Json { sysroot, .. } =
-            &ws
-        {
-            if let Some(sysroot) = sysroot.as_ref() {
-                let standalone_server_name =
-                    format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
-                let server_path = sysroot.root().join("libexec").join(&standalone_server_name);
-                if std::fs::metadata(&server_path).is_ok() {
-                    path = server_path;
-                    args = vec![];
-                }
-            }
-        }
-
-        ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|e| e.to_string())
+        ProcMacroServer::spawn(server_path, args).map_err(|e| e.to_string())
     } else {
         Err("proc macro server disabled".to_owned())
     };
 
@@ -47,30 +47,27 @@ impl flags::Scip {
 
         let si = StaticIndex::compute(&analysis);
 
-        let mut index = scip_types::Index {
-            metadata: Some(scip_types::Metadata {
-                version: scip_types::ProtocolVersion::UnspecifiedProtocolVersion.into(),
-                tool_info: Some(scip_types::ToolInfo {
-                    name: "rust-analyzer".to_owned(),
-                    version: "0.1".to_owned(),
-                    arguments: vec![],
-                    ..Default::default()
-                })
-                .into(),
-                project_root: format!(
-                    "file://{}",
-                    path.normalize()
-                        .as_os_str()
-                        .to_str()
-                        .ok_or(anyhow::anyhow!("Unable to normalize project_root path"))?
-                        .to_string()
-                ),
-                text_document_encoding: scip_types::TextEncoding::UTF8.into(),
-                ..Default::default()
+        let metadata = scip_types::Metadata {
+            version: scip_types::ProtocolVersion::UnspecifiedProtocolVersion.into(),
+            tool_info: Some(scip_types::ToolInfo {
+                name: "rust-analyzer".to_owned(),
+                version: "0.1".to_owned(),
+                arguments: vec![],
+                special_fields: Default::default(),
             })
             .into(),
-            ..Default::default()
+            project_root: format!(
+                "file://{}",
+                path.normalize()
+                    .as_os_str()
+                    .to_str()
+                    .ok_or(anyhow::anyhow!("Unable to normalize project_root path"))?
+                    .to_string()
+            ),
+            text_document_encoding: scip_types::TextEncoding::UTF8.into(),
+            special_fields: Default::default(),
         };
+        let mut documents = Vec::new();
 
         let mut symbols_emitted: HashSet<TokenId> = HashSet::default();
         let mut tokens_to_symbol: HashMap<TokenId, String> = HashMap::new();
@@ -95,18 +92,14 @@ impl flags::Scip {
                 endings: LineEndings::Unix,
             };
 
-            let mut doc = scip_types::Document {
-                relative_path,
-                language: "rust".to_string(),
-                ..Default::default()
-            };
+            let mut occurrences = Vec::new();
+            let mut symbols = Vec::new();
 
-            tokens.into_iter().for_each(|(range, id)| {
+            tokens.into_iter().for_each(|(text_range, id)| {
                 let token = si.tokens.get(id).unwrap();
 
-                let mut occurrence = scip_types::Occurrence::default();
-                occurrence.range = text_range_to_scip_range(&line_index, range);
-                occurrence.symbol = tokens_to_symbol
+                let range = text_range_to_scip_range(&line_index, text_range);
+                let symbol = tokens_to_symbol
                     .entry(id)
                     .or_insert_with(|| {
                         let symbol = token_to_symbol(&token).unwrap_or_else(&mut new_local_symbol);
@@ -114,34 +107,62 @@ impl flags::Scip {
                     })
                     .clone();
 
+                let mut symbol_roles = Default::default();
+
                 if let Some(def) = token.definition {
-                    if def.range == range {
-                        occurrence.symbol_roles |= scip_types::SymbolRole::Definition as i32;
+                    if def.range == text_range {
+                        symbol_roles |= scip_types::SymbolRole::Definition as i32;
                     }
 
                     if symbols_emitted.insert(id) {
-                        let mut symbol_info = scip_types::SymbolInformation::default();
-                        symbol_info.symbol = occurrence.symbol.clone();
-                        if let Some(hover) = &token.hover {
-                            if !hover.markup.as_str().is_empty() {
-                                symbol_info.documentation = vec![hover.markup.as_str().to_string()];
-                            }
-                        }
+                        let documentation = token
+                            .hover
+                            .as_ref()
+                            .map(|hover| hover.markup.as_str())
+                            .filter(|it| !it.is_empty())
+                            .map(|it| vec![it.to_owned()]);
+                        let symbol_info = scip_types::SymbolInformation {
+                            symbol: symbol.clone(),
+                            documentation: documentation.unwrap_or_default(),
+                            relationships: Vec::new(),
+                            special_fields: Default::default(),
+                        };
 
-                        doc.symbols.push(symbol_info)
+                        symbols.push(symbol_info)
                    }
                 }
 
-                doc.occurrences.push(occurrence);
+                occurrences.push(scip_types::Occurrence {
+                    range,
+                    symbol,
+                    symbol_roles,
+                    override_documentation: Vec::new(),
+                    syntax_kind: Default::default(),
+                    diagnostics: Vec::new(),
+                    special_fields: Default::default(),
+                });
             });
 
-            if doc.occurrences.is_empty() {
+            if occurrences.is_empty() {
                 continue;
             }
 
-            index.documents.push(doc);
+            documents.push(scip_types::Document {
+                relative_path,
+                language: "rust".to_string(),
+                occurrences,
+                symbols,
+                special_fields: Default::default(),
+            });
         }
 
+        let index = scip_types::Index {
+            metadata: Some(metadata).into(),
+            documents,
+            external_symbols: Vec::new(),
+            special_fields: Default::default(),
+        };
+
         scip::write_message_to_file("index.scip", index)
             .map_err(|err| anyhow::anyhow!("Failed to write scip to file: {}", err))?;
 
@@ -181,7 +202,7 @@ fn new_descriptor_str(
         name: name.to_string(),
         disambiguator: "".to_string(),
         suffix: suffix.into(),
-        ..Default::default()
+        special_fields: Default::default(),
     }
 }
 
@@ -232,11 +253,11 @@ fn token_to_symbol(token: &TokenStaticData) -> Option<scip_types::Symbol> {
             manager: "cargo".to_string(),
             name: package_name,
             version: version.unwrap_or_else(|| ".".to_string()),
-            ..Default::default()
+            special_fields: Default::default(),
        })
        .into(),
        descriptors,
-        ..Default::default()
+        special_fields: Default::default(),
    })
 }
 
@@ -118,9 +118,11 @@ config_data! {
         /// This option does not take effect until rust-analyzer is restarted.
         cargo_sysroot: Option<String> = "\"discover\"",
         /// Compilation target override (target triple).
+        // FIXME(@poliorcetics): move to multiple targets here too, but this will need more work
+        // than `checkOnSave_target`
         cargo_target: Option<String> = "null",
         /// Unsets `#[cfg(test)]` for the specified crates.
-        cargo_unsetTest: Vec<String> = "[\"core\"]",
+        cargo_unsetTest: Vec<String> = "[\"core\"]",
 
         /// Check all targets and tests (`--all-targets`).
         checkOnSave_allTargets: bool = "true",
@@ -157,7 +159,7 @@ config_data! {
         checkOnSave_noDefaultFeatures: Option<bool> = "null",
         /// Override the command rust-analyzer uses instead of `cargo check` for
         /// diagnostics on save. The command is required to output json and
-        /// should therefor include `--message-format=json` or a similar option.
+        /// should therefore include `--message-format=json` or a similar option.
         ///
         /// If you're changing this because you're using some tool wrapping
         /// Cargo, you might also want to change
@@ -174,9 +176,13 @@ config_data! {
         /// ```
         /// .
         checkOnSave_overrideCommand: Option<Vec<String>> = "null",
-        /// Check for a specific target. Defaults to
-        /// `#rust-analyzer.cargo.target#`.
-        checkOnSave_target: Option<String> = "null",
+        /// Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.
+        ///
+        /// Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g.
+        /// `["aarch64-apple-darwin", "x86_64-apple-darwin"]`.
+        ///
+        /// Aliased as `"checkOnSave.targets"`.
+        checkOnSave_target | checkOnSave_targets: CheckOnSaveTargets = "[]",
 
         /// Toggles the additional completions that automatically add imports when completed.
         /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
@@ -261,6 +267,7 @@ config_data! {
         files_excludeDirs: Vec<PathBuf> = "[]",
         /// Controls file watching implementation.
         files_watcher: FilesWatcherDef = "\"client\"",
+
         /// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
         highlightRelated_breakPoints_enable: bool = "true",
         /// Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).
@@ -320,6 +327,8 @@ config_data! {
         inlayHints_closingBraceHints_minLines: usize = "25",
         /// Whether to show inlay type hints for return types of closures.
         inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef = "\"never\"",
+        /// Whether to show inlay hints for type adjustments.
+        inlayHints_expressionAdjustmentHints_enable: AdjustmentHintsDef = "\"never\"",
         /// Whether to show inlay type hints for elided lifetimes in function signatures.
         inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = "\"never\"",
         /// Whether to prefer using parameter names as the name for elided lifetime hints if possible.
@@ -329,7 +338,8 @@ config_data! {
         /// Whether to show function parameter name inlay hints at the call
         /// site.
         inlayHints_parameterHints_enable: bool = "true",
-        /// Whether to show inlay type hints for compiler inserted reborrows.
+        /// Whether to show inlay hints for compiler inserted reborrows.
+        /// This setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#.
         inlayHints_reborrowHints_enable: ReborrowHintsDef = "\"never\"",
         /// Whether to render leading colons for type hints, and trailing colons for parameter hints.
         inlayHints_renderColons: bool = "true",
@@ -1143,11 +1153,10 @@ impl Config {
             }
             Some(_) | None => FlycheckConfig::CargoCommand {
                 command: self.data.checkOnSave_command.clone(),
-                target_triple: self
-                    .data
-                    .checkOnSave_target
-                    .clone()
-                    .or_else(|| self.data.cargo_target.clone()),
+                target_triples: match &self.data.checkOnSave_target.0[..] {
+                    [] => self.data.cargo_target.clone().into_iter().collect(),
+                    targets => targets.into(),
+                },
                 all_targets: self.data.checkOnSave_allTargets,
                 no_default_features: self
                     .data
@@ -1200,10 +1209,15 @@ impl Config {
             hide_closure_initialization_hints: self
                 .data
                 .inlayHints_typeHints_hideClosureInitialization,
-            reborrow_hints: match self.data.inlayHints_reborrowHints_enable {
-                ReborrowHintsDef::Always => ide::ReborrowHints::Always,
-                ReborrowHintsDef::Never => ide::ReborrowHints::Never,
-                ReborrowHintsDef::Mutable => ide::ReborrowHints::MutableOnly,
+            adjustment_hints: match self.data.inlayHints_expressionAdjustmentHints_enable {
+                AdjustmentHintsDef::Always => ide::AdjustmentHints::Always,
+                AdjustmentHintsDef::Never => match self.data.inlayHints_reborrowHints_enable {
+                    ReborrowHintsDef::Always | ReborrowHintsDef::Mutable => {
+                        ide::AdjustmentHints::ReborrowOnly
+                    }
+                    ReborrowHintsDef::Never => ide::AdjustmentHints::Never,
+                },
+                AdjustmentHintsDef::Reborrow => ide::AdjustmentHints::ReborrowOnly,
             },
             binding_mode_hints: self.data.inlayHints_bindingModeHints_enable,
             param_names_for_lifetime_elision_hints: self
@@ -1538,6 +1552,7 @@ mod de_unit_v {
     named_unit_variant!(all);
     named_unit_variant!(skip_trivial);
     named_unit_variant!(mutable);
+    named_unit_variant!(reborrow);
     named_unit_variant!(with_block);
 }
 
@@ -1647,6 +1662,9 @@ enum InvocationStrategy {
     PerWorkspace,
 }
 
+#[derive(Deserialize, Debug, Clone)]
+struct CheckOnSaveTargets(#[serde(deserialize_with = "single_or_array")] Vec<String>);
+
 #[derive(Deserialize, Debug, Clone)]
 #[serde(rename_all = "snake_case")]
 enum InvocationLocation {
@@ -1687,6 +1705,17 @@ enum ReborrowHintsDef {
     Mutable,
 }
 
+#[derive(Deserialize, Debug, Clone)]
+#[serde(untagged)]
+enum AdjustmentHintsDef {
+    #[serde(deserialize_with = "true_or_always")]
+    Always,
+    #[serde(deserialize_with = "false_or_never")]
+    Never,
+    #[serde(deserialize_with = "de_unit_v::reborrow")]
+    Reborrow,
+}
+
 #[derive(Deserialize, Debug, Clone)]
 #[serde(rename_all = "snake_case")]
 enum FilesWatcherDef {
@@ -1996,6 +2025,19 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
                 "Only show mutable reborrow hints."
             ]
         },
+        "AdjustmentHintsDef" => set! {
+            "type": "string",
+            "enum": [
+                "always",
+                "never",
+                "reborrow"
+            ],
+            "enumDescriptions": [
+                "Always show all adjustment hints.",
+                "Never show adjustment hints.",
+                "Only show auto borrow and dereference adjustment hints."
+            ]
+        },
         "CargoFeaturesDef" => set! {
             "anyOf": [
                 {
@@ -2084,6 +2126,17 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
                 "The command will be executed in the project root."
            ],
        },
+        "CheckOnSaveTargets" => set! {
+            "anyOf": [
+                {
+                    "type": "string",
+                },
+                {
+                    "type": "array",
+                    "items": { "type": "string" }
+                },
+            ],
+        },
         _ => panic!("missing entry for {}: {}", ty, default),
     }
 
@@ -359,14 +359,15 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
         .iter()
         .flat_map(|primary_span| {
             let primary_location = primary_location(config, workspace_root, primary_span, snap);
 
-            let mut message = message.clone();
-            if needs_primary_span_label {
-                if let Some(primary_span_label) = &primary_span.label {
-                    format_to!(message, "\n{}", primary_span_label);
+            let message = {
+                let mut message = message.clone();
+                if needs_primary_span_label {
+                    if let Some(primary_span_label) = &primary_span.label {
+                        format_to!(message, "\n{}", primary_span_label);
+                    }
                 }
-            }
 
+                message
+            };
             // Each primary diagnostic span may result in multiple LSP diagnostics.
             let mut diagnostics = Vec::new();
 
@@ -417,7 +418,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
                     message: message.clone(),
                     related_information: Some(information_for_additional_diagnostic),
                     tags: if tags.is_empty() { None } else { Some(tags.clone()) },
-                    data: None,
+                    data: Some(serde_json::json!({ "rendered": rd.rendered })),
                 };
                 diagnostics.push(MappedRustDiagnostic {
                     url: secondary_location.uri,
@@ -449,7 +450,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
                         }
                     },
                     tags: if tags.is_empty() { None } else { Some(tags.clone()) },
-                    data: None,
+                    data: Some(serde_json::json!({ "rendered": rd.rendered })),
                 },
                 fix: None,
             });
@@ -534,7 +535,8 @@ mod tests {
             Config::new(workspace_root.to_path_buf(), ClientCapabilities::default()),
         );
         let snap = state.snapshot();
-        let actual = map_rust_diagnostic_to_lsp(&config, &diagnostic, workspace_root, &snap);
+        let mut actual = map_rust_diagnostic_to_lsp(&config, &diagnostic, workspace_root, &snap);
+        actual.iter_mut().for_each(|diag| diag.diagnostic.data = None);
         expect.assert_debug_eq(&actual)
     }
 
@@ -42,8 +42,10 @@ pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> R
 pub(crate) fn text_range(line_index: &LineIndex, range: lsp_types::Range) -> Result<TextRange> {
     let start = offset(line_index, range.start)?;
     let end = offset(line_index, range.end)?;
-    let text_range = TextRange::new(start, end);
-    Ok(text_range)
+    match end < start {
+        true => Err(format_err!("Invalid Range").into()),
+        false => Ok(TextRange::new(start, end)),
+    }
 }
 
 pub(crate) fn file_id(snap: &GlobalStateSnapshot, url: &lsp_types::Url) -> Result<FileId> {
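For context on the hunk above: `TextRange`-style constructors assert that `start <= end`, so an inverted range sent by a misbehaving client could previously abort the whole request; mapping it to an error keeps the failure local. A hedged, self-contained sketch of the same idea with simplified types (not the real `from_proto` code):

// Validate an LSP-style range before building a text range, returning an error
// instead of asserting/panicking on inverted input.
fn text_range(start: u32, end: u32) -> Result<std::ops::Range<u32>, String> {
    match end < start {
        true => Err("Invalid Range".to_owned()),
        false => Ok(start..end),
    }
}

fn main() {
    assert_eq!(text_range(4, 10), Ok(4..10));
    assert!(text_range(10, 4).is_err()); // end before start: report, don't panic
}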
@@ -100,7 +100,7 @@ pub(crate) struct GlobalState {
     /// the user just adds comments or whitespace to Cargo.toml, we do not want
     /// to invalidate any salsa caches.
     pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
-    pub(crate) fetch_workspaces_queue: OpQueue<Vec<anyhow::Result<ProjectWorkspace>>>,
+    pub(crate) fetch_workspaces_queue: OpQueue<Option<Vec<anyhow::Result<ProjectWorkspace>>>>,
     pub(crate) fetch_build_data_queue:
         OpQueue<(Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,
 
@@ -9,9 +9,9 @@ use std::{
 
 use anyhow::Context;
 use ide::{
-    AnnotationConfig, AssistKind, AssistResolveStrategy, FileId, FilePosition, FileRange,
-    HoverAction, HoverGotoTypeData, Query, RangeInfo, ReferenceCategory, Runnable, RunnableKind,
-    SingleResolve, SourceChange, TextEdit,
+    AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FileId, FilePosition,
+    FileRange, HoverAction, HoverGotoTypeData, Query, RangeInfo, ReferenceCategory, Runnable,
+    RunnableKind, SingleResolve, SourceChange, TextEdit,
 };
 use ide_db::SymbolKind;
 use lsp_server::ErrorCode;
@@ -556,7 +556,7 @@ pub(crate) fn handle_will_rename_files(
     if source_change.source_file_edits.is_empty() {
         Ok(None)
     } else {
-        to_proto::workspace_edit(&snap, source_change).map(Some)
+        Ok(Some(to_proto::workspace_edit(&snap, source_change)?))
     }
 }
 
@@ -1313,7 +1313,7 @@ pub(crate) fn handle_ssr(
         position,
         selections,
     )??;
-    to_proto::workspace_edit(&snap, source_change)
+    to_proto::workspace_edit(&snap, source_change).map_err(Into::into)
 }
 
 pub(crate) fn publish_diagnostics(
@@ -1354,13 +1354,12 @@ pub(crate) fn handle_inlay_hints(
 ) -> Result<Option<Vec<InlayHint>>> {
     let _p = profile::span("handle_inlay_hints");
     let document_uri = &params.text_document.uri;
-    let file_id = from_proto::file_id(&snap, document_uri)?;
-    let line_index = snap.file_line_index(file_id)?;
-    let range = from_proto::file_range(
+    let FileRange { file_id, range } = from_proto::file_range(
         &snap,
         TextDocumentIdentifier::new(document_uri.to_owned()),
         params.range,
     )?;
+    let line_index = snap.file_line_index(file_id)?;
     let inlay_hints_config = snap.config.inlay_hints();
     Ok(Some(
         snap.analysis
@@ -1369,7 +1368,7 @@ pub(crate) fn handle_inlay_hints(
             .map(|it| {
                 to_proto::inlay_hint(&snap, &line_index, inlay_hints_config.render_colons, it)
             })
-            .collect::<Result<Vec<_>>>()?,
+            .collect::<Cancellable<Vec<_>>>()?,
     ))
 }
 
@@ -1426,7 +1425,7 @@ pub(crate) fn handle_call_hierarchy_prepare(
         .into_iter()
         .filter(|it| it.kind == Some(SymbolKind::Function))
         .map(|it| to_proto::call_hierarchy_item(&snap, it))
-        .collect::<Result<Vec<_>>>()?;
+        .collect::<Cancellable<Vec<_>>>()?;
 
     Ok(Some(res))
 }
@@ -27,10 +27,6 @@ pub(crate) enum LineEndings {
 impl LineEndings {
     /// Replaces `\r\n` with `\n` in-place in `src`.
     pub(crate) fn normalize(src: String) -> (String, LineEndings) {
-        if !src.as_bytes().contains(&b'\r') {
-            return (src, LineEndings::Unix);
-        }
-
         // We replace `\r\n` with `\n` in-place, which doesn't break utf-8 encoding.
         // While we *can* call `as_mut_vec` and do surgery on the live string
         // directly, let's rather steal the contents of `src`. This makes the code
@@ -39,10 +35,19 @@ impl LineEndings {
         let mut buf = src.into_bytes();
         let mut gap_len = 0;
         let mut tail = buf.as_mut_slice();
+        let mut crlf_seen = false;
+
+        let find_crlf = |src: &[u8]| src.windows(2).position(|it| it == b"\r\n");
+
         loop {
             let idx = match find_crlf(&tail[gap_len..]) {
-                None => tail.len(),
-                Some(idx) => idx + gap_len,
+                None if crlf_seen => tail.len(),
+                // SAFETY: buf is unchanged and therefore still contains utf8 data
+                None => return (unsafe { String::from_utf8_unchecked(buf) }, LineEndings::Unix),
+                Some(idx) => {
+                    crlf_seen = true;
+                    idx + gap_len
+                }
             };
             tail.copy_within(gap_len..idx, 0);
             tail = &mut tail[idx - gap_len..];
@@ -54,15 +59,48 @@ impl LineEndings {
 
         // Account for removed `\r`.
+        // After `set_len`, `buf` is guaranteed to contain utf-8 again.
-        let new_len = buf.len() - gap_len;
         let src = unsafe {
+            let new_len = buf.len() - gap_len;
             buf.set_len(new_len);
             String::from_utf8_unchecked(buf)
         };
-        return (src, LineEndings::Dos);
-
-        fn find_crlf(src: &[u8]) -> Option<usize> {
-            src.windows(2).position(|it| it == b"\r\n")
-        }
+        (src, LineEndings::Dos)
     }
 }
 
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn unix() {
+        let src = "a\nb\nc\n\n\n\n";
+        let (res, endings) = LineEndings::normalize(src.into());
+        assert_eq!(endings, LineEndings::Unix);
+        assert_eq!(res, src);
+    }
+
+    #[test]
+    fn dos() {
+        let src = "\r\na\r\n\r\nb\r\nc\r\n\r\n\r\n\r\n";
+        let (res, endings) = LineEndings::normalize(src.into());
+        assert_eq!(endings, LineEndings::Dos);
+        assert_eq!(res, "\na\n\nb\nc\n\n\n\n");
+    }
+
+    #[test]
+    fn mixed() {
+        let src = "a\r\nb\r\nc\r\n\n\r\n\n";
+        let (res, endings) = LineEndings::normalize(src.into());
+        assert_eq!(endings, LineEndings::Dos);
+        assert_eq!(res, "a\nb\nc\n\n\n\n");
+    }
+
+    #[test]
+    fn none() {
+        let src = "abc";
+        let (res, endings) = LineEndings::normalize(src.into());
+        assert_eq!(endings, LineEndings::Unix);
+        assert_eq!(res, src);
+    }
+}
@@ -1,5 +1,5 @@
 //! Utilities for LSP-related boilerplate code.
-use std::{ops::Range, sync::Arc};
+use std::{mem, ops::Range, sync::Arc};
 
 use lsp_server::Notification;
 
@@ -133,11 +133,37 @@ impl GlobalState {
 }
 
 pub(crate) fn apply_document_changes(
-    old_text: &mut String,
-    content_changes: Vec<lsp_types::TextDocumentContentChangeEvent>,
-) {
+    file_contents: impl FnOnce() -> String,
+    mut content_changes: Vec<lsp_types::TextDocumentContentChangeEvent>,
+) -> String {
+    // Skip to the last full document change, as it invalidates all previous changes anyways.
+    let mut start = content_changes
+        .iter()
+        .rev()
+        .position(|change| change.range.is_none())
+        .map(|idx| content_changes.len() - idx - 1)
+        .unwrap_or(0);
+
+    let mut text: String = match content_changes.get_mut(start) {
+        // peek at the first content change as an optimization
+        Some(lsp_types::TextDocumentContentChangeEvent { range: None, text, .. }) => {
+            let text = mem::take(text);
+            start += 1;
+
+            // The only change is a full document update
+            if start == content_changes.len() {
+                return text;
+            }
+            text
+        }
+        Some(_) => file_contents(),
+        // we received no content changes
+        None => return file_contents(),
+    };
+
     let mut line_index = LineIndex {
-        index: Arc::new(ide::LineIndex::new(old_text)),
+        // the index will be overwritten in the bottom loop's first iteration
+        index: Arc::new(ide::LineIndex::new(&text)),
         // We don't care about line endings or offset encoding here.
         endings: LineEndings::Unix,
         encoding: PositionEncoding::Utf16,
@@ -148,38 +174,20 @@ pub(crate) fn apply_document_changes(
     // Some clients (e.g. Code) sort the ranges in reverse. As an optimization, we
     // remember the last valid line in the index and only rebuild it if needed.
     // The VFS will normalize the end of lines to `\n`.
-    enum IndexValid {
-        All,
-        UpToLineExclusive(u32),
-    }
-
-    impl IndexValid {
-        fn covers(&self, line: u32) -> bool {
-            match *self {
-                IndexValid::UpToLineExclusive(to) => to > line,
-                _ => true,
-            }
-        }
-    }
-
-    let mut index_valid = IndexValid::All;
+    let mut index_valid = !0u32;
     for change in content_changes {
-        match change.range {
-            Some(range) => {
-                if !index_valid.covers(range.end.line) {
-                    line_index.index = Arc::new(ide::LineIndex::new(old_text));
-                }
-                index_valid = IndexValid::UpToLineExclusive(range.start.line);
-                if let Ok(range) = from_proto::text_range(&line_index, range) {
-                    old_text.replace_range(Range::<usize>::from(range), &change.text);
-                }
-            }
-            None => {
-                *old_text = change.text;
-                index_valid = IndexValid::UpToLineExclusive(0);
-            }
+        // The None case can't happen as we have handled it above already
+        if let Some(range) = change.range {
+            if index_valid <= range.end.line {
+                *Arc::make_mut(&mut line_index.index) = ide::LineIndex::new(&text);
+            }
+            index_valid = range.start.line;
+            if let Ok(range) = from_proto::text_range(&line_index, range) {
+                text.replace_range(Range::<usize>::from(range), &change.text);
+            }
         }
     }
+    text
 }
 
 /// Checks that the edits inside the completion and the additional edits do not overlap.
@@ -242,11 +250,10 @@ mod tests {
         };
     }
 
-        let mut text = String::new();
-        apply_document_changes(&mut text, vec![]);
+        let text = apply_document_changes(|| String::new(), vec![]);
         assert_eq!(text, "");
-        apply_document_changes(
-            &mut text,
+        let text = apply_document_changes(
+            || text,
             vec![TextDocumentContentChangeEvent {
                 range: None,
                 range_length: None,
@@ -254,39 +261,39 @@ mod tests {
             }],
         );
         assert_eq!(text, "the");
-        apply_document_changes(&mut text, c![0, 3; 0, 3 => " quick"]);
+        let text = apply_document_changes(|| text, c![0, 3; 0, 3 => " quick"]);
         assert_eq!(text, "the quick");
-        apply_document_changes(&mut text, c![0, 0; 0, 4 => "", 0, 5; 0, 5 => " foxes"]);
+        let text = apply_document_changes(|| text, c![0, 0; 0, 4 => "", 0, 5; 0, 5 => " foxes"]);
         assert_eq!(text, "quick foxes");
-        apply_document_changes(&mut text, c![0, 11; 0, 11 => "\ndream"]);
+        let text = apply_document_changes(|| text, c![0, 11; 0, 11 => "\ndream"]);
         assert_eq!(text, "quick foxes\ndream");
-        apply_document_changes(&mut text, c![1, 0; 1, 0 => "have "]);
+        let text = apply_document_changes(|| text, c![1, 0; 1, 0 => "have "]);
         assert_eq!(text, "quick foxes\nhave dream");
-        apply_document_changes(
-            &mut text,
+        let text = apply_document_changes(
+            || text,
             c![0, 0; 0, 0 => "the ", 1, 4; 1, 4 => " quiet", 1, 16; 1, 16 => "s\n"],
         );
         assert_eq!(text, "the quick foxes\nhave quiet dreams\n");
-        apply_document_changes(&mut text, c![0, 15; 0, 15 => "\n", 2, 17; 2, 17 => "\n"]);
+        let text = apply_document_changes(|| text, c![0, 15; 0, 15 => "\n", 2, 17; 2, 17 => "\n"]);
         assert_eq!(text, "the quick foxes\n\nhave quiet dreams\n\n");
-        apply_document_changes(
-            &mut text,
+        let text = apply_document_changes(
+            || text,
             c![1, 0; 1, 0 => "DREAM", 2, 0; 2, 0 => "they ", 3, 0; 3, 0 => "DON'T THEY?"],
         );
         assert_eq!(text, "the quick foxes\nDREAM\nthey have quiet dreams\nDON'T THEY?\n");
-        apply_document_changes(&mut text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]);
+        let text = apply_document_changes(|| text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]);
         assert_eq!(text, "the quick \nthey have quiet dreams\n");
 
-        text = String::from("❤️");
-        apply_document_changes(&mut text, c![0, 0; 0, 0 => "a"]);
+        let text = String::from("❤️");
+        let text = apply_document_changes(|| text, c![0, 0; 0, 0 => "a"]);
         assert_eq!(text, "a❤️");
 
-        text = String::from("a\nb");
-        apply_document_changes(&mut text, c![0, 1; 1, 0 => "\nțc", 0, 1; 1, 1 => "d"]);
+        let text = String::from("a\nb");
+        let text = apply_document_changes(|| text, c![0, 1; 1, 0 => "\nțc", 0, 1; 1, 1 => "d"]);
         assert_eq!(text, "adcb");
 
-        text = String::from("a\nb");
-        apply_document_changes(&mut text, c![0, 1; 1, 0 => "ț\nc", 0, 2; 0, 2 => "c"]);
+        let text = String::from("a\nb");
+        let text = apply_document_changes(|| text, c![0, 1; 1, 0 => "ț\nc", 0, 2; 0, 2 => "c"]);
         assert_eq!(text, "ațc\ncb");
     }
 
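A worked example of the control flow that the reworked `apply_document_changes` hunks above implement, as a self-contained sketch: the stand-in types are made up, and byte offsets replace the real line/column positions and lsp_types events, but the "skip to the last full-document change" idea is the same.

// Change::Full corresponds to an LSP change with `range: None` (replace the
// whole document); Change::Incremental carries a byte range plus new text.
enum Change {
    Full(String),
    Incremental((usize, usize), String),
}

fn apply(file_contents: impl FnOnce() -> String, changes: Vec<Change>) -> String {
    // Everything before the last full-document change is dead work and never applied.
    let start = changes
        .iter()
        .rposition(|c| matches!(c, Change::Full(_)))
        .unwrap_or(0);
    let mut text = match changes.get(start) {
        Some(Change::Full(t)) => t.clone(),
        // No full change at all: lazily fetch the current contents (the closure
        // plays the role of the VFS lookup in the real code).
        _ => file_contents(),
    };
    for change in &changes[start..] {
        if let Change::Incremental((from, to), replacement) = change {
            text.replace_range(*from..*to, replacement);
        }
    }
    text
}

fn main() {
    let changes = vec![
        Change::Incremental((0, 0), "never applied ".into()),
        Change::Full("fresh".into()),
        Change::Incremental((5, 5), " text".into()),
    ];
    // The file-contents closure is never called when a full change is present.
    assert_eq!(apply(|| unreachable!(), changes), "fresh text");
}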
@@ -451,7 +451,7 @@ impl GlobalState {
             ProjectWorkspaceProgress::Begin => (Progress::Begin, None),
             ProjectWorkspaceProgress::Report(msg) => (Progress::Report, Some(msg)),
             ProjectWorkspaceProgress::End(workspaces) => {
-                self.fetch_workspaces_queue.op_completed(workspaces);
+                self.fetch_workspaces_queue.op_completed(Some(workspaces));
 
                 let old = Arc::clone(&self.workspaces);
                 self.switch_workspaces("fetched workspace".to_string());
@@ -759,8 +759,10 @@ impl GlobalState {
 
                 let vfs = &mut this.vfs.write().0;
                 let file_id = vfs.file_id(&path).unwrap();
-                let mut text = String::from_utf8(vfs.file_contents(file_id).to_vec()).unwrap();
-                apply_document_changes(&mut text, params.content_changes);
+                let text = apply_document_changes(
+                    || std::str::from_utf8(vfs.file_contents(file_id)).unwrap().into(),
+                    params.content_changes,
+                );
 
                 vfs.set_file_contents(path, Some(text.into_bytes()));
             }
@@ -7,7 +7,7 @@ use vfs::VfsPath;
 
 /// Holds the set of in-memory documents.
 ///
-/// For these document, there true contents is maintained by the client. It
+/// For these document, their true contents is maintained by the client. It
 /// might be different from what's on disk.
 #[derive(Default, Clone)]
 pub(crate) struct MemDocs {
@@ -19,6 +19,7 @@ impl MemDocs {
     pub(crate) fn contains(&self, path: &VfsPath) -> bool {
         self.mem_docs.contains_key(path)
     }
 
     pub(crate) fn insert(&mut self, path: VfsPath, data: DocumentData) -> Result<(), ()> {
+        self.added_or_removed = true;
         match self.mem_docs.insert(path, data) {
@@ -26,6 +27,7 @@ impl MemDocs {
             None => Ok(()),
         }
     }
 
     pub(crate) fn remove(&mut self, path: &VfsPath) -> Result<(), ()> {
+        self.added_or_removed = true;
         match self.mem_docs.remove(path) {
@@ -33,17 +35,21 @@ impl MemDocs {
             None => Err(()),
         }
     }
 
     pub(crate) fn get(&self, path: &VfsPath) -> Option<&DocumentData> {
         self.mem_docs.get(path)
     }
 
     pub(crate) fn get_mut(&mut self, path: &VfsPath) -> Option<&mut DocumentData> {
+        // NB: don't set `self.added_or_removed` here, as that purposefully only
+        // tracks changes to the key set.
         self.mem_docs.get_mut(path)
     }
 
     pub(crate) fn iter(&self) -> impl Iterator<Item = &VfsPath> {
         self.mem_docs.keys()
     }
 
+    pub(crate) fn take_changes(&mut self) -> bool {
+        mem::replace(&mut self.added_or_removed, false)
+    }
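The `added_or_removed` flag added above follows a small "latch" pattern: key-set mutations set it, and `take_changes` hands it out exactly once via `mem::replace`. A stripped-down sketch with hypothetical simplified types (not the actual MemDocs API):

use std::{collections::HashMap, mem};

#[derive(Default)]
struct MemDocs {
    mem_docs: HashMap<String, String>,
    added_or_removed: bool,
}

impl MemDocs {
    fn insert(&mut self, path: String, data: String) {
        self.added_or_removed = true; // a key was (potentially) added
        self.mem_docs.insert(path, data);
    }
    fn get_mut(&mut self, path: &str) -> Option<&mut String> {
        // Content edits alone don't count as a key-set change.
        self.mem_docs.get_mut(path)
    }
    fn take_changes(&mut self) -> bool {
        mem::replace(&mut self.added_or_removed, false)
    }
}

fn main() {
    let mut docs = MemDocs::default();
    docs.insert("a.rs".into(), "fn main() {}".into());
    assert!(docs.take_changes());  // the insertion is reported exactly once
    assert!(!docs.take_changes()); // ...and the flag resets

    if let Some(text) = docs.get_mut("a.rs") {
        text.push('\n');           // editing content is not a key-set change
    }
    assert!(!docs.take_changes());
}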
@@ -106,6 +106,14 @@ impl GlobalState {
             status.health = lsp_ext::Health::Error;
             status.message = Some(error)
         }
+
+        if self.config.linked_projects().is_empty()
+            && self.config.detached_files().is_empty()
+            && self.config.notifications().cargo_toml_not_found
+        {
+            status.health = lsp_ext::Health::Warning;
+            status.message = Some("Workspace reload required".to_string())
+        }
         status
     }
 
@@ -198,12 +206,9 @@ impl GlobalState {
             self.show_and_log_error("failed to run build scripts".to_string(), Some(error));
         }
 
-        let workspaces = self
-            .fetch_workspaces_queue
-            .last_op_result()
-            .iter()
-            .filter_map(|res| res.as_ref().ok().cloned())
-            .collect::<Vec<_>>();
+        let Some(workspaces) = self.fetch_workspaces_queue.last_op_result() else { return; };
+        let workspaces =
+            workspaces.iter().filter_map(|res| res.as_ref().ok().cloned()).collect::<Vec<_>>();
 
         fn eq_ignore_build_data<'a>(
             left: &'a ProjectWorkspace,
@@ -300,9 +305,6 @@ impl GlobalState {
         let files_config = self.config.files();
         let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);
 
-        let standalone_server_name =
-            format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
-
         if self.proc_macro_clients.is_empty() {
             if let Some((path, path_manually_set)) = self.config.proc_macro_srv() {
                 tracing::info!("Spawning proc-macro servers");
@@ -310,40 +312,17 @@ impl GlobalState {
                     .workspaces
                     .iter()
                     .map(|ws| {
-                        let (path, args) = if path_manually_set {
+                        let (path, args): (_, &[_]) = if path_manually_set {
                             tracing::debug!(
                                 "Pro-macro server path explicitly set: {}",
                                 path.display()
                             );
-                            (path.clone(), vec![])
+                            (path.clone(), &[])
                         } else {
-                            let mut sysroot_server = None;
-                            if let ProjectWorkspace::Cargo { sysroot, .. }
-                            | ProjectWorkspace::Json { sysroot, .. } = ws
-                            {
-                                if let Some(sysroot) = sysroot.as_ref() {
-                                    let server_path = sysroot
-                                        .root()
-                                        .join("libexec")
-                                        .join(&standalone_server_name);
-                                    if std::fs::metadata(&server_path).is_ok() {
-                                        tracing::debug!(
-                                            "Sysroot proc-macro server exists at {}",
-                                            server_path.display()
-                                        );
-                                        sysroot_server = Some(server_path);
-                                    } else {
-                                        tracing::debug!(
-                                            "Sysroot proc-macro server does not exist at {}",
-                                            server_path.display()
-                                        );
-                                    }
-                                }
-                            }
+                            match ws.find_sysroot_proc_macro_srv() {
+                                Some(server_path) => (server_path, &[]),
+                                None => (path.clone(), &["proc-macro"]),
+                            }
-                            sysroot_server.map_or_else(
-                                || (path.clone(), vec!["proc-macro".to_owned()]),
-                                |path| (path, vec![]),
-                            )
                         };
 
                         tracing::info!(?args, "Using proc-macro server at {}", path.display(),);
@@ -427,9 +406,14 @@ impl GlobalState {
     fn fetch_workspace_error(&self) -> Result<(), String> {
         let mut buf = String::new();
 
-        for ws in self.fetch_workspaces_queue.last_op_result() {
-            if let Err(err) = ws {
-                stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
+        let Some(last_op_result) = self.fetch_workspaces_queue.last_op_result() else { return Ok(()) };
+        if last_op_result.is_empty() {
+            stdx::format_to!(buf, "rust-analyzer failed to discover workspace");
+        } else {
+            for ws in last_op_result {
+                if let Err(err) = ws {
+                    stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
+                }
             }
         }
 
@@ -24,7 +24,7 @@ use crate::{
     line_index::{LineEndings, LineIndex, PositionEncoding},
     lsp_ext,
     lsp_utils::invalid_params_error,
-    semantic_tokens, Result,
+    semantic_tokens,
 };
 
 pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
@@ -429,7 +429,7 @@ pub(crate) fn inlay_hint(
     line_index: &LineIndex,
     render_colons: bool,
     mut inlay_hint: InlayHint,
-) -> Result<lsp_types::InlayHint> {
+) -> Cancellable<lsp_types::InlayHint> {
     match inlay_hint.kind {
         InlayKind::ParameterHint if render_colons => inlay_hint.label.append_str(":"),
         InlayKind::TypeHint if render_colons => inlay_hint.label.prepend_str(": "),
@@ -440,32 +440,35 @@ pub(crate) fn inlay_hint(
     Ok(lsp_types::InlayHint {
         position: match inlay_hint.kind {
             // before annotated thing
-            InlayKind::ParameterHint
-            | InlayKind::ImplicitReborrowHint
-            | InlayKind::BindingModeHint => position(line_index, inlay_hint.range.start()),
+            InlayKind::ParameterHint | InlayKind::AdjustmentHint | InlayKind::BindingModeHint => {
+                position(line_index, inlay_hint.range.start())
+            }
             // after annotated thing
             InlayKind::ClosureReturnTypeHint
             | InlayKind::TypeHint
             | InlayKind::ChainingHint
             | InlayKind::GenericParamListHint
+            | InlayKind::AdjustmentHintClosingParenthesis
            | InlayKind::LifetimeHint
             | InlayKind::ClosingBraceHint => position(line_index, inlay_hint.range.end()),
         },
         padding_left: Some(match inlay_hint.kind {
             InlayKind::TypeHint => !render_colons,
             InlayKind::ChainingHint | InlayKind::ClosingBraceHint => true,
-            InlayKind::BindingModeHint
+            InlayKind::AdjustmentHintClosingParenthesis
+            | InlayKind::BindingModeHint
             | InlayKind::ClosureReturnTypeHint
             | InlayKind::GenericParamListHint
-            | InlayKind::ImplicitReborrowHint
+            | InlayKind::AdjustmentHint
             | InlayKind::LifetimeHint
             | InlayKind::ParameterHint => false,
         }),
         padding_right: Some(match inlay_hint.kind {
-            InlayKind::ChainingHint
+            InlayKind::AdjustmentHintClosingParenthesis
+            | InlayKind::ChainingHint
             | InlayKind::ClosureReturnTypeHint
             | InlayKind::GenericParamListHint
-            | InlayKind::ImplicitReborrowHint
+            | InlayKind::AdjustmentHint
             | InlayKind::TypeHint
             | InlayKind::ClosingBraceHint => false,
             InlayKind::BindingModeHint => inlay_hint.label.as_simple_str() != Some("&"),
@@ -476,10 +479,11 @@ pub(crate) fn inlay_hint(
             InlayKind::ClosureReturnTypeHint | InlayKind::TypeHint | InlayKind::ChainingHint => {
                 Some(lsp_types::InlayHintKind::TYPE)
             }
-            InlayKind::BindingModeHint
+            InlayKind::AdjustmentHintClosingParenthesis
+            | InlayKind::BindingModeHint
             | InlayKind::GenericParamListHint
             | InlayKind::LifetimeHint
-            | InlayKind::ImplicitReborrowHint
+            | InlayKind::AdjustmentHint
             | InlayKind::ClosingBraceHint => None,
         },
         text_edits: None,
@@ -518,7 +522,7 @@ pub(crate) fn inlay_hint(
 fn inlay_hint_label(
     snap: &GlobalStateSnapshot,
     label: InlayHintLabel,
-) -> Result<lsp_types::InlayHintLabel> {
+) -> Cancellable<lsp_types::InlayHintLabel> {
     Ok(match label.as_simple_str() {
         Some(s) => lsp_types::InlayHintLabel::String(s.into()),
         None => lsp_types::InlayHintLabel::LabelParts(
@@ -536,7 +540,7 @@ fn inlay_hint_label(
                     command: None,
                 })
             })
-            .collect::<Result<Vec<_>>>()?,
+            .collect::<Cancellable<Vec<_>>>()?,
         ),
     })
 }
@@ -794,7 +798,7 @@ pub(crate) fn optional_versioned_text_document_identifier(
 pub(crate) fn location(
     snap: &GlobalStateSnapshot,
     frange: FileRange,
-) -> Result<lsp_types::Location> {
+) -> Cancellable<lsp_types::Location> {
     let url = url(snap, frange.file_id);
     let line_index = snap.file_line_index(frange.file_id)?;
     let range = range(&line_index, frange.range);
@@ -806,7 +810,7 @@ pub(crate) fn location(
 pub(crate) fn location_from_nav(
     snap: &GlobalStateSnapshot,
     nav: NavigationTarget,
-) -> Result<lsp_types::Location> {
+) -> Cancellable<lsp_types::Location> {
     let url = url(snap, nav.file_id);
     let line_index = snap.file_line_index(nav.file_id)?;
     let range = range(&line_index, nav.full_range);
@@ -818,7 +822,7 @@ pub(crate) fn location_link(
     snap: &GlobalStateSnapshot,
     src: Option<FileRange>,
     target: NavigationTarget,
-) -> Result<lsp_types::LocationLink> {
+) -> Cancellable<lsp_types::LocationLink> {
     let origin_selection_range = match src {
         Some(src) => {
             let line_index = snap.file_line_index(src.file_id)?;
@@ -840,7 +844,7 @@ pub(crate) fn location_link(
 fn location_info(
     snap: &GlobalStateSnapshot,
     target: NavigationTarget,
-) -> Result<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> {
+) -> Cancellable<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> {
     let line_index = snap.file_line_index(target.file_id)?;
 
     let target_uri = url(snap, target.file_id);
@@ -854,12 +858,12 @@ pub(crate) fn goto_definition_response(
     snap: &GlobalStateSnapshot,
     src: Option<FileRange>,
     targets: Vec<NavigationTarget>,
-) -> Result<lsp_types::GotoDefinitionResponse> {
+) -> Cancellable<lsp_types::GotoDefinitionResponse> {
     if snap.config.location_link() {
         let links = targets
             .into_iter()
             .map(|nav| location_link(snap, src, nav))
-            .collect::<Result<Vec<_>>>()?;
+            .collect::<Cancellable<Vec<_>>>()?;
         Ok(links.into())
     } else {
         let locations = targets
@@ -867,7 +871,7 @@ pub(crate) fn goto_definition_response(
             .map(|nav| {
                 location(snap, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
             })
-            .collect::<Result<Vec<_>>>()?;
+            .collect::<Cancellable<Vec<_>>>()?;
         Ok(locations.into())
     }
 }
@@ -881,7 +885,7 @@ pub(crate) fn snippet_text_document_edit(
     is_snippet: bool,
     file_id: FileId,
     edit: TextEdit,
-) -> Result<lsp_ext::SnippetTextDocumentEdit> {
+) -> Cancellable<lsp_ext::SnippetTextDocumentEdit> {
     let text_document = optional_versioned_text_document_identifier(snap, file_id);
     let line_index = snap.file_line_index(file_id)?;
     let mut edits: Vec<_> =
@@ -958,7 +962,7 @@ pub(crate) fn snippet_text_document_ops(
 pub(crate) fn snippet_workspace_edit(
     snap: &GlobalStateSnapshot,
     source_change: SourceChange,
-) -> Result<lsp_ext::SnippetWorkspaceEdit> {
+) -> Cancellable<lsp_ext::SnippetWorkspaceEdit> {
     let mut document_changes: Vec<lsp_ext::SnippetDocumentChangeOperation> = Vec::new();
 
     for op in source_change.file_system_edits {
@@ -995,7 +999,7 @@ pub(crate) fn snippet_workspace_edit(
 pub(crate) fn workspace_edit(
     snap: &GlobalStateSnapshot,
     source_change: SourceChange,
-) -> Result<lsp_types::WorkspaceEdit> {
+) -> Cancellable<lsp_types::WorkspaceEdit> {
     assert!(!source_change.is_snippet);
     snippet_workspace_edit(snap, source_change).map(|it| it.into())
 }
@@ -1048,7 +1052,7 @@ impl From<lsp_ext::SnippetTextEdit>
 pub(crate) fn call_hierarchy_item(
     snap: &GlobalStateSnapshot,
     target: NavigationTarget,
-) -> Result<lsp_types::CallHierarchyItem> {
+) -> Cancellable<lsp_types::CallHierarchyItem> {
     let name = target.name.to_string();
     let detail = target.description.clone();
     let kind = target.kind.map(symbol_kind).unwrap_or(lsp_types::SymbolKind::FUNCTION);
@@ -1080,7 +1084,7 @@ pub(crate) fn code_action(
     snap: &GlobalStateSnapshot,
     assist: Assist,
     resolve_data: Option<(usize, lsp_types::CodeActionParams)>,
-) -> Result<lsp_ext::CodeAction> {
+) -> Cancellable<lsp_ext::CodeAction> {
     let mut res = lsp_ext::CodeAction {
         title: assist.label.to_string(),
         group: assist.group.filter(|_| snap.config.code_action_group()).map(|gr| gr.0),
@@ -1113,13 +1117,13 @@ pub(crate) fn code_action(
 pub(crate) fn runnable(
     snap: &GlobalStateSnapshot,
     runnable: Runnable,
-) -> Result<lsp_ext::Runnable> {
+) -> Cancellable<lsp_ext::Runnable> {
     let config = snap.config.runnables();
     let spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id)?;
     let workspace_root = spec.as_ref().map(|it| it.workspace_root.clone());
     let target = spec.as_ref().map(|s| s.target.clone());
     let (cargo_args, executable_args) =
-        CargoTargetSpec::runnable_args(snap, spec, &runnable.kind, &runnable.cfg)?;
+        CargoTargetSpec::runnable_args(snap, spec, &runnable.kind, &runnable.cfg);
     let label = runnable.label(target);
     let location = location_link(snap, None, runnable.nav)?;
 
@@ -1142,7 +1146,7 @@ pub(crate) fn code_lens(
     acc: &mut Vec<lsp_types::CodeLens>,
     snap: &GlobalStateSnapshot,
     annotation: Annotation,
-) -> Result<()> {
+) -> Cancellable<()> {
     let client_commands_config = snap.config.client_commands();
     match annotation.kind {
         AnnotationKind::Runnable(run) => {