⬆️ rust-analyzer

Merge commit '368e0bb32f'
This commit is contained in:
arcnmx 2023-01-09 10:36:22 -08:00
parent b3ef934ccb
commit 25242fe93f
395 changed files with 14569 additions and 5755 deletions

View file

@ -81,9 +81,9 @@ impl Logger {
Registry::default()
.with(
self.filter
.add_directive(format!("chalk_solve={}", val).parse()?)
.add_directive(format!("chalk_ir={}", val).parse()?)
.add_directive(format!("chalk_recursive={}", val).parse()?),
.add_directive(format!("chalk_solve={val}").parse()?)
.add_directive(format!("chalk_ir={val}").parse()?)
.add_directive(format!("chalk_recursive={val}").parse()?),
)
.with(ra_fmt_layer)
.with(chalk_layer)
@ -124,7 +124,7 @@ where
Some(log) => log.target(),
None => event.metadata().target(),
};
write!(writer, "[{} {}] ", level, target)?;
write!(writer, "[{level} {target}] ")?;
// Write spans and fields of each span
ctx.visit_spans(|span| {
@ -140,7 +140,7 @@ where
let fields = &ext.get::<FormattedFields<N>>().expect("will never be `None`");
if !fields.is_empty() {
write!(writer, "{{{}}}", fields)?;
write!(writer, "{{{fields}}}")?;
}
write!(writer, ": ")?;

View file

@ -30,7 +30,7 @@ fn main() {
let code = match rustc_wrapper::run_rustc_skipping_cargo_checking(rustc, args.collect()) {
Ok(rustc_wrapper::ExitCode(code)) => code.unwrap_or(102),
Err(err) => {
eprintln!("{}", err);
eprintln!("{err}");
101
}
};
@ -40,7 +40,7 @@ fn main() {
let flags = flags::RustAnalyzer::from_env_or_exit();
if let Err(err) = try_main(flags) {
tracing::error!("Unexpected error: {}", err);
eprintln!("{}", err);
eprintln!("{err}");
process::exit(101);
}
}
@ -183,6 +183,8 @@ fn run_server() -> Result<()> {
}
}
config.client_specific_adjustments(&initialize_params.client_info);
let server_capabilities = rust_analyzer::server_capabilities(&config);
let initialize_result = lsp_types::InitializeResult {

View file

@ -42,7 +42,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities {
"(".to_string(),
]),
all_commit_characters: None,
completion_item: completion_item(&config),
completion_item: completion_item(config),
work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
}),
signature_help_provider: Some(SignatureHelpOptions {
@ -67,7 +67,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities {
},
document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions {
first_trigger_character: "=".to_string(),
more_trigger_character: Some(more_trigger_character(&config)),
more_trigger_character: Some(more_trigger_character(config)),
}),
selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)),
folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),

View file

@ -46,7 +46,7 @@ fn report_metric(metric: &str, value: u64, unit: &str) {
if std::env::var("RA_METRICS").is_err() {
return;
}
println!("METRIC:{}:{}:{}", metric, value, unit)
println!("METRIC:{metric}:{value}:{unit}")
}
fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) {
@ -65,6 +65,6 @@ fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) {
for (name, bytes) in mem {
// NOTE: Not a debug print, so avoid going through the `eprintln` defined above.
eprintln!("{:>8} {}", bytes, name);
eprintln!("{bytes:>8} {name}");
}
}

View file

@ -87,9 +87,9 @@ impl flags::AnalysisStats {
load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
let db = host.raw_database();
eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());
eprint!(" (metadata {}", metadata_time);
eprint!(" (metadata {metadata_time}");
if let Some(build_scripts_time) = build_scripts_time {
eprint!("; build {}", build_scripts_time);
eprint!("; build {build_scripts_time}");
}
eprintln!(")");
@ -118,7 +118,7 @@ impl flags::AnalysisStats {
shuffle(&mut rng, &mut visit_queue);
}
eprint!(" crates: {}", num_crates);
eprint!(" crates: {num_crates}");
let mut num_decls = 0;
let mut funcs = Vec::new();
while let Some(module) = visit_queue.pop() {
@ -142,7 +142,7 @@ impl flags::AnalysisStats {
}
}
}
eprintln!(", mods: {}, decls: {}, fns: {}", visited_modules.len(), num_decls, funcs.len());
eprintln!(", mods: {}, decls: {num_decls}, fns: {}", visited_modules.len(), funcs.len());
eprintln!("{:<20} {}", "Item Collection:", analysis_sw.elapsed());
if self.randomize {
@ -154,7 +154,7 @@ impl flags::AnalysisStats {
}
let total_span = analysis_sw.elapsed();
eprintln!("{:<20} {}", "Total:", total_span);
eprintln!("{:<20} {total_span}", "Total:");
report_metric("total time", total_span.time.as_millis() as u64, "ms");
if let Some(instructions) = total_span.instructions {
report_metric("total instructions", instructions, "#instr");
@ -179,7 +179,7 @@ impl flags::AnalysisStats {
total_macro_file_size += syntax_len(val.syntax_node())
}
}
eprintln!("source files: {}, macro files: {}", total_file_size, total_macro_file_size);
eprintln!("source files: {total_file_size}, macro files: {total_macro_file_size}");
}
if self.memory_usage && verbosity.is_verbose() {
@ -239,7 +239,7 @@ impl flags::AnalysisStats {
continue;
}
}
let mut msg = format!("processing: {}", full_name);
let mut msg = format!("processing: {full_name}");
if verbosity.is_verbose() {
if let Some(src) = f.source(db) {
let original_file = src.file_id.original_file(db);
@ -275,7 +275,7 @@ impl flags::AnalysisStats {
end.col,
));
} else {
bar.println(format!("{}: Unknown type", name,));
bar.println(format!("{name}: Unknown type",));
}
}
true
@ -402,7 +402,7 @@ fn location_csv(
let text_range = original_range.range;
let (start, end) =
(line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
format!("{},{}:{},{}:{}", path, start.line + 1, start.col, end.line + 1, end.col)
format!("{path},{}:{},{}:{}", start.line + 1, start.col, end.line + 1, end.col)
}
fn expr_syntax_range(

View file

@ -40,7 +40,7 @@ impl flags::Diagnostics {
if !visited_files.contains(&file_id) {
let crate_name =
module.krate().display_name(db).as_deref().unwrap_or("unknown").to_string();
println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id));
println!("processing crate: {crate_name}, module: {}", _vfs.file_path(file_id));
for diagnostic in analysis
.diagnostics(
&DiagnosticsConfig::test_sample(),
@ -53,7 +53,7 @@ impl flags::Diagnostics {
found_error = true;
}
println!("{:?}", diagnostic);
println!("{diagnostic:?}");
}
visited_files.insert(file_id);

View file

@ -255,7 +255,7 @@ impl FromStr for OutputFormat {
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"csv" => Ok(Self::Csv),
_ => Err(format!("unknown output format `{}`", s)),
_ => Err(format!("unknown output format `{s}`")),
}
}
}

View file

@ -8,7 +8,7 @@ impl flags::Highlight {
pub fn run(self) -> anyhow::Result<()> {
let (analysis, file_id) = Analysis::from_single_file(read_stdin()?);
let html = analysis.highlight_as_html(file_id, self.rainbow).unwrap();
println!("{}", html);
println!("{html}");
Ok(())
}
}

View file

@ -83,7 +83,7 @@ impl LsifManager<'_> {
// FIXME: support file in addition to stdout here
fn emit(&self, data: &str) {
println!("{}", data);
println!("{data}");
}
fn get_token_id(&mut self, id: TokenId) -> Id {
@ -253,7 +253,7 @@ impl LsifManager<'_> {
};
let result = folds
.into_iter()
.map(|it| to_proto::folding_range(&*text, &line_index, false, it))
.map(|it| to_proto::folding_range(&text, &line_index, false, it))
.collect();
let folding_id = self.add_vertex(lsif::Vertex::FoldingRangeResult { result });
self.add_edge(lsif::Edge::FoldingRange(lsif::EdgeData {

View file

@ -67,7 +67,7 @@ impl ProgressReport {
return;
}
let percent = (self.curr * 100.0) as u32;
let text = format!("{}/{} {:3>}% {}", self.pos, self.len, percent, self.msg);
let text = format!("{}/{} {percent:3>}% {}", self.pos, self.len, self.msg);
self.update_text(&text);
}
@ -114,7 +114,7 @@ impl ProgressReport {
// Fill all last text to space and return the cursor
let spaces = " ".repeat(self.text.len());
let backspaces = "\x08".repeat(self.text.len());
print!("{}{}{}", backspaces, spaces, backspaces);
print!("{backspaces}{spaces}{backspaces}");
let _ = io::stdout().flush();
self.text = String::new();

View file

@ -28,7 +28,7 @@ impl flags::Scip {
let now = Instant::now();
let cargo_config = CargoConfig::default();
let no_progress = &|s| (eprintln!("rust-analyzer: Loading {}", s));
let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}"));
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
with_proc_macro: true,
@ -102,7 +102,7 @@ impl flags::Scip {
let symbol = tokens_to_symbol
.entry(id)
.or_insert_with(|| {
let symbol = token_to_symbol(&token).unwrap_or_else(&mut new_local_symbol);
let symbol = token_to_symbol(token).unwrap_or_else(&mut new_local_symbol);
scip::symbol::format_symbol(symbol)
})
.clone();
@ -176,7 +176,7 @@ fn get_relative_filepath(
rootpath: &vfs::AbsPathBuf,
file_id: ide::FileId,
) -> Option<String> {
Some(vfs.file_path(file_id).as_path()?.strip_prefix(&rootpath)?.as_ref().to_str()?.to_string())
Some(vfs.file_path(file_id).as_path()?.strip_prefix(rootpath)?.as_ref().to_str()?.to_string())
}
// SCIP Ranges have a (very large) optimization that ranges if they are on the same line
@ -209,7 +209,7 @@ fn new_descriptor_str(
fn new_descriptor(name: Name, suffix: scip_types::descriptor::Suffix) -> scip_types::Descriptor {
let mut name = name.to_string();
if name.contains("'") {
name = format!("`{}`", name);
name = format!("`{name}`");
}
new_descriptor_str(name.as_str(), suffix)
@ -303,11 +303,11 @@ mod test {
}
if expected == "" {
assert!(found_symbol.is_none(), "must have no symbols {:?}", found_symbol);
assert!(found_symbol.is_none(), "must have no symbols {found_symbol:?}");
return;
}
assert!(found_symbol.is_some(), "must have one symbol {:?}", found_symbol);
assert!(found_symbol.is_some(), "must have one symbol {found_symbol:?}");
let res = found_symbol.unwrap();
let formatted = format_symbol(res);
assert_eq!(formatted, expected);

View file

@ -70,7 +70,7 @@ impl flags::Search {
let sr = db.source_root(root);
for file_id in sr.iter() {
for debug_info in match_finder.debug_where_text_equal(file_id, debug_snippet) {
println!("{:#?}", debug_info);
println!("{debug_info:#?}");
}
}
}

View file

@ -9,7 +9,7 @@ impl flags::Symbols {
let (analysis, file_id) = Analysis::from_single_file(text);
let structure = analysis.file_structure(file_id).unwrap();
for s in structure {
println!("{:?}", s);
println!("{s:?}");
}
Ok(())
}

View file

@ -20,7 +20,7 @@ use ide_db::{
SnippetCap,
};
use itertools::Itertools;
use lsp_types::{ClientCapabilities, MarkupKind};
use lsp_types::{ClientCapabilities, ClientInfo, MarkupKind};
use project_model::{
CargoConfig, CargoFeatures, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource,
UnsetTestCrates,
@ -124,22 +124,23 @@ config_data! {
/// Unsets `#[cfg(test)]` for the specified crates.
cargo_unsetTest: Vec<String> = "[\"core\"]",
/// Run the check command for diagnostics on save.
checkOnSave | checkOnSave_enable: bool = "true",
/// Check all targets and tests (`--all-targets`).
checkOnSave_allTargets: bool = "true",
check_allTargets | checkOnSave_allTargets: bool = "true",
/// Cargo command to use for `cargo check`.
checkOnSave_command: String = "\"check\"",
/// Run specified `cargo check` command for diagnostics on save.
checkOnSave_enable: bool = "true",
check_command | checkOnSave_command: String = "\"check\"",
/// Extra arguments for `cargo check`.
checkOnSave_extraArgs: Vec<String> = "[]",
check_extraArgs | checkOnSave_extraArgs: Vec<String> = "[]",
/// Extra environment variables that will be set when running `cargo check`.
/// Extends `#rust-analyzer.cargo.extraEnv#`.
checkOnSave_extraEnv: FxHashMap<String, String> = "{}",
check_extraEnv | checkOnSave_extraEnv: FxHashMap<String, String> = "{}",
/// List of features to activate. Defaults to
/// `#rust-analyzer.cargo.features#`.
///
/// Set to `"all"` to pass `--all-features` to Cargo.
checkOnSave_features: Option<CargoFeaturesDef> = "null",
check_features | checkOnSave_features: Option<CargoFeaturesDef> = "null",
/// Specifies the working directory for running checks.
/// - "workspace": run checks for workspaces in the corresponding workspaces' root directories.
// FIXME: Ideally we would support this in some way
@ -147,19 +148,21 @@ config_data! {
/// - "root": run checks in the project's root directory.
/// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
/// is set.
checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"",
check_invocationLocation | checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"",
/// Specifies the invocation strategy to use when running the checkOnSave command.
/// If `per_workspace` is set, the command will be executed for each workspace.
/// If `once` is set, the command will be executed once.
/// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
/// is set.
checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"",
check_invocationStrategy | checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"",
/// Whether to pass `--no-default-features` to Cargo. Defaults to
/// `#rust-analyzer.cargo.noDefaultFeatures#`.
checkOnSave_noDefaultFeatures: Option<bool> = "null",
check_noDefaultFeatures | checkOnSave_noDefaultFeatures: Option<bool> = "null",
/// Override the command rust-analyzer uses instead of `cargo check` for
/// diagnostics on save. The command is required to output json and
/// should therefore include `--message-format=json` or a similar option.
/// should therefore include `--message-format=json` or a similar option
/// (if your client supports the `colorDiagnosticOutput` experimental
/// capability, you can use `--message-format=json-diagnostic-rendered-ansi`).
///
/// If you're changing this because you're using some tool wrapping
/// Cargo, you might also want to change
@ -175,14 +178,14 @@ config_data! {
/// cargo check --workspace --message-format=json --all-targets
/// ```
/// .
checkOnSave_overrideCommand: Option<Vec<String>> = "null",
check_overrideCommand | checkOnSave_overrideCommand: Option<Vec<String>> = "null",
/// Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.
///
/// Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g.
/// `["aarch64-apple-darwin", "x86_64-apple-darwin"]`.
///
/// Aliased as `"checkOnSave.targets"`.
checkOnSave_target | checkOnSave_targets: CheckOnSaveTargets = "[]",
check_targets | checkOnSave_targets | checkOnSave_target: Option<CheckOnSaveTargets> = "null",
/// Toggles the additional completions that automatically add imports when completed.
/// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
@ -327,12 +330,20 @@ config_data! {
inlayHints_closingBraceHints_minLines: usize = "25",
/// Whether to show inlay type hints for return types of closures.
inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef = "\"never\"",
/// Whether to show enum variant discriminant hints.
inlayHints_discriminantHints_enable: DiscriminantHintsDef = "\"never\"",
/// Whether to show inlay hints for type adjustments.
inlayHints_expressionAdjustmentHints_enable: AdjustmentHintsDef = "\"never\"",
/// Whether to hide inlay hints for type adjustments outside of `unsafe` blocks.
inlayHints_expressionAdjustmentHints_hideOutsideUnsafe: bool = "false",
/// Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc).
inlayHints_expressionAdjustmentHints_mode: AdjustmentHintsModeDef = "\"prefix\"",
/// Whether to show inlay type hints for elided lifetimes in function signatures.
inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = "\"never\"",
/// Whether to prefer using parameter names as the name for elided lifetime hints if possible.
inlayHints_lifetimeElisionHints_useParameterNames: bool = "false",
/// Whether to use location links for parts of type mentioned in inlay hints.
inlayHints_locationLinks: bool = "true",
/// Maximum length for inlay hints. Set to null to have an unlimited length.
inlayHints_maxLength: Option<usize> = "25",
/// Whether to show function parameter name inlay hints at the call
@ -403,6 +414,9 @@ config_data! {
/// Whether to show `can't find Cargo.toml` error message.
notifications_cargoTomlNotFound: bool = "true",
/// How many worker threads in the main loop. The default `null` means to pick automatically.
numThreads: Option<usize> = "null",
/// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
procMacro_attributes_enable: bool = "true",
/// Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
@ -714,6 +728,19 @@ impl Config {
}
}
pub fn client_specific_adjustments(&mut self, client_info: &Option<ClientInfo>) {
// FIXME: remove this when we drop support for vscode 1.65 and below
if let Some(client) = client_info {
if client.name.contains("Code") || client.name.contains("Codium") {
if let Some(version) = &client.version {
if version.as_str() < "1.76" {
self.data.inlayHints_locationLinks = false;
}
}
}
}
}
pub fn update(&mut self, mut json: serde_json::Value) -> Result<(), ConfigUpdateError> {
tracing::info!("updating config from JSON: {:#}", json);
if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) {
@ -767,9 +794,9 @@ impl Config {
fn validate(&self, error_sink: &mut Vec<(String, serde_json::Error)>) {
use serde::de::Error;
if self.data.checkOnSave_command.is_empty() {
if self.data.check_command.is_empty() {
error_sink.push((
"/checkOnSave/command".to_string(),
"/check/command".to_string(),
serde_json::Error::custom("expected a non-empty string"),
));
}
@ -981,6 +1008,11 @@ impl Config {
self.experimental("serverStatusNotification")
}
/// Whether the client supports colored output for full diagnostics from `checkOnSave`.
pub fn color_diagnostic_output(&self) -> bool {
self.experimental("colorDiagnosticOutput")
}
pub fn publish_diagnostics(&self) -> bool {
self.data.diagnostics_enable
}
@ -1014,7 +1046,7 @@ impl Config {
pub fn check_on_save_extra_env(&self) -> FxHashMap<String, String> {
let mut extra_env = self.data.cargo_extraEnv.clone();
extra_env.extend(self.data.checkOnSave_extraEnv.clone());
extra_env.extend(self.data.check_extraEnv.clone());
extra_env
}
@ -1125,11 +1157,8 @@ impl Config {
}
}
pub fn flycheck(&self) -> Option<FlycheckConfig> {
if !self.data.checkOnSave_enable {
return None;
}
let flycheck_config = match &self.data.checkOnSave_overrideCommand {
pub fn flycheck(&self) -> FlycheckConfig {
match &self.data.check_overrideCommand {
Some(args) if !args.is_empty() => {
let mut args = args.clone();
let command = args.remove(0);
@ -1137,13 +1166,13 @@ impl Config {
command,
args,
extra_env: self.check_on_save_extra_env(),
invocation_strategy: match self.data.checkOnSave_invocationStrategy {
invocation_strategy: match self.data.check_invocationStrategy {
InvocationStrategy::Once => flycheck::InvocationStrategy::Once,
InvocationStrategy::PerWorkspace => {
flycheck::InvocationStrategy::PerWorkspace
}
},
invocation_location: match self.data.checkOnSave_invocationLocation {
invocation_location: match self.data.check_invocationLocation {
InvocationLocation::Root => {
flycheck::InvocationLocation::Root(self.root_path.clone())
}
@ -1152,34 +1181,43 @@ impl Config {
}
}
Some(_) | None => FlycheckConfig::CargoCommand {
command: self.data.checkOnSave_command.clone(),
target_triples: match &self.data.checkOnSave_target.0[..] {
[] => self.data.cargo_target.clone().into_iter().collect(),
targets => targets.into(),
},
all_targets: self.data.checkOnSave_allTargets,
command: self.data.check_command.clone(),
target_triples: self
.data
.check_targets
.clone()
.and_then(|targets| match &targets.0[..] {
[] => None,
targets => Some(targets.into()),
})
.unwrap_or_else(|| self.data.cargo_target.clone().into_iter().collect()),
all_targets: self.data.check_allTargets,
no_default_features: self
.data
.checkOnSave_noDefaultFeatures
.check_noDefaultFeatures
.unwrap_or(self.data.cargo_noDefaultFeatures),
all_features: matches!(
self.data.checkOnSave_features.as_ref().unwrap_or(&self.data.cargo_features),
self.data.check_features.as_ref().unwrap_or(&self.data.cargo_features),
CargoFeaturesDef::All
),
features: match self
.data
.checkOnSave_features
.check_features
.clone()
.unwrap_or_else(|| self.data.cargo_features.clone())
{
CargoFeaturesDef::All => vec![],
CargoFeaturesDef::Selected(it) => it,
},
extra_args: self.data.checkOnSave_extraArgs.clone(),
extra_args: self.data.check_extraArgs.clone(),
extra_env: self.check_on_save_extra_env(),
ansi_color_output: self.color_diagnostic_output(),
},
};
Some(flycheck_config)
}
}
pub fn check_on_save(&self) -> bool {
self.data.checkOnSave
}
pub fn runnables(&self) -> RunnablesConfig {
@ -1191,10 +1229,16 @@ impl Config {
pub fn inlay_hints(&self) -> InlayHintsConfig {
InlayHintsConfig {
location_links: self.data.inlayHints_locationLinks,
render_colons: self.data.inlayHints_renderColons,
type_hints: self.data.inlayHints_typeHints_enable,
parameter_hints: self.data.inlayHints_parameterHints_enable,
chaining_hints: self.data.inlayHints_chainingHints_enable,
discriminant_hints: match self.data.inlayHints_discriminantHints_enable {
DiscriminantHintsDef::Always => ide::DiscriminantHints::Always,
DiscriminantHintsDef::Never => ide::DiscriminantHints::Never,
DiscriminantHintsDef::Fieldless => ide::DiscriminantHints::Fieldless,
},
closure_return_type_hints: match self.data.inlayHints_closureReturnTypeHints_enable {
ClosureReturnTypeHintsDef::Always => ide::ClosureReturnTypeHints::Always,
ClosureReturnTypeHintsDef::Never => ide::ClosureReturnTypeHints::Never,
@ -1219,6 +1263,15 @@ impl Config {
},
AdjustmentHintsDef::Reborrow => ide::AdjustmentHints::ReborrowOnly,
},
adjustment_hints_mode: match self.data.inlayHints_expressionAdjustmentHints_mode {
AdjustmentHintsModeDef::Prefix => ide::AdjustmentHintsMode::Prefix,
AdjustmentHintsModeDef::Postfix => ide::AdjustmentHintsMode::Postfix,
AdjustmentHintsModeDef::PreferPrefix => ide::AdjustmentHintsMode::PreferPrefix,
AdjustmentHintsModeDef::PreferPostfix => ide::AdjustmentHintsMode::PreferPostfix,
},
adjustment_hints_hide_outside_unsafe: self
.data
.inlayHints_expressionAdjustmentHints_hideOutsideUnsafe,
binding_mode_hints: self.data.inlayHints_bindingModeHints_enable,
param_names_for_lifetime_elision_hints: self
.data
@ -1449,6 +1502,10 @@ impl Config {
}
}
pub fn main_loop_num_threads(&self) -> usize {
self.data.numThreads.unwrap_or(num_cpus::get_physical().try_into().unwrap_or(1))
}
pub fn typing_autoclose_angle(&self) -> bool {
self.data.typing_autoClosingAngleBrackets_enable
}
@ -1553,6 +1610,7 @@ mod de_unit_v {
named_unit_variant!(skip_trivial);
named_unit_variant!(mutable);
named_unit_variant!(reborrow);
named_unit_variant!(fieldless);
named_unit_variant!(with_block);
}
@ -1716,6 +1774,26 @@ enum AdjustmentHintsDef {
Reborrow,
}
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum DiscriminantHintsDef {
#[serde(deserialize_with = "true_or_always")]
Always,
#[serde(deserialize_with = "false_or_never")]
Never,
#[serde(deserialize_with = "de_unit_v::fieldless")]
Fieldless,
}
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum AdjustmentHintsModeDef {
Prefix,
Postfix,
PreferPrefix,
PreferPostfix,
}
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum FilesWatcherDef {
@ -1817,40 +1895,35 @@ fn get_field<T: DeserializeOwned>(
alias: Option<&'static str>,
default: &str,
) -> T {
let default = serde_json::from_str(default).unwrap();
// XXX: check alias first, to work-around the VS Code where it pre-fills the
// defaults instead of sending an empty object.
alias
.into_iter()
.chain(iter::once(field))
.find_map(move |field| {
.filter_map(move |field| {
let mut pointer = field.replace('_', "/");
pointer.insert(0, '/');
json.pointer_mut(&pointer).and_then(|it| match serde_json::from_value(it.take()) {
Ok(it) => Some(it),
Err(e) => {
tracing::warn!("Failed to deserialize config field at {}: {:?}", pointer, e);
error_sink.push((pointer, e));
None
}
})
json.pointer_mut(&pointer)
.map(|it| serde_json::from_value(it.take()).map_err(|e| (e, pointer)))
})
.unwrap_or(default)
.find(Result::is_ok)
.and_then(|res| match res {
Ok(it) => Some(it),
Err((e, pointer)) => {
tracing::warn!("Failed to deserialize config field at {}: {:?}", pointer, e);
error_sink.push((pointer, e));
None
}
})
.unwrap_or_else(|| serde_json::from_str(default).unwrap())
}
fn schema(fields: &[(&'static str, &'static str, &[&str], &str)]) -> serde_json::Value {
for ((f1, ..), (f2, ..)) in fields.iter().zip(&fields[1..]) {
fn key(f: &str) -> &str {
f.splitn(2, '_').next().unwrap()
}
assert!(key(f1) <= key(f2), "wrong field order: {:?} {:?}", f1, f2);
}
let map = fields
.iter()
.map(|(field, ty, doc, default)| {
let name = field.replace('_', ".");
let name = format!("rust-analyzer.{}", name);
let name = format!("rust-analyzer.{name}");
let props = field_props(field, ty, doc, default);
(name, props)
})
@ -1863,9 +1936,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
let doc = doc.trim_end_matches('\n');
assert!(
doc.ends_with('.') && doc.starts_with(char::is_uppercase),
"bad docs for {}: {:?}",
field,
doc
"bad docs for {field}: {doc:?}"
);
let default = default.parse::<serde_json::Value>().unwrap();
@ -1921,15 +1992,6 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
"type": ["null", "array"],
"items": { "type": "string" },
},
"MergeBehaviorDef" => set! {
"type": "string",
"enum": ["none", "crate", "module"],
"enumDescriptions": [
"Do not merge imports at all.",
"Merge imports from the same crate into a single `use` statement.",
"Merge imports from the same module into a single `use` statement."
],
},
"ExprFillDefaultDef" => set! {
"type": "string",
"enum": ["todo", "default"],
@ -2038,6 +2100,34 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
"Only show auto borrow and dereference adjustment hints."
]
},
"DiscriminantHintsDef" => set! {
"type": "string",
"enum": [
"always",
"never",
"fieldless"
],
"enumDescriptions": [
"Always show all discriminant hints.",
"Never show discriminant hints.",
"Only show discriminant hints on fieldless enum variants."
]
},
"AdjustmentHintsModeDef" => set! {
"type": "string",
"enum": [
"prefix",
"postfix",
"prefer_prefix",
"prefer_postfix",
],
"enumDescriptions": [
"Always show adjustment hints as prefix (`*expr`).",
"Always show adjustment hints as postfix (`expr.*`).",
"Show prefix or postfix depending on which uses less parenthesis, prefering prefix.",
"Show prefix or postfix depending on which uses less parenthesis, prefering postfix.",
]
},
"CargoFeaturesDef" => set! {
"anyOf": [
{
@ -2126,8 +2216,11 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
"The command will be executed in the project root."
],
},
"CheckOnSaveTargets" => set! {
"Option<CheckOnSaveTargets>" => set! {
"anyOf": [
{
"type": "null"
},
{
"type": "string",
},
@ -2137,7 +2230,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
},
],
},
_ => panic!("missing entry for {}: {}", ty, default),
_ => panic!("missing entry for {ty}: {default}"),
}
map.into()
@ -2149,30 +2242,29 @@ fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String {
.iter()
.map(|(field, _ty, doc, default)| {
let name = format!("rust-analyzer.{}", field.replace('_', "."));
let doc = doc_comment_to_string(*doc);
let doc = doc_comment_to_string(doc);
if default.contains('\n') {
format!(
r#"[[{}]]{}::
r#"[[{name}]]{name}::
+
--
Default:
----
{}
{default}
----
{}
{doc}
--
"#,
name, name, default, doc
"#
)
} else {
format!("[[{}]]{} (default: `{}`)::\n+\n--\n{}--\n", name, name, default, doc)
format!("[[{name}]]{name} (default: `{default}`)::\n+\n--\n{doc}--\n")
}
})
.collect::<String>()
}
fn doc_comment_to_string(doc: &[&str]) -> String {
doc.iter().map(|it| it.strip_prefix(' ').unwrap_or(it)).map(|it| format!("{}\n", it)).collect()
doc.iter().map(|it| it.strip_prefix(' ').unwrap_or(it)).map(|it| format!("{it}\n")).collect()
}
#[cfg(test)]
@ -2186,7 +2278,7 @@ mod tests {
#[test]
fn generate_package_json_config() {
let s = Config::json_schema();
let schema = format!("{:#}", s);
let schema = format!("{s:#}");
let mut schema = schema
.trim_start_matches('{')
.trim_end_matches('}')

View file

@ -4,6 +4,9 @@ use serde_json::{json, Value};
/// This function patches the json config to the new expected keys.
/// That is we try to load old known config keys here and convert them to the new ones.
/// See https://github.com/rust-lang/rust-analyzer/pull/12010
///
/// We already have an alias system for simple cases, but if we make structural changes
/// the alias infra fails down.
pub(super) fn patch_json_for_outdated_configs(json: &mut Value) {
let copy = json.clone();
@ -105,9 +108,9 @@ pub(super) fn patch_json_for_outdated_configs(json: &mut Value) {
merge(json, json!({ "cargo": { "features": "all" } }));
}
// checkOnSave_allFeatures, checkOnSave_features -> checkOnSave_features
// checkOnSave_allFeatures, checkOnSave_features -> check_features
if let Some(Value::Bool(true)) = copy.pointer("/checkOnSave/allFeatures") {
merge(json, json!({ "checkOnSave": { "features": "all" } }));
merge(json, json!({ "check": { "features": "all" } }));
}
// completion_addCallArgumentSnippets completion_addCallParenthesis -> completion_callable_snippets
@ -116,11 +119,21 @@ pub(super) fn patch_json_for_outdated_configs(json: &mut Value) {
copy.pointer("/completion/addCallParenthesis"),
) {
(Some(Value::Bool(true)), Some(Value::Bool(true))) => json!("fill_arguments"),
(Some(Value::Bool(true)), _) => json!("add_parentheses"),
(_, Some(Value::Bool(true))) => json!("add_parentheses"),
(Some(Value::Bool(false)), Some(Value::Bool(false))) => json!("none"),
(_, _) => return,
};
merge(json, json!({ "completion": { "callable": {"snippets": res }} }));
// We need to do this due to the checkOnSave_enable -> checkOnSave change, as that key now can either be an object or a bool
// checkOnSave_* -> check_*
if let Some(Value::Object(obj)) = copy.pointer("/checkOnSave") {
// checkOnSave_enable -> checkOnSave
if let Some(b @ Value::Bool(_)) = obj.get("enable") {
merge(json, json!({ "checkOnSave": b }));
}
merge(json, json!({ "check": obj }));
}
}
fn merge(dst: &mut Value, src: Value) {

View file

@ -101,8 +101,7 @@ impl DiagnosticCollection {
file_id: FileId,
) -> impl Iterator<Item = &lsp_types::Diagnostic> {
let native = self.native.get(&file_id).into_iter().flatten();
let check =
self.check.values().filter_map(move |it| it.get(&file_id)).into_iter().flatten();
let check = self.check.values().filter_map(move |it| it.get(&file_id)).flatten();
native.chain(check)
}

View file

@ -161,7 +161,7 @@ fn resolve_path(
.iter()
.find_map(|(from, to)| file_name.strip_prefix(from).map(|file_name| (to, file_name)))
{
Some((to, file_name)) => workspace_root.join(format!("{}{}", to, file_name)),
Some((to, file_name)) => workspace_root.join(format!("{to}{file_name}")),
None => workspace_root.join(file_name),
}
}
@ -191,6 +191,7 @@ fn map_rust_child_diagnostic(
let mut edit_map: HashMap<lsp_types::Url, Vec<lsp_types::TextEdit>> = HashMap::new();
let mut suggested_replacements = Vec::new();
let mut is_preferred = true;
for &span in &spans {
if let Some(suggested_replacement) = &span.suggested_replacement {
if !suggested_replacement.is_empty() {
@ -209,6 +210,8 @@ fn map_rust_child_diagnostic(
) {
edit_map.entry(location.uri).or_default().push(edit);
}
is_preferred &=
matches!(span.suggestion_applicability, Some(Applicability::MachineApplicable));
}
}
@ -218,7 +221,7 @@ fn map_rust_child_diagnostic(
if !suggested_replacements.is_empty() {
message.push_str(": ");
let suggestions =
suggested_replacements.iter().map(|suggestion| format!("`{}`", suggestion)).join(", ");
suggested_replacements.iter().map(|suggestion| format!("`{suggestion}`")).join(", ");
message.push_str(&suggestions);
}
@ -251,7 +254,7 @@ fn map_rust_child_diagnostic(
document_changes: None,
change_annotations: None,
}),
is_preferred: Some(true),
is_preferred: Some(is_preferred),
data: None,
command: None,
},
@ -493,7 +496,7 @@ fn rustc_code_description(code: Option<&str>) -> Option<lsp_types::CodeDescripti
&& chars.next().is_none()
})
.and_then(|code| {
lsp_types::Url::parse(&format!("https://doc.rust-lang.org/error-index.html#{}", code))
lsp_types::Url::parse(&format!("https://doc.rust-lang.org/error-index.html#{code}"))
.ok()
.map(|href| lsp_types::CodeDescription { href })
})
@ -502,8 +505,7 @@ fn rustc_code_description(code: Option<&str>) -> Option<lsp_types::CodeDescripti
fn clippy_code_description(code: Option<&str>) -> Option<lsp_types::CodeDescription> {
code.and_then(|code| {
lsp_types::Url::parse(&format!(
"https://rust-lang.github.io/rust-clippy/master/index.html#{}",
code
"https://rust-lang.github.io/rust-clippy/master/index.html#{code}"
))
.ok()
.map(|href| lsp_types::CodeDescription { href })

View file

@ -145,7 +145,7 @@ impl<'a> RequestDispatcher<'a> {
match res {
Ok(params) => {
let panic_context =
format!("\nversion: {}\nrequest: {} {:#?}", version(), R::METHOD, params);
format!("\nversion: {}\nrequest: {} {params:#?}", version(), R::METHOD);
Some((req, params, panic_context))
}
Err(err) => {

View file

@ -25,12 +25,9 @@ pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result<vfs::VfsPath> {
pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> Result<TextSize> {
let line_col = match line_index.encoding {
PositionEncoding::Utf8 => {
LineCol { line: position.line as u32, col: position.character as u32 }
}
PositionEncoding::Utf8 => LineCol { line: position.line, col: position.character },
PositionEncoding::Utf16 => {
let line_col =
LineColUtf16 { line: position.line as u32, col: position.character as u32 };
let line_col = LineColUtf16 { line: position.line, col: position.character };
line_index.index.to_utf8(line_col)
}
};
@ -67,7 +64,15 @@ pub(crate) fn file_range(
text_document_identifier: lsp_types::TextDocumentIdentifier,
range: lsp_types::Range,
) -> Result<FileRange> {
let file_id = file_id(snap, &text_document_identifier.uri)?;
file_range_uri(snap, &text_document_identifier.uri, range)
}
pub(crate) fn file_range_uri(
snap: &GlobalStateSnapshot,
document: &lsp_types::Url,
range: lsp_types::Range,
) -> Result<FileRange> {
let file_id = file_id(snap, document)?;
let line_index = snap.file_line_index(file_id)?;
let range = text_range(&line_index, range)?;
Ok(FileRange { file_id, range })

View file

@ -134,7 +134,7 @@ impl GlobalState {
let task_pool = {
let (sender, receiver) = unbounded();
let handle = TaskPool::new(sender);
let handle = TaskPool::new_with_threads(sender, config.main_loop_num_threads());
Handle { handle, receiver }
};
@ -429,6 +429,6 @@ pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url {
pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result<FileId> {
let path = from_proto::vfs_path(url)?;
let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {}", path))?;
let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {path}"))?;
Ok(res)
}

View file

@ -28,7 +28,8 @@ use lsp_types::{
use project_model::{ManifestPath, ProjectWorkspace, TargetKind};
use serde_json::json;
use stdx::{format_to, never};
use syntax::{algo, ast, AstNode, TextRange, TextSize, T};
use syntax::{algo, ast, AstNode, TextRange, TextSize};
use tracing::error;
use vfs::AbsPathBuf;
use crate::{
@ -729,7 +730,7 @@ pub(crate) fn handle_runnables(
Some(spec) => {
for cmd in ["check", "test"] {
res.push(lsp_ext::Runnable {
label: format!("cargo {} -p {} --all-targets", cmd, spec.package),
label: format!("cargo {cmd} -p {} --all-targets", spec.package),
location: None,
kind: lsp_ext::RunnableKind::Cargo,
args: lsp_ext::CargoRunnable {
@ -812,18 +813,6 @@ pub(crate) fn handle_completion(
let completion_trigger_character =
params.context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next());
if Some(':') == completion_trigger_character {
let source_file = snap.analysis.parse(position.file_id)?;
let left_token = source_file.syntax().token_at_offset(position.offset).left_biased();
let completion_triggered_after_single_colon = match left_token {
Some(left_token) => left_token.kind() == T![:],
None => true,
};
if completion_triggered_after_single_colon {
return Ok(None);
}
}
let completion_config = &snap.config.completion();
let items = match snap.analysis.completions(
completion_config,
@ -910,7 +899,7 @@ pub(crate) fn handle_folding_range(
let line_folding_only = snap.config.line_folding_only();
let res = folds
.into_iter()
.map(|it| to_proto::folding_range(&*text, &line_index, line_folding_only, it))
.map(|it| to_proto::folding_range(&text, &line_index, line_folding_only, it))
.collect();
Ok(Some(res))
}
@ -990,7 +979,7 @@ pub(crate) fn handle_rename(
let position = from_proto::file_position(&snap, params.text_document_position)?;
let mut change =
snap.analysis.rename(position, &*params.new_name)?.map_err(to_proto::rename_error)?;
snap.analysis.rename(position, &params.new_name)?.map_err(to_proto::rename_error)?;
// this is kind of a hack to prevent double edits from happening when moving files
// When a module gets renamed by renaming the mod declaration this causes the file to move
@ -1112,9 +1101,7 @@ pub(crate) fn handle_code_action(
}
// Fixes from `cargo check`.
for fix in
snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).into_iter().flatten()
{
for fix in snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).flatten() {
// FIXME: this mapping is awkward and shouldn't exist. Refactor
// `snap.check_fixes` to not convert to LSP prematurely.
let intersect_fix_range = fix
@ -1157,8 +1144,8 @@ pub(crate) fn handle_code_action_resolve(
Ok(parsed_data) => parsed_data,
Err(e) => {
return Err(invalid_params_error(format!(
"Failed to parse action id string '{}': {}",
params.id, e
"Failed to parse action id string '{}': {e}",
params.id
))
.into())
}
@ -1202,7 +1189,7 @@ fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> {
let assist_kind: AssistKind = assist_kind_string.parse()?;
let index: usize = match index_string.parse() {
Ok(index) => index,
Err(e) => return Err(format!("Incorrect index string: {}", e)),
Err(e) => return Err(format!("Incorrect index string: {e}")),
};
Ok((index, SingleResolve { assist_id: assist_id_string.to_string(), assist_kind }))
}
@ -1384,9 +1371,26 @@ pub(crate) fn handle_inlay_hints_resolve(
let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?;
let file_range = from_proto::file_range(
match snap.url_file_version(&resolve_data.text_document.uri) {
Some(version) if version == resolve_data.text_document.version => {}
Some(version) => {
error!(
"attempted inlayHints/resolve of '{}' at version {} while server version is {}",
resolve_data.text_document.uri, resolve_data.text_document.version, version,
);
return Ok(hint);
}
None => {
error!(
"attempted inlayHints/resolve of unknown file '{}' at version {}",
resolve_data.text_document.uri, resolve_data.text_document.version,
);
return Ok(hint);
}
}
let file_range = from_proto::file_range_uri(
&snap,
resolve_data.text_document,
&resolve_data.text_document.uri,
match resolve_data.position {
PositionOrRange::Position(pos) => Range::new(pos, pos),
PositionOrRange::Range(range) => range,
@ -1782,14 +1786,15 @@ fn run_rustfmt(
let file_id = from_proto::file_id(snap, &text_document.uri)?;
let file = snap.analysis.file_text(file_id)?;
// find the edition of the package the file belongs to
// (if it belongs to multiple we'll just pick the first one and pray)
let edition = snap
// Determine the edition of the crate the file belongs to (if there's multiple, we pick the
// highest edition).
let editions = snap
.analysis
.relevant_crates_for(file_id)?
.into_iter()
.find_map(|crate_id| snap.cargo_target_for_crate_root(crate_id))
.map(|(ws, target)| ws[ws[target].package].edition);
.map(|crate_id| snap.analysis.crate_edition(crate_id))
.collect::<Result<Vec<_>, _>>()?;
let edition = editions.iter().copied().max();
let line_index = snap.file_line_index(file_id)?;
@ -1863,7 +1868,7 @@ fn run_rustfmt(
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
.context(format!("Failed to spawn {:?}", command))?;
.context(format!("Failed to spawn {command:?}"))?;
rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?;
@ -1896,9 +1901,9 @@ fn run_rustfmt(
format!(
r#"rustfmt exited with:
Status: {}
stdout: {}
stderr: {}"#,
output.status, captured_stdout, captured_stderr,
stdout: {captured_stdout}
stderr: {captured_stderr}"#,
output.status,
),
)
.into())

View file

@ -48,7 +48,7 @@ fn integrated_highlighting_benchmark() {
let file_id = {
let file = workspace_to_load.join(file);
let path = VfsPath::from(AbsPathBuf::assert(file));
vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
};
{
@ -102,7 +102,7 @@ fn integrated_completion_benchmark() {
let file_id = {
let file = workspace_to_load.join(file);
let path = VfsPath::from(AbsPathBuf::assert(file));
vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
};
{

View file

@ -55,7 +55,7 @@ pub type Result<T, E = Error> = std::result::Result<T, E>;
pub fn from_json<T: DeserializeOwned>(what: &'static str, json: &serde_json::Value) -> Result<T> {
let res = serde_json::from_value(json.clone())
.map_err(|e| format!("Failed to deserialize {}: {}; {}", what, e, json))?;
.map_err(|e| format!("Failed to deserialize {what}: {e}; {json}"))?;
Ok(res)
}

View file

@ -3,11 +3,11 @@
use std::{collections::HashMap, path::PathBuf};
use lsp_types::request::Request;
use lsp_types::PositionEncodingKind;
use lsp_types::{
notification::Notification, CodeActionKind, DocumentOnTypeFormattingParams,
PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams,
};
use lsp_types::{PositionEncodingKind, VersionedTextDocumentIdentifier};
use serde::{Deserialize, Serialize};
pub enum AnalyzerStatus {}
@ -132,12 +132,31 @@ pub struct ExpandedMacro {
pub enum CancelFlycheck {}
impl Request for CancelFlycheck {
impl Notification for CancelFlycheck {
type Params = ();
type Result = ();
const METHOD: &'static str = "rust-analyzer/cancelFlycheck";
}
pub enum RunFlycheck {}
impl Notification for RunFlycheck {
type Params = RunFlycheckParams;
const METHOD: &'static str = "rust-analyzer/runFlycheck";
}
pub enum ClearFlycheck {}
impl Notification for ClearFlycheck {
type Params = ();
const METHOD: &'static str = "rust-analyzer/clearFlycheck";
}
#[derive(Deserialize, Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct RunFlycheckParams {
pub text_document: Option<TextDocumentIdentifier>,
}
pub enum MatchingBrace {}
impl Request for MatchingBrace {
@ -550,7 +569,7 @@ pub struct CompletionResolveData {
#[derive(Debug, Serialize, Deserialize)]
pub struct InlayHintResolveData {
pub text_document: TextDocumentIdentifier,
pub text_document: VersionedTextDocumentIdentifier,
pub position: PositionOrRange,
}

View file

@ -98,7 +98,7 @@ impl GlobalState {
});
let cancellable = Some(cancel_token.is_some());
let token = lsp_types::ProgressToken::String(
cancel_token.unwrap_or_else(|| format!("rustAnalyzer/{}", title)),
cancel_token.unwrap_or_else(|| format!("rustAnalyzer/{title}")),
);
let work_done_progress = match state {
Progress::Begin => {

View file

@ -229,8 +229,8 @@ impl GlobalState {
message = match &report.crates_currently_indexing[..] {
[crate_name] => Some(format!(
"{}/{} ({})",
report.crates_done, report.crates_total, crate_name
"{}/{} ({crate_name})",
report.crates_done, report.crates_total
)),
[crate_name, rest @ ..] => Some(format!(
"{}/{} ({} + {} more)",
@ -414,10 +414,7 @@ impl GlobalState {
let loop_duration = loop_start.elapsed();
if loop_duration > Duration::from_millis(100) && was_quiescent {
tracing::warn!("overly long loop turn: {:?}", loop_duration);
self.poke_rust_analyzer_developer(format!(
"overly long loop turn: {:?}",
loop_duration
));
self.poke_rust_analyzer_developer(format!("overly long loop turn: {loop_duration:?}"));
}
Ok(())
}
@ -516,7 +513,7 @@ impl GlobalState {
self.report_progress(
"Roots Scanned",
state,
Some(format!("{}/{}", n_done, n_total)),
Some(format!("{n_done}/{n_total}")),
Some(Progress::fraction(n_done, n_total)),
None,
)
@ -561,10 +558,7 @@ impl GlobalState {
flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)),
flycheck::Progress::DidCancel => (Progress::End, None),
flycheck::Progress::DidFailToRestart(err) => {
self.show_and_log_error(
"cargo check failed".to_string(),
Some(err.to_string()),
);
self.show_and_log_error("cargo check failed".to_string(), Some(err));
return;
}
flycheck::Progress::DidFinish(result) => {
@ -581,10 +575,7 @@ impl GlobalState {
// When we're running multiple flychecks, we have to include a disambiguator in
// the title, or the editor complains. Note that this is a user-facing string.
let title = if self.flycheck.len() == 1 {
match self.config.flycheck() {
Some(config) => format!("{}", config),
None => "cargo check".to_string(),
}
format!("{}", self.config.flycheck())
} else {
format!("cargo check (#{})", id + 1)
};
@ -593,7 +584,7 @@ impl GlobalState {
state,
message,
None,
Some(format!("rust-analyzer/checkOnSave/{}", id)),
Some(format!("rust-analyzer/flycheck/{id}")),
);
}
}
@ -638,7 +629,6 @@ impl GlobalState {
.on_sync_mut::<lsp_ext::ReloadWorkspace>(handlers::handle_workspace_reload)
.on_sync_mut::<lsp_ext::MemoryUsage>(handlers::handle_memory_usage)
.on_sync_mut::<lsp_ext::ShuffleCrateGraph>(handlers::handle_shuffle_crate_graph)
.on_sync_mut::<lsp_ext::CancelFlycheck>(handlers::handle_cancel_flycheck)
.on_sync::<lsp_ext::JoinLines>(handlers::handle_join_lines)
.on_sync::<lsp_ext::OnEnter>(handlers::handle_on_enter)
.on_sync::<lsp_types::request::SelectionRangeRequest>(handlers::handle_selection_range)
@ -703,6 +693,88 @@ impl GlobalState {
/// Handles an incoming notification.
fn on_notification(&mut self, not: Notification) -> Result<()> {
// FIXME: Move these implementations out into a module similar to on_request
fn run_flycheck(this: &mut GlobalState, vfs_path: VfsPath) -> bool {
let file_id = this.vfs.read().0.file_id(&vfs_path);
if let Some(file_id) = file_id {
let world = this.snapshot();
let mut updated = false;
let task = move || -> std::result::Result<(), ide::Cancelled> {
// Trigger flychecks for all workspaces that depend on the saved file
// Crates containing or depending on the saved file
let crate_ids: Vec<_> = world
.analysis
.crates_for(file_id)?
.into_iter()
.flat_map(|id| world.analysis.transitive_rev_deps(id))
.flatten()
.sorted()
.unique()
.collect();
let crate_root_paths: Vec<_> = crate_ids
.iter()
.filter_map(|&crate_id| {
world
.analysis
.crate_root(crate_id)
.map(|file_id| {
world
.file_id_to_file_path(file_id)
.as_path()
.map(ToOwned::to_owned)
})
.transpose()
})
.collect::<ide::Cancellable<_>>()?;
let crate_root_paths: Vec<_> =
crate_root_paths.iter().map(Deref::deref).collect();
// Find all workspaces that have at least one target containing the saved file
let workspace_ids =
world.workspaces.iter().enumerate().filter(|(_, ws)| match ws {
project_model::ProjectWorkspace::Cargo { cargo, .. } => {
cargo.packages().any(|pkg| {
cargo[pkg].targets.iter().any(|&it| {
crate_root_paths.contains(&cargo[it].root.as_path())
})
})
}
project_model::ProjectWorkspace::Json { project, .. } => project
.crates()
.any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)),
project_model::ProjectWorkspace::DetachedFiles { .. } => false,
});
// Find and trigger corresponding flychecks
for flycheck in world.flycheck.iter() {
for (id, _) in workspace_ids.clone() {
if id == flycheck.id() {
updated = true;
flycheck.restart();
continue;
}
}
}
// No specific flycheck was triggered, so let's trigger all of them.
if !updated {
for flycheck in world.flycheck.iter() {
flycheck.restart();
}
}
Ok(())
};
this.task_pool.handle.spawn_with_sender(move |_| {
if let Err(e) = std::panic::catch_unwind(task) {
tracing::error!("flycheck task panicked: {e:?}")
}
});
true
} else {
false
}
}
NotificationDispatcher { not: Some(not), global_state: self }
.on::<lsp_types::notification::Cancel>(|this, params| {
let id: lsp_server::RequestId = match params.id {
@ -714,7 +786,7 @@ impl GlobalState {
})?
.on::<lsp_types::notification::WorkDoneProgressCancel>(|this, params| {
if let lsp_types::NumberOrString::String(s) = &params.token {
if let Some(id) = s.strip_prefix("rust-analyzer/checkOnSave/") {
if let Some(id) = s.strip_prefix("rust-analyzer/flycheck/") {
if let Ok(id) = u32::from_str_radix(id, 10) {
if let Some(flycheck) = this.flycheck.get(id as usize) {
flycheck.cancel();
@ -743,6 +815,7 @@ impl GlobalState {
}
Ok(())
})?
.on::<lsp_ext::CancelFlycheck>(handlers::handle_cancel_flycheck)?
.on::<lsp_types::notification::DidChangeTextDocument>(|this, params| {
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
match this.mem_docs.get_mut(&path) {
@ -782,99 +855,42 @@ impl GlobalState {
}
Ok(())
})?
.on::<lsp_ext::ClearFlycheck>(|this, ()| {
this.diagnostics.clear_check_all();
Ok(())
})?
.on::<lsp_ext::RunFlycheck>(|this, params| {
if let Some(text_document) = params.text_document {
if let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) {
if run_flycheck(this, vfs_path) {
return Ok(());
}
}
}
// No specific flycheck was triggered, so let's trigger all of them.
for flycheck in this.flycheck.iter() {
flycheck.restart();
}
Ok(())
})?
.on::<lsp_types::notification::DidSaveTextDocument>(|this, params| {
if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
// Re-fetch workspaces if a workspace related file has changed
if let Some(abs_path) = vfs_path.as_path() {
if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) {
this.fetch_workspaces_queue
.request_op(format!("DidSaveTextDocument {}", abs_path.display()));
}
}
let file_id = this.vfs.read().0.file_id(&vfs_path);
if let Some(file_id) = file_id {
let world = this.snapshot();
let mut updated = false;
let task = move || -> std::result::Result<(), ide::Cancelled> {
// Trigger flychecks for all workspaces that depend on the saved file
// Crates containing or depending on the saved file
let crate_ids: Vec<_> = world
.analysis
.crates_for(file_id)?
.into_iter()
.flat_map(|id| world.analysis.transitive_rev_deps(id))
.flatten()
.sorted()
.unique()
.collect();
let crate_root_paths: Vec<_> = crate_ids
.iter()
.filter_map(|&crate_id| {
world
.analysis
.crate_root(crate_id)
.map(|file_id| {
world
.file_id_to_file_path(file_id)
.as_path()
.map(ToOwned::to_owned)
})
.transpose()
})
.collect::<ide::Cancellable<_>>()?;
let crate_root_paths: Vec<_> =
crate_root_paths.iter().map(Deref::deref).collect();
// Find all workspaces that have at least one target containing the saved file
let workspace_ids =
world.workspaces.iter().enumerate().filter(|(_, ws)| match ws {
project_model::ProjectWorkspace::Cargo { cargo, .. } => {
cargo.packages().any(|pkg| {
cargo[pkg].targets.iter().any(|&it| {
crate_root_paths.contains(&cargo[it].root.as_path())
})
})
}
project_model::ProjectWorkspace::Json { project, .. } => {
project.crates().any(|(c, _)| {
crate_ids.iter().any(|&crate_id| crate_id == c)
})
}
project_model::ProjectWorkspace::DetachedFiles { .. } => false,
});
// Find and trigger corresponding flychecks
for flycheck in world.flycheck.iter() {
for (id, _) in workspace_ids.clone() {
if id == flycheck.id() {
updated = true;
flycheck.restart();
continue;
}
}
}
// No specific flycheck was triggered, so let's trigger all of them.
if !updated {
for flycheck in world.flycheck.iter() {
flycheck.restart();
}
}
Ok(())
};
this.task_pool.handle.spawn_with_sender(move |_| {
if let Err(e) = std::panic::catch_unwind(task) {
tracing::error!("DidSaveTextDocument flycheck task panicked: {e:?}")
}
});
if !this.config.check_on_save() || run_flycheck(this, vfs_path) {
return Ok(());
}
}
// No specific flycheck was triggered, so let's trigger all of them.
for flycheck in this.flycheck.iter() {
flycheck.restart();
} else if this.config.check_on_save() {
// No specific flycheck was triggered, so let's trigger all of them.
for flycheck in this.flycheck.iter() {
flycheck.restart();
}
}
Ok(())
})?

View file

@ -158,8 +158,10 @@ impl GlobalState {
.collect::<Vec<_>>();
if !detached_files.is_empty() {
workspaces
.push(project_model::ProjectWorkspace::load_detached_files(detached_files));
workspaces.push(project_model::ProjectWorkspace::load_detached_files(
detached_files,
&cargo_config,
));
}
tracing::info!("did fetch workspaces {:?}", workspaces);
@ -224,6 +226,7 @@ impl GlobalState {
build_scripts: _,
toolchain: _,
target_layout: _,
} => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)),
_ => None,
};
@ -447,15 +450,7 @@ impl GlobalState {
fn reload_flycheck(&mut self) {
let _p = profile::span("GlobalState::reload_flycheck");
let config = match self.config.flycheck() {
Some(it) => it,
None => {
self.flycheck = Arc::new([]);
self.diagnostics.clear_check_all();
return;
}
};
let config = self.config.flycheck();
let sender = self.flycheck_sender.clone();
let invocation_strategy = match config {
FlycheckConfig::CargoCommand { .. } => flycheck::InvocationStrategy::PerWorkspace,
@ -466,7 +461,7 @@ impl GlobalState {
flycheck::InvocationStrategy::Once => vec![FlycheckHandle::spawn(
0,
Box::new(move |msg| sender.send(msg).unwrap()),
config.clone(),
config,
self.config.root_path().clone(),
)],
flycheck::InvocationStrategy::PerWorkspace => {

View file

@ -161,8 +161,8 @@ impl SemanticTokensBuilder {
/// Push a new token onto the builder
pub(crate) fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
let mut push_line = range.start.line as u32;
let mut push_char = range.start.character as u32;
let mut push_line = range.start.line;
let mut push_char = range.start.character;
if !self.data.is_empty() {
push_line -= self.prev_line;
@ -177,15 +177,15 @@ impl SemanticTokensBuilder {
let token = SemanticToken {
delta_line: push_line,
delta_start: push_char,
length: token_len as u32,
length: token_len,
token_type: token_index,
token_modifiers_bitset: modifier_bitset,
};
self.data.push(token);
self.prev_line = range.start.line as u32;
self.prev_char = range.start.character as u32;
self.prev_line = range.start.line;
self.prev_char = range.start.character;
}
pub(crate) fn build(self) -> SemanticTokens {

View file

@ -8,12 +8,13 @@ pub(crate) struct TaskPool<T> {
}
impl<T> TaskPool<T> {
pub(crate) fn new(sender: Sender<T>) -> TaskPool<T> {
pub(crate) fn new_with_threads(sender: Sender<T>, threads: usize) -> TaskPool<T> {
const STACK_SIZE: usize = 8 * 1024 * 1024;
let inner = threadpool::Builder::new()
.thread_name("Worker".into())
.thread_stack_size(STACK_SIZE)
.num_threads(threads)
.build();
TaskPool { sender, inner }
}

View file

@ -228,7 +228,7 @@ fn completion_item(
max_relevance: u32,
item: CompletionItem,
) {
let insert_replace_support = config.insert_replace_support().then(|| tdpp.position);
let insert_replace_support = config.insert_replace_support().then_some(tdpp.position);
let mut additional_text_edits = Vec::new();
// LSP does not allow arbitrary edits in completion, so we have to do a
@ -258,7 +258,7 @@ fn completion_item(
text_edit.unwrap()
};
let insert_text_format = item.is_snippet().then(|| lsp_types::InsertTextFormat::SNIPPET);
let insert_text_format = item.is_snippet().then_some(lsp_types::InsertTextFormat::SNIPPET);
let tags = item.deprecated().then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]);
let command = if item.trigger_call_info() && config.client_commands().trigger_parameter_hints {
Some(command::trigger_parameter_hints())
@ -342,7 +342,7 @@ fn completion_item(
// by the client. Hex format is used because it is easier to
// visually compare very large values, which the sort text
// tends to be since it is the opposite of the score.
res.sort_text = Some(format!("{:08x}", sort_score));
res.sort_text = Some(format!("{sort_score:08x}"));
}
}
@ -434,42 +434,52 @@ pub(crate) fn inlay_hint(
InlayKind::ParameterHint if render_colons => inlay_hint.label.append_str(":"),
InlayKind::TypeHint if render_colons => inlay_hint.label.prepend_str(": "),
InlayKind::ClosureReturnTypeHint => inlay_hint.label.prepend_str(" -> "),
InlayKind::DiscriminantHint => inlay_hint.label.prepend_str(" = "),
_ => {}
}
Ok(lsp_types::InlayHint {
position: match inlay_hint.kind {
// before annotated thing
InlayKind::ParameterHint | InlayKind::AdjustmentHint | InlayKind::BindingModeHint => {
position(line_index, inlay_hint.range.start())
}
InlayKind::OpeningParenthesis
| InlayKind::ParameterHint
| InlayKind::AdjustmentHint
| InlayKind::BindingModeHint => position(line_index, inlay_hint.range.start()),
// after annotated thing
InlayKind::ClosureReturnTypeHint
| InlayKind::TypeHint
| InlayKind::DiscriminantHint
| InlayKind::ChainingHint
| InlayKind::GenericParamListHint
| InlayKind::AdjustmentHintClosingParenthesis
| InlayKind::ClosingParenthesis
| InlayKind::AdjustmentHintPostfix
| InlayKind::LifetimeHint
| InlayKind::ClosingBraceHint => position(line_index, inlay_hint.range.end()),
},
padding_left: Some(match inlay_hint.kind {
InlayKind::TypeHint => !render_colons,
InlayKind::ChainingHint | InlayKind::ClosingBraceHint => true,
InlayKind::AdjustmentHintClosingParenthesis
InlayKind::ClosingParenthesis
| InlayKind::DiscriminantHint
| InlayKind::OpeningParenthesis
| InlayKind::BindingModeHint
| InlayKind::ClosureReturnTypeHint
| InlayKind::GenericParamListHint
| InlayKind::AdjustmentHint
| InlayKind::AdjustmentHintPostfix
| InlayKind::LifetimeHint
| InlayKind::ParameterHint => false,
}),
padding_right: Some(match inlay_hint.kind {
InlayKind::AdjustmentHintClosingParenthesis
InlayKind::ClosingParenthesis
| InlayKind::OpeningParenthesis
| InlayKind::ChainingHint
| InlayKind::ClosureReturnTypeHint
| InlayKind::GenericParamListHint
| InlayKind::AdjustmentHint
| InlayKind::AdjustmentHintPostfix
| InlayKind::TypeHint
| InlayKind::DiscriminantHint
| InlayKind::ClosingBraceHint => false,
InlayKind::BindingModeHint => inlay_hint.label.as_simple_str() != Some("&"),
InlayKind::ParameterHint | InlayKind::LifetimeHint => true,
@ -479,11 +489,14 @@ pub(crate) fn inlay_hint(
InlayKind::ClosureReturnTypeHint | InlayKind::TypeHint | InlayKind::ChainingHint => {
Some(lsp_types::InlayHintKind::TYPE)
}
InlayKind::AdjustmentHintClosingParenthesis
InlayKind::ClosingParenthesis
| InlayKind::DiscriminantHint
| InlayKind::OpeningParenthesis
| InlayKind::BindingModeHint
| InlayKind::GenericParamListHint
| InlayKind::LifetimeHint
| InlayKind::AdjustmentHint
| InlayKind::AdjustmentHintPostfix
| InlayKind::ClosingBraceHint => None,
},
text_edits: None,
@ -492,7 +505,10 @@ pub(crate) fn inlay_hint(
let uri = url(snap, file_id);
let line_index = snap.file_line_index(file_id).ok()?;
let text_document = lsp_types::TextDocumentIdentifier { uri };
let text_document = lsp_types::VersionedTextDocumentIdentifier {
version: snap.url_file_version(&uri)?,
uri,
};
to_value(lsp_ext::InlayHintResolveData {
text_document,
position: lsp_ext::PositionOrRange::Position(position(&line_index, offset)),
@ -501,7 +517,10 @@ pub(crate) fn inlay_hint(
}
Some(ide::InlayTooltip::HoverRanged(file_id, text_range)) => {
let uri = url(snap, file_id);
let text_document = lsp_types::TextDocumentIdentifier { uri };
let text_document = lsp_types::VersionedTextDocumentIdentifier {
version: snap.url_file_version(&uri)?,
uri,
};
let line_index = snap.file_line_index(file_id).ok()?;
to_value(lsp_ext::InlayHintResolveData {
text_document,
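
The resolve payload switches from a plain `TextDocumentIdentifier` to a versioned one, so a late resolve request can be checked against the version the hint was computed for and dropped if the file changed in the meantime. A sketch of that check using `lsp_types` directly (the version numbers are made up):

use lsp_types::{Url, VersionedTextDocumentIdentifier};

fn main() {
    // Version recorded when the hint was produced (made-up value).
    let pinned = VersionedTextDocumentIdentifier {
        uri: Url::parse("file:///tmp/lib.rs").unwrap(),
        version: 3,
    };
    // Version of the open document at resolve time (also made up).
    let current_version = 4;
    if pinned.version != current_version {
        eprintln!("document changed since the hint was computed; skip resolve");
    }
}
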
@ -1103,7 +1122,7 @@ pub(crate) fn code_action(
(Some(it), _) => res.edit = Some(snippet_workspace_edit(snap, it)?),
(None, Some((index, code_action_params))) => {
res.data = Some(lsp_ext::CodeActionData {
id: format!("{}:{}:{}", assist.id.0, assist.id.1.name(), index),
id: format!("{}:{}:{index}", assist.id.0, assist.id.1.name()),
code_action_params,
});
}
@ -1164,7 +1183,10 @@ pub(crate) fn code_lens(
let r = runnable(snap, run)?;
let lens_config = snap.config.lens();
if lens_config.run && client_commands_config.run_single {
if lens_config.run
&& client_commands_config.run_single
&& r.args.workspace_root.is_some()
{
let command = command::run_single(&r, &title);
acc.push(lsp_types::CodeLens {
range: annotation_range,
@ -1339,7 +1361,7 @@ pub(crate) fn implementation_title(count: usize) -> String {
if count == 1 {
"1 implementation".into()
} else {
format!("{} implementations", count)
format!("{count} implementations")
}
}
@ -1347,7 +1369,7 @@ pub(crate) fn reference_title(count: usize) -> String {
if count == 1 {
"1 reference".into()
} else {
format!("{} references", count)
format!("{count} references")
}
}
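
Most hunks in this commit are the same mechanical cleanup: inlining format arguments, supported since Rust 1.58 and flagged by clippy's `uninlined_format_args`. The captured form composes with width, fill, and radix specifiers:

fn main() {
    let count = 2;
    // Equivalent; the second captures `count` from the surrounding scope.
    assert_eq!(format!("{} references", count), "2 references");
    assert_eq!(format!("{count} references"), "2 references");
    // Specifiers still apply after the captured name.
    let bytes = 255u32;
    assert_eq!(format!("{bytes:>8}"), "     255");
    assert_eq!(format!("{bytes:08x}"), "000000ff");
}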


@ -263,7 +263,7 @@ mod tests {
for runnable in ["consumer", "dependency", "devdependency"] {
server.request::<Runnables>(
RunnablesParams {
text_document: server.doc_id(&format!("{}/src/lib.rs", runnable)),
text_document: server.doc_id(&format!("{runnable}/src/lib.rs")),
position: None,
},
json!([
@ -528,14 +528,13 @@ fn test_missing_module_code_action_in_json_project() {
let code = format!(
r#"
//- /rust-project.json
{PROJECT}
{project}
//- /src/lib.rs
mod bar;
fn main() {{}}
"#,
PROJECT = project,
);
let server =
@ -595,8 +594,8 @@ fn diagnostics_dont_block_typing() {
return;
}
let librs: String = (0..10).map(|i| format!("mod m{};", i)).collect();
let libs: String = (0..10).map(|i| format!("//- /src/m{}.rs\nfn foo() {{}}\n\n", i)).collect();
let librs: String = (0..10).map(|i| format!("mod m{i};")).collect();
let libs: String = (0..10).map(|i| format!("//- /src/m{i}.rs\nfn foo() {{}}\n\n")).collect();
let server = Project::with_fixture(&format!(
r#"
//- /Cargo.toml
@ -605,13 +604,12 @@ name = "foo"
version = "0.0.0"
//- /src/lib.rs
{}
{librs}
{}
{libs}
fn main() {{}}
"#,
librs, libs
"#
))
.with_config(serde_json::json!({
"cargo": { "sysroot": "discover" }
@ -622,7 +620,7 @@ fn main() {{}}
for i in 0..10 {
server.notification::<DidOpenTextDocument>(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
uri: server.doc_id(&format!("src/m{}.rs", i)).uri,
uri: server.doc_id(&format!("src/m{i}.rs")).uri,
language_id: "rust".to_string(),
version: 0,
text: "/// Docs\nfn foo() {}".to_string(),
@ -645,7 +643,7 @@ fn main() {{}}
}]),
);
let elapsed = start.elapsed();
assert!(elapsed.as_millis() < 2000, "typing enter took {:?}", elapsed);
assert!(elapsed.as_millis() < 2000, "typing enter took {elapsed:?}");
}
#[test]
@ -942,7 +940,7 @@ fn test_will_rename_files_same_level() {
let tmp_dir = TestDir::new();
let tmp_dir_path = tmp_dir.path().to_owned();
let tmp_dir_str = tmp_dir_path.to_str().unwrap();
let base_path = PathBuf::from(format!("file://{}", tmp_dir_str));
let base_path = PathBuf::from(format!("file://{tmp_dir_str}"));
let code = r#"
//- /Cargo.toml


@ -14,7 +14,7 @@ fn sourcegen_feature_docs() {
contents.trim()
);
let dst = sourcegen::project_root().join("docs/user/generated_features.adoc");
fs::write(&dst, &contents).unwrap();
fs::write(dst, contents).unwrap();
}
#[derive(Debug)]
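
`fs::write` takes `impl AsRef<Path>` and `impl AsRef<[u8]>`, so when `dst` and `contents` are not used afterwards the borrows are redundant (clippy's `needless_borrow`) and the values can be passed by value:

use std::fs;

fn main() -> std::io::Result<()> {
    let dst = std::env::temp_dir().join("generated.adoc");
    let contents = String::from("== Example\n");
    // Both `&dst, &contents` and `dst, contents` compile; the borrow only
    // matters if the values are needed again after the call.
    fs::write(dst, contents)?;
    Ok(())
}
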
@ -42,7 +42,7 @@ impl Feature {
for block in comment_blocks {
let id = block.id;
if let Err(msg) = is_valid_feature_name(&id) {
panic!("invalid feature name: {:?}:\n {}", id, msg)
panic!("invalid feature name: {id:?}:\n {msg}")
}
let doc = block.contents.join("\n");
let location = sourcegen::Location { file: path.clone(), line: block.line };
@ -63,11 +63,11 @@ fn is_valid_feature_name(feature: &str) -> Result<(), String> {
}
for short in ["To", "And"] {
if word == short {
return Err(format!("Don't capitalize {:?}", word));
return Err(format!("Don't capitalize {word:?}"));
}
}
if !word.starts_with(char::is_uppercase) {
return Err(format!("Capitalize {:?}", word));
return Err(format!("Capitalize {word:?}"));
}
}
Ok(())


@ -216,7 +216,7 @@ impl Server {
fn send_request_(&self, r: Request) -> Value {
let id = r.id.clone();
self.client.sender.send(r.clone().into()).unwrap();
while let Some(msg) = self.recv().unwrap_or_else(|Timeout| panic!("timeout: {:?}", r)) {
while let Some(msg) = self.recv().unwrap_or_else(|Timeout| panic!("timeout: {r:?}")) {
match msg {
Message::Request(req) => {
if req.method == "client/registerCapability" {
@ -228,19 +228,19 @@ impl Server {
continue;
}
}
panic!("unexpected request: {:?}", req)
panic!("unexpected request: {req:?}")
}
Message::Notification(_) => (),
Message::Response(res) => {
assert_eq!(res.id, id);
if let Some(err) = res.error {
panic!("error response: {:#?}", err);
panic!("error response: {err:#?}");
}
return res.result.unwrap();
}
}
}
panic!("no response for {:?}", r);
panic!("no response for {r:?}");
}
pub(crate) fn wait_until_workspace_is_loaded(self) -> Server {
self.wait_for_message_cond(1, &|msg: &Message| match msg {


@ -28,7 +28,7 @@ impl TestDir {
static CNT: AtomicUsize = AtomicUsize::new(0);
for _ in 0..100 {
let cnt = CNT.fetch_add(1, Ordering::Relaxed);
let path = base.join(format!("{}_{}", pid, cnt));
let path = base.join(format!("{pid}_{cnt}"));
if path.is_dir() {
continue;
}
@ -53,7 +53,7 @@ impl Drop for TestDir {
return;
}
remove_dir_all(&self.path).unwrap_or_else(|err| {
panic!("failed to remove temporary directory {}: {}", self.path.display(), err)
panic!("failed to remove temporary directory {}: {err}", self.path.display())
})
}
}


@ -56,12 +56,11 @@ fn check_lsp_extensions_docs() {
"
lsp_ext.rs was changed without touching lsp-extensions.md.
Expected hash: {:x}
Actual hash: {:x}
Expected hash: {expected_hash:x}
Actual hash: {actual_hash:x}
Please adjust docs/dev/lsp-extensions.md.
",
expected_hash, actual_hash
"
)
}
}
@ -194,6 +193,7 @@ MIT OR Apache-2.0
MIT OR Apache-2.0 OR Zlib
MIT OR Zlib OR Apache-2.0
MIT/Apache-2.0
Unlicense OR MIT
Unlicense/MIT
Zlib OR Apache-2.0 OR MIT
"
@ -216,18 +216,18 @@ Zlib OR Apache-2.0 OR MIT
diff.push_str("New Licenses:\n");
for &l in licenses.iter() {
if !expected.contains(&l) {
diff += &format!(" {}\n", l)
diff += &format!(" {l}\n")
}
}
diff.push_str("\nMissing Licenses:\n");
for &l in expected.iter() {
if !licenses.contains(&l) {
diff += &format!(" {}\n", l)
diff += &format!(" {l}\n")
}
}
panic!("different set of licenses!\n{}", diff);
panic!("different set of licenses!\n{diff}");
}
assert_eq!(licenses, expected);
}
@ -316,7 +316,7 @@ fn check_test_attrs(path: &Path, text: &str) {
"ide-assists/src/tests/generated.rs",
];
if text.contains("#[ignore") && !need_ignore.iter().any(|p| path.ends_with(p)) {
panic!("\ndon't `#[ignore]` tests, see:\n\n {}\n\n {}\n", ignore_rule, path.display(),)
panic!("\ndon't `#[ignore]` tests, see:\n\n {ignore_rule}\n\n {}\n", path.display(),)
}
let panic_rule =
@ -438,7 +438,7 @@ impl TidyMarks {
self.hits.symmetric_difference(&self.checks).map(|it| it.as_str()).collect();
if !diff.is_empty() {
panic!("unpaired marks: {:?}", diff)
panic!("unpaired marks: {diff:?}")
}
}
}
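
`symmetric_difference` yields the elements present in exactly one of the two sets, which is what makes it a one-liner for finding unpaired marks:

use std::collections::HashSet;

fn main() {
    let hits: HashSet<&str> = ["a", "b"].into_iter().collect();
    let checks: HashSet<&str> = ["b", "c"].into_iter().collect();
    // Elements in `hits` or `checks` but not both: here "a" and "c".
    let unpaired: HashSet<&str> = hits.symmetric_difference(&checks).copied().collect();
    assert_eq!(unpaired, HashSet::from(["a", "c"]));
}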