Lint debug prints and disallowed types with clippy

Lukas Wirth 2024-02-01 16:16:38 +01:00
parent 850ba2fb63
commit 9e8a0fae0c
64 changed files with 170 additions and 229 deletions

View file

@@ -3,7 +3,9 @@
//! Based on cli flags, either spawns an LSP server, or runs a batch analysis
#![warn(rust_2018_idioms, unused_lifetimes)]
#![allow(clippy::print_stdout, clippy::print_stderr)]
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#[cfg(feature = "in-rust-tree")]
extern crate rustc_driver as _;
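
The crate root above opts the whole CLI entry point out of clippy's `print_stdout` and `print_stderr` lints, since a command-line driver prints by design. A minimal, self-contained sketch of what those lints flag once enabled and how a single item can opt back in (the function names are illustrative, not from the commit):

#[allow(clippy::print_stdout)] // deliberate user-facing output
fn print_summary(files_changed: usize) {
    println!("{files_changed} files changed");
}

fn trace_step(msg: &str) {
    // println!("{msg}");  // would be flagged by clippy::print_stdout
    // eprintln!("{msg}"); // would be flagged by clippy::print_stderr
    log_line(msg); // route through whatever logging facade the crate uses
}

fn log_line(_msg: &str) { /* stand-in for a real logger such as `tracing` */ }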

View file

@@ -1,5 +1,7 @@
//! Various batch processing tasks, intended primarily for debugging.
#![allow(clippy::print_stdout, clippy::print_stderr)]
mod analysis_stats;
mod diagnostics;
pub mod flags;

View file

@@ -1,6 +1,5 @@
//! LSIF (language server index format) generator
use std::collections::HashMap;
use std::env;
use std::time::Instant;
@@ -16,6 +15,7 @@ use ide_db::{
use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
use lsp_types::{self, lsif};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use rustc_hash::FxHashMap;
use vfs::{AbsPathBuf, Vfs};
use crate::{
@@ -35,10 +35,10 @@ impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
struct LsifManager<'a> {
count: i32,
token_map: HashMap<TokenId, Id>,
range_map: HashMap<FileRange, Id>,
file_map: HashMap<FileId, Id>,
package_map: HashMap<PackageInformation, Id>,
token_map: FxHashMap<TokenId, Id>,
range_map: FxHashMap<FileRange, Id>,
file_map: FxHashMap<FileId, Id>,
package_map: FxHashMap<PackageInformation, Id>,
analysis: &'a Analysis,
db: &'a RootDatabase,
vfs: &'a Vfs,
@@ -57,10 +57,10 @@ impl LsifManager<'_> {
fn new<'a>(analysis: &'a Analysis, db: &'a RootDatabase, vfs: &'a Vfs) -> LsifManager<'a> {
LsifManager {
count: 0,
token_map: HashMap::default(),
range_map: HashMap::default(),
file_map: HashMap::default(),
package_map: HashMap::default(),
token_map: FxHashMap::default(),
range_map: FxHashMap::default(),
file_map: FxHashMap::default(),
package_map: FxHashMap::default(),
analysis,
db,
vfs,
@@ -215,7 +215,7 @@ impl LsifManager<'_> {
out_v: result_set_id.into(),
}));
let mut edges = token.references.iter().fold(
HashMap::<_, Vec<lsp_types::NumberOrString>>::new(),
FxHashMap::<_, Vec<lsp_types::NumberOrString>>::default(),
|mut edges, it| {
let entry = edges.entry((it.range.file_id, it.is_definition)).or_default();
entry.push((*self.range_map.get(&it.range).unwrap()).into());
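
The substantive change in this file is mechanical: `std::collections::HashMap` becomes `rustc_hash::FxHashMap`. One practical detail is construction: `FxHashMap` is a type alias over `HashMap` with a non-default hasher, so values are created with `default()` rather than `new()`, as the hunks above show. A small illustrative sketch (the function and data are made up):

use rustc_hash::FxHashMap;

fn index_by_name(names: &[&str]) -> FxHashMap<String, usize> {
    // `HashMap::new` is only defined for the default `RandomState` hasher,
    // so the aliased type is built through `Default` instead.
    let mut map: FxHashMap<String, usize> = FxHashMap::default();
    for (position, name) in names.iter().enumerate() {
        map.insert((*name).to_owned(), position);
    }
    map
}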

View file

@@ -1,8 +1,6 @@
//! Run all tests in a project, similar to `cargo test`, but using the mir interpreter.
use std::{
cell::RefCell, collections::HashMap, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf,
};
use std::{cell::RefCell, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf};
use hir::{Change, Crate};
use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig};
@@ -10,6 +8,7 @@ use profile::StopWatch;
use project_model::{CargoConfig, ProjectWorkspace, RustLibSource, Sysroot};
use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
use rustc_hash::FxHashMap;
use triomphe::Arc;
use vfs::{AbsPathBuf, FileId};
use walkdir::WalkDir;
@@ -27,7 +26,7 @@ struct Tester {
fn string_to_diagnostic_code_leaky(code: &str) -> DiagnosticCode {
thread_local! {
static LEAK_STORE: RefCell<HashMap<String, DiagnosticCode>> = RefCell::new(HashMap::new());
static LEAK_STORE: RefCell<FxHashMap<String, DiagnosticCode>> = RefCell::new(FxHashMap::default());
}
LEAK_STORE.with_borrow_mut(|s| match s.get(code) {
Some(c) => *c,
@@ -39,9 +38,9 @@ fn string_to_diagnostic_code_leaky(code: &str) -> DiagnosticCode {
})
}
fn detect_errors_from_rustc_stderr_file(p: PathBuf) -> HashMap<DiagnosticCode, usize> {
fn detect_errors_from_rustc_stderr_file(p: PathBuf) -> FxHashMap<DiagnosticCode, usize> {
let text = read_to_string(p).unwrap();
let mut result = HashMap::new();
let mut result = FxHashMap::default();
{
let mut text = &*text;
while let Some(p) = text.find("error[E") {
@@ -106,7 +105,7 @@ impl Tester {
let expected = if stderr_path.exists() {
detect_errors_from_rustc_stderr_file(stderr_path)
} else {
HashMap::new()
FxHashMap::default()
};
let text = read_to_string(&p).unwrap();
let mut change = Change::new();
@@ -125,7 +124,7 @@ impl Tester {
self.host.apply_change(change);
let diagnostic_config = DiagnosticsConfig::test_sample();
let mut actual = HashMap::new();
let mut actual = FxHashMap::default();
let panicked = match std::panic::catch_unwind(|| {
self.host
.analysis()
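
`string_to_diagnostic_code_leaky` above caches codes in a thread-local `FxHashMap` so that a runtime string can be handed out as a long-lived value; the arm elided from the hunk presumably leaks the owned string. A minimal sketch of that pattern under those assumptions, with a plain `&'static str` standing in for the real `DiagnosticCode`:

use std::cell::RefCell;
use rustc_hash::FxHashMap;

fn intern_leaky(code: &str) -> &'static str {
    thread_local! {
        // Cache of already-leaked strings, keyed by their owned copy.
        static LEAK_STORE: RefCell<FxHashMap<String, &'static str>> =
            RefCell::new(FxHashMap::default());
    }
    LEAK_STORE.with_borrow_mut(|store| match store.get(code) {
        Some(interned) => *interned,
        None => {
            // Deliberately leak so callers can hold a `'static` reference.
            let leaked: &'static str = Box::leak(code.to_owned().into_boxed_str());
            store.insert(code.to_owned(), leaked);
            leaked
        }
    })
}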

View file

@@ -1,10 +1,6 @@
//! SCIP generator
use std::{
collections::{HashMap, HashSet},
path::PathBuf,
time::Instant,
};
use std::{path::PathBuf, time::Instant};
use ide::{
LineCol, MonikerDescriptorKind, MonikerResult, StaticIndex, StaticIndexedFile,
@@ -12,6 +8,7 @@ use ide::{
};
use ide_db::LineIndexDatabase;
use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
use rustc_hash::{FxHashMap, FxHashSet};
use scip::types as scip_types;
use crate::{
@@ -76,9 +73,10 @@ impl flags::Scip {
};
let mut documents = Vec::new();
let mut symbols_emitted: HashSet<TokenId> = HashSet::default();
let mut tokens_to_symbol: HashMap<TokenId, String> = HashMap::new();
let mut tokens_to_enclosing_symbol: HashMap<TokenId, Option<String>> = HashMap::new();
let mut symbols_emitted: FxHashSet<TokenId> = FxHashSet::default();
let mut tokens_to_symbol: FxHashMap<TokenId, String> = FxHashMap::default();
let mut tokens_to_enclosing_symbol: FxHashMap<TokenId, Option<String>> =
FxHashMap::default();
for StaticIndexedFile { file_id, tokens, .. } in si.files {
let mut local_count = 0;

View file

@@ -1,9 +1,9 @@
//! This module provides the functionality needed to convert diagnostics from
//! `cargo check` json format to the LSP diagnostic format.
use std::collections::HashMap;
use flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan};
use itertools::Itertools;
use rustc_hash::FxHashMap;
use stdx::format_to;
use vfs::{AbsPath, AbsPathBuf};
@@ -186,7 +186,7 @@ fn map_rust_child_diagnostic(
return MappedRustChildDiagnostic::MessageLine(rd.message.clone());
}
let mut edit_map: HashMap<lsp_types::Url, Vec<lsp_types::TextEdit>> = HashMap::new();
let mut edit_map: FxHashMap<lsp_types::Url, Vec<lsp_types::TextEdit>> = FxHashMap::default();
let mut suggested_replacements = Vec::new();
let mut is_preferred = true;
for &span in &spans {

View file

@@ -1,6 +1,8 @@
//! rust-analyzer extensions to the LSP.
use std::{collections::HashMap, path::PathBuf};
#![allow(clippy::disallowed_types)]
use std::path::PathBuf;
use ide_db::line_index::WideEncoding;
use lsp_types::request::Request;
@@ -9,6 +11,7 @@ use lsp_types::{
PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams,
};
use lsp_types::{PositionEncodingKind, Url};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use crate::line_index::PositionEncoding;
@@ -448,12 +451,16 @@ pub struct CodeActionData {
#[serde(rename_all = "camelCase")]
pub struct SnippetWorkspaceEdit {
#[serde(skip_serializing_if = "Option::is_none")]
pub changes: Option<HashMap<lsp_types::Url, Vec<lsp_types::TextEdit>>>,
pub changes: Option<FxHashMap<lsp_types::Url, Vec<lsp_types::TextEdit>>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub document_changes: Option<Vec<SnippetDocumentChangeOperation>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub change_annotations:
Option<HashMap<lsp_types::ChangeAnnotationIdentifier, lsp_types::ChangeAnnotation>>,
pub change_annotations: Option<
std::collections::HashMap<
lsp_types::ChangeAnnotationIdentifier,
lsp_types::ChangeAnnotation,
>,
>,
}
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
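
This module gains a module-wide `#![allow(clippy::disallowed_types)]` and still spells out `std::collections::HashMap` for `change_annotations`, while `changes` moves to `FxHashMap`. A short sketch of how the lint is scoped; the clippy.toml `disallowed-types` entry it presumes (listing `std::collections::HashMap`) is an assumption, not quoted from the commit:

// Sketch only: with `std::collections::HashMap` listed under `disallowed-types`
// in clippy.toml, `clippy::disallowed_types` fires wherever the type is named.
// Code that must keep the std type, e.g. to mirror an external API, opts out
// narrowly instead of crate-wide.
#[allow(clippy::disallowed_types)]
pub struct WorkspaceEditLike {
    pub changes: std::collections::HashMap<String, Vec<String>>,
}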

View file

@@ -179,6 +179,7 @@ impl Node {
self.go(0, filter)
}
#[allow(clippy::print_stderr)]
fn go(&self, level: usize, filter: &WriteFilter) {
if self.duration > filter.longer_than && level < filter.depth {
let duration = ms(self.duration);

View file

@@ -9,6 +9,7 @@
//! be sure without a real client anyway.
#![warn(rust_2018_idioms, unused_lifetimes)]
#![allow(clippy::disallowed_types)]
#[cfg(not(feature = "in-rust-tree"))]
mod sourcegen;

View file

@@ -1,3 +1,4 @@
#![allow(clippy::disallowed_types, clippy::print_stderr)]
use std::{
collections::HashSet,
path::{Path, PathBuf},
@@ -78,8 +79,6 @@ fn files_are_tidy() {
match extension {
"rs" => {
let text = sh.read_file(&path).unwrap();
check_todo(&path, &text);
check_dbg(&path, &text);
check_test_attrs(&path, &text);
check_trailing_ws(&path, &text);
tidy_docs.visit(&path, &text);
@@ -205,74 +204,6 @@ Zlib OR Apache-2.0 OR MIT
assert_eq!(licenses, expected);
}
fn check_todo(path: &Path, text: &str) {
let need_todo = &[
// This file itself obviously needs to use todo (<- like this!).
"tests/tidy.rs",
// Some of our assists generate `todo!()`.
"handlers/add_turbo_fish.rs",
"handlers/generate_function.rs",
"handlers/add_missing_match_arms.rs",
"handlers/replace_derive_with_manual_impl.rs",
// To support generating `todo!()` in assists, we have `expr_todo()` in
// `ast::make`.
"ast/make.rs",
// The documentation in string literals may contain anything for its own purposes
"ide-db/src/generated/lints.rs",
"ide-assists/src/utils/gen_trait_fn_body.rs",
"ide-assists/src/tests/generated.rs",
// The tests for missing fields
"ide-diagnostics/src/handlers/missing_fields.rs",
];
if need_todo.iter().any(|p| path.ends_with(p)) {
return;
}
if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") {
// Generated by an assist
if text.contains("${0:todo!()}") {
return;
}
panic!(
"\nTODO markers or todo! macros should not be committed to the master branch,\n\
use FIXME instead\n\
{}\n",
path.display(),
)
}
}
fn check_dbg(path: &Path, text: &str) {
let need_dbg = &[
// This file itself obviously needs to use dbg.
"slow-tests/tidy.rs",
// Assists to remove `dbg!()`
"handlers/remove_dbg.rs",
// We have .dbg postfix
"ide-completion/src/completions/postfix.rs",
"ide-completion/src/completions/keyword.rs",
"ide-completion/src/tests/expression.rs",
"ide-completion/src/tests/proc_macros.rs",
// The documentation in string literals may contain anything for its own purposes
"ide-completion/src/lib.rs",
"ide-db/src/generated/lints.rs",
// test for doc test for remove_dbg
"src/tests/generated.rs",
// `expect!` string can contain `dbg!` (due to .dbg postfix)
"ide-completion/src/tests/special.rs",
];
if need_dbg.iter().any(|p| path.ends_with(p)) {
return;
}
if text.contains("dbg!") {
panic!(
"\ndbg! macros should not be committed to the master branch,\n\
{}\n",
path.display(),
)
}
}
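
The two tidy checks deleted above (`check_todo` and `check_dbg`) did by hand what clippy can report directly; presumably equivalent lints (such as `clippy::todo` and `clippy::dbg_macro`) take over, though the lint table itself is outside this excerpt. A minimal sketch of the overlap:

// Sketch only: with `clippy::todo` and `clippy::dbg_macro` set to `warn`,
// the patterns the removed tidy checks grepped for are reported by clippy itself.
fn classify(n: i32) -> &'static str {
    // dbg!(n);              // would be reported by clippy::dbg_macro
    // todo!("handle zero"); // would be reported by clippy::todo
    if n < 0 { "negative" } else { "non-negative" }
}
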
fn check_test_attrs(path: &Path, text: &str) {
let ignore_rule =
"https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#ignore";