Remove limit crate in favor of usize

Lukas Wirth 2025-02-20 18:58:42 +01:00
parent e865b249e6
commit 0b2e8166a1
17 changed files with 23 additions and 131 deletions
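The change is mechanical across all seventeen files: each `Limit` value (a thin wrapper around `usize` with a `check` method, deleted later in this diff) becomes a plain `usize`, usually a `const`, and `limit.check(n).is_err()` becomes an explicit comparison. A minimal standalone sketch of the idiom on both sides, reusing the token limit value from hir-expand; the helper function and names are illustrative only:

// Old idiom (API of the deleted limit crate, shown near the bottom of this diff):
//     static TOKEN_LIMIT: Limit = Limit::new(2_097_152);
//     if TOKEN_LIMIT.check(count).is_err() { /* over the limit */ }
//
// New idiom: a plain const and an ordinary comparison.
const TOKEN_LIMIT: usize = 2_097_152;

fn over_limit(count: usize) -> bool {
    count > TOKEN_LIMIT
}

fn main() {
    assert!(!over_limit(2_097_152)); // the boundary value itself is still allowed
    assert!(over_limit(2_097_153));
}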

Cargo.lock

@@ -559,7 +559,6 @@ dependencies = [
"intern",
"itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"limit",
"mbe",
"ra-ap-rustc_abi",
"ra-ap-rustc_parse_format",
@@ -591,7 +590,6 @@ dependencies = [
"intern",
"itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"limit",
"mbe",
"parser",
"rustc-hash 2.0.0",
@@ -626,7 +624,6 @@ dependencies = [
"intern",
"itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"limit",
"nohash-hasher",
"oorandom",
"project-model",
@@ -744,7 +741,6 @@ dependencies = [
"hir",
"indexmap",
"itertools",
-"limit",
"line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr",
"nohash-hasher",
@@ -943,10 +939,6 @@ dependencies = [
"redox_syscall",
]
-[[package]]
-name = "limit"
-version = "0.0.0"
[[package]]
name = "line-index"
version = "0.1.2"
@@ -1279,7 +1271,6 @@ dependencies = [
"drop_bomb",
"edition",
"expect-test",
-"limit",
"ra-ap-rustc_lexer",
"stdx",
"tracing",


@@ -64,7 +64,6 @@ ide-db = { path = "./crates/ide-db", version = "0.0.0" }
ide-diagnostics = { path = "./crates/ide-diagnostics", version = "0.0.0" }
ide-ssr = { path = "./crates/ide-ssr", version = "0.0.0" }
intern = { path = "./crates/intern", version = "0.0.0" }
-limit = { path = "./crates/limit", version = "0.0.0" }
load-cargo = { path = "./crates/load-cargo", version = "0.0.0" }
mbe = { path = "./crates/mbe", version = "0.0.0" }
parser = { path = "./crates/parser", version = "0.0.0" }


@@ -43,7 +43,6 @@ hir-expand.workspace = true
mbe.workspace = true
cfg.workspace = true
tt.workspace = true
-limit.workspace = true
span.workspace = true


@@ -9,7 +9,6 @@ use hir_expand::{
attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandErrorKind,
ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
};
-use limit::Limit;
use span::{Edition, SyntaxContextId};
use syntax::{ast, Parse};
use triomphe::Arc;
@@ -28,18 +27,18 @@ pub struct Expander {
pub(crate) module: ModuleId,
/// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached.
recursion_depth: u32,
-recursion_limit: Limit,
+recursion_limit: usize,
}
impl Expander {
pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander {
let recursion_limit = module.def_map(db).recursion_limit() as usize;
-let recursion_limit = Limit::new(if cfg!(test) {
+let recursion_limit = if cfg!(test) {
// Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug
std::cmp::min(32, recursion_limit)
} else {
recursion_limit
-});
+};
Expander {
current_file_id,
module,
@@ -194,7 +193,7 @@ impl Expander {
let Some(call_id) = value else {
return ExpandResult { value: None, err };
};
-if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() {
+if self.recursion_depth as usize > self.recursion_limit {
self.recursion_depth = u32::MAX;
cov_mark::hit!(your_stack_belongs_to_me);
return ExpandResult::only_err(ExpandError::new(

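The expander above keeps its `u32` depth counter and the `u32::MAX` "limit reached" sentinel; only the limit itself becomes a plain `usize`. A standalone sketch of that guard, with simplified names, a made-up error type, and the bookkeeping reduced to the essentials:

// Sketch only: `enter_macro` stands in for the depth handling in the expansion path above.
struct Expander {
    recursion_depth: u32,
    recursion_limit: usize,
}

impl Expander {
    fn enter_macro(&mut self) -> Result<(), &'static str> {
        if self.recursion_depth as usize > self.recursion_limit {
            self.recursion_depth = u32::MAX; // sentinel: limit has been reached
            return Err("recursion limit reached");
        }
        self.recursion_depth += 1;
        Ok(())
    }
}

fn main() {
    let mut e = Expander { recursion_depth: 0, recursion_limit: 2 };
    assert!(e.enter_macro().is_ok());
    assert!(e.enter_macro().is_ok());
    assert!(e.enter_macro().is_ok());
    assert!(e.enter_macro().is_err()); // depth 3 exceeds the limit of 2
}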

@@ -19,7 +19,6 @@ use hir_expand::{
use intern::{sym, Interned};
use itertools::{izip, Itertools};
use la_arena::Idx;
-use limit::Limit;
use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, EditionedFileId, FileAstId, SyntaxContextId};
use syntax::ast;
@@ -55,8 +54,8 @@ use crate::{
UnresolvedMacro, UseId, UseLoc,
};
-static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
-static FIXED_POINT_LIMIT: Limit = Limit::new(8192);
+const GLOB_RECURSION_LIMIT: usize = 100;
+const FIXED_POINT_LIMIT: usize = 8192;
pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeId) -> DefMap {
let crate_graph = db.crate_graph();
@@ -393,7 +392,7 @@ impl DefCollector<'_> {
}
i += 1;
-if FIXED_POINT_LIMIT.check(i).is_err() {
+if i > FIXED_POINT_LIMIT {
tracing::error!("name resolution is stuck");
break 'resolve_attr;
}
@@ -993,7 +992,7 @@ impl DefCollector<'_> {
import: Option<ImportOrExternCrate>,
depth: usize,
) {
-if GLOB_RECURSION_LIMIT.check(depth).is_err() {
+if depth > GLOB_RECURSION_LIMIT {
// prevent stack overflows (but this shouldn't be possible)
panic!("infinite recursion in glob imports!");
}
@@ -1470,8 +1469,7 @@ impl DefCollector<'_> {
depth: usize,
container: ItemContainerId,
) {
-let recursion_limit = Limit::new(self.def_map.recursion_limit() as usize);
-if recursion_limit.check(depth).is_err() {
+if depth > self.def_map.recursion_limit() as usize {
cov_mark::hit!(macro_expansion_overflow);
tracing::warn!("macro expansion is too deep");
return;
@@ -1499,7 +1497,6 @@ impl DefCollector<'_> {
fn finish(mut self) -> DefMap {
// Emit diagnostics for all remaining unexpanded macros.
let _p = tracing::info_span!("DefCollector::finish").entered();
for directive in &self.unresolved_macros {

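In the def collector, the two caps are now `const`s compared directly: glob-import expansion panics past `GLOB_RECURSION_LIMIT`, and the resolution loop gives up after `FIXED_POINT_LIMIT` iterations. A standalone sketch of the fixed-point guard; the "work" being resolved is a stand-in, not the real collector state:

const FIXED_POINT_LIMIT: usize = 8192;

fn run_to_fixed_point(mut unresolved: usize) -> Result<(), &'static str> {
    let mut i = 0usize;
    while unresolved > 0 {
        unresolved -= 1; // stand-in for one round of resolution making progress
        i += 1;
        if i > FIXED_POINT_LIMIT {
            // the real collector logs "name resolution is stuck" and breaks out
            return Err("name resolution is stuck");
        }
    }
    Ok(())
}

fn main() {
    assert!(run_to_fixed_point(100).is_ok());
    assert!(run_to_fixed_point(FIXED_POINT_LIMIT + 1).is_err());
}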

@@ -2,12 +2,11 @@
use arrayvec::ArrayVec;
use base_db::AnchoredPath;
use hir_expand::{name::Name, HirFileIdExt};
-use limit::Limit;
use span::EditionedFileId;
use crate::{db::DefDatabase, HirFileId};
-static MOD_DEPTH_LIMIT: Limit = Limit::new(32);
+const MOD_DEPTH_LIMIT: usize = 32;
#[derive(Clone, Debug)]
pub(super) struct ModDir {
@@ -50,7 +49,7 @@ impl ModDir {
fn child(&self, dir_path: DirPath, root_non_dir_owner: bool) -> Option<ModDir> {
let depth = self.depth + 1;
-if MOD_DEPTH_LIMIT.check(depth as usize).is_err() {
+if depth as usize > MOD_DEPTH_LIMIT {
tracing::error!("MOD_DEPTH_LIMIT exceeded");
cov_mark::hit!(circular_mods);
return None;

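Where the depth counter is a narrower integer than `usize`, the comparison widens it first, as in `ModDir::child` above. A standalone sketch of that guard; the `u32` depth and the function shape are illustrative:

const MOD_DEPTH_LIMIT: usize = 32;

fn child_depth(depth: u32) -> Option<u32> {
    let depth = depth + 1;
    if depth as usize > MOD_DEPTH_LIMIT {
        // the real code logs an error and bails out (circular or too-deep modules)
        return None;
    }
    Some(depth)
}

fn main() {
    assert_eq!(child_depth(31), Some(32)); // the limit itself is still accepted
    assert_eq!(child_depth(32), None);
}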

@@ -31,7 +31,6 @@ cfg.workspace = true
syntax.workspace = true
tt.workspace = true
mbe.workspace = true
-limit.workspace = true
span.workspace = true
parser.workspace = true
syntax-bridge.workspace = true


@@ -2,7 +2,6 @@
use base_db::{ra_salsa, CrateId, SourceDatabase};
use either::Either;
-use limit::Limit;
use mbe::MatchedArmIndex;
use rustc_hash::FxHashSet;
use span::{AstIdMap, Edition, EditionedFileId, Span, SyntaxContextData, SyntaxContextId};
@@ -35,7 +34,7 @@ type MacroArgResult = (Arc<tt::TopSubtree>, SyntaxFixupUndoInfo, Span);
/// an error will be emitted.
///
/// Actual max for `analysis-stats .` at some point: 30672.
-static TOKEN_LIMIT: Limit = Limit::new(2_097_152);
+const TOKEN_LIMIT: usize = 2_097_152;
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
@@ -740,20 +739,19 @@ pub(crate) fn token_tree_to_syntax_node(
fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
let tt = tt.top_subtree();
let count = tt.count();
-if TOKEN_LIMIT.check(count).is_err() {
+if count <= TOKEN_LIMIT {
+Ok(())
+} else {
Err(ExpandResult {
value: (),
err: Some(ExpandError::other(
tt.delimiter.open,
format!(
"macro invocation exceeds token limit: produced {} tokens, limit is {}",
-count,
-TOKEN_LIMIT.inner(),
+count, TOKEN_LIMIT,
),
)),
})
-} else {
-Ok(())
}
}

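With `TOKEN_LIMIT` as a plain const it can be passed to `format!` directly, with no `.inner()` call, and the rewritten `check_tt_count` leads with the success branch. A standalone sketch of the same shape, with the token count passed in as a number and a plain `String` error in place of `ExpandResult`:

const TOKEN_LIMIT: usize = 2_097_152;

fn check_tt_count(count: usize) -> Result<(), String> {
    if count <= TOKEN_LIMIT {
        Ok(())
    } else {
        Err(format!(
            "macro invocation exceeds token limit: produced {} tokens, limit is {}",
            count, TOKEN_LIMIT,
        ))
    }
}

fn main() {
    assert!(check_tt_count(10).is_ok());
    assert!(check_tt_count(TOKEN_LIMIT + 1).is_err());
}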

@@ -47,7 +47,6 @@ hir-def.workspace = true
hir-expand.workspace = true
base-db.workspace = true
syntax.workspace = true
-limit.workspace = true
span.workspace = true
[dev-dependencies]


@@ -9,7 +9,6 @@ use chalk_ir::cast::Cast;
use hir_def::lang_item::LangItem;
use hir_expand::name::Name;
use intern::sym;
-use limit::Limit;
use triomphe::Arc;
use crate::{
@@ -17,7 +16,7 @@ use crate::{
TraitEnvironment, Ty, TyBuilder, TyKind,
};
-static AUTODEREF_RECURSION_LIMIT: Limit = Limit::new(20);
+const AUTODEREF_RECURSION_LIMIT: usize = 20;
#[derive(Debug)]
pub(crate) enum AutoderefKind {
@@ -140,7 +139,7 @@ impl<T: TrackAutoderefSteps> Iterator for Autoderef<'_, '_, T> {
return Some((self.ty.clone(), 0));
}
-if AUTODEREF_RECURSION_LIMIT.check(self.steps.len() + 1).is_err() {
+if self.steps.len() > AUTODEREF_RECURSION_LIMIT {
return None;
}

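The autoderef iterator above now bounds itself with a direct length comparison, which appears to allow one more step than the old `check(steps.len() + 1)` form. A standalone sketch with a stand-in step list (the real iterator records deref kinds and types):

const AUTODEREF_RECURSION_LIMIT: usize = 20;

// Stand-in for the real Autoderef state; only the step count matters here.
struct Autoderef {
    steps: Vec<&'static str>,
}

impl Iterator for Autoderef {
    type Item = usize;
    fn next(&mut self) -> Option<usize> {
        if self.steps.len() > AUTODEREF_RECURSION_LIMIT {
            return None; // give up rather than chase an unbounded Deref chain
        }
        self.steps.push("deref");
        Some(self.steps.len())
    }
}

fn main() {
    let taken = Autoderef { steps: Vec::new() }.count();
    assert_eq!(taken, AUTODEREF_RECURSION_LIMIT + 1);
}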

@@ -30,7 +30,6 @@ bitflags.workspace = true
# local deps
base-db.workspace = true
-limit.workspace = true
parser.workspace = true
profile.workspace = true
stdx.workspace = true


@@ -357,7 +357,7 @@ fn path_applicable_imports(
let mod_path = mod_path(item)?;
Some(LocatedImport::new(mod_path, item, item))
})
-.take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
+.take(DEFAULT_QUERY_SEARCH_LIMIT)
.collect()
}
// we have some unresolved qualifier that we search an import for
@@ -383,7 +383,7 @@ fn path_applicable_imports(
qualifier_rest,
)
})
-.take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
+.take(DEFAULT_QUERY_SEARCH_LIMIT)
.collect(),
}
}


@@ -6,7 +6,6 @@ use std::ops::ControlFlow;
use either::Either;
use hir::{import_map, Crate, ItemInNs, Module, Semantics};
-use limit::Limit;
use crate::{
imports::import_assets::NameToImport,
@@ -15,7 +14,7 @@ use crate::{
};
/// A value to use, when uncertain which limit to pick.
-pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(100);
+pub const DEFAULT_QUERY_SEARCH_LIMIT: usize = 100;
pub use import_map::AssocSearchMode;

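`DEFAULT_QUERY_SEARCH_LIMIT` is now a `pub const usize`, so callers such as `path_applicable_imports` earlier in this diff hand it straight to `Iterator::take` instead of going through `.inner()`. A standalone sketch of that usage:

pub const DEFAULT_QUERY_SEARCH_LIMIT: usize = 100;

fn main() {
    // Previously: .take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
    let results: Vec<u32> = (0u32..).take(DEFAULT_QUERY_SEARCH_LIMIT).collect();
    assert_eq!(results.len(), DEFAULT_QUERY_SEARCH_LIMIT);
}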

@@ -1,16 +0,0 @@
[package]
name = "limit"
version = "0.0.0"
repository.workspace = true
description = "A struct to enforce limits for rust-analyzer."
authors.workspace = true
edition.workspace = true
license.workspace = true
rust-version.workspace = true
[features]
tracking = []
[lints]
workspace = true


@@ -1,67 +0,0 @@
//! limit defines a struct to enforce limits.
#[cfg(feature = "tracking")]
use std::sync::atomic::AtomicUsize;
/// Represents a struct used to enforce a numerical limit.
#[derive(Debug)]
pub struct Limit {
upper_bound: usize,
#[cfg(feature = "tracking")]
max: AtomicUsize,
}
impl Limit {
/// Creates a new limit.
#[inline]
pub const fn new(upper_bound: usize) -> Self {
Self {
upper_bound,
#[cfg(feature = "tracking")]
max: AtomicUsize::new(0),
}
}
/// Creates a new limit.
#[inline]
#[cfg(feature = "tracking")]
pub const fn new_tracking(upper_bound: usize) -> Self {
Self {
upper_bound,
#[cfg(feature = "tracking")]
max: AtomicUsize::new(1),
}
}
/// Gets the underlying numeric limit.
#[inline]
pub const fn inner(&self) -> usize {
self.upper_bound
}
/// Checks whether the given value is below the limit.
/// Returns `Ok` when `other` is below `self`, and `Err` otherwise.
#[inline]
pub fn check(&self, other: usize) -> Result<(), ()> {
if other > self.upper_bound {
Err(())
} else {
#[cfg(feature = "tracking")]
loop {
use std::sync::atomic::Ordering;
let old_max = self.max.load(Ordering::Relaxed);
if other <= old_max || old_max == 0 {
break;
}
_ = self.max.compare_exchange_weak(
old_max,
other,
Ordering::Relaxed,
Ordering::Relaxed,
);
}
Ok(())
}
}
}

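The module deleted above is the whole surface the rest of the diff replaces: `Limit::new`, `inner`, and `check`, plus an optional `tracking` feature that remembered the largest value ever checked; as far as this diff shows, the tracking side has no replacement. The remaining correspondence fits in a few lines; the names below are illustrative:

const UPPER_BOUND: usize = 100;

fn main() {
    let n: usize = 42;
    // Limit::new(100)         ->  const UPPER_BOUND: usize = 100;
    // limit.inner()           ->  UPPER_BOUND
    // limit.check(n).is_ok()  ->  n <= UPPER_BOUND
    // limit.check(n).is_err() ->  n > UPPER_BOUND
    assert!(n <= UPPER_BOUND);
}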

@@ -15,7 +15,6 @@ doctest = false
[dependencies]
drop_bomb = "0.1.5"
ra-ap-rustc_lexer.workspace = true
-limit.workspace = true
tracing = { workspace = true, optional = true }
edition.workspace = true


@@ -3,7 +3,6 @@
use std::cell::Cell;
use drop_bomb::DropBomb;
-use limit::Limit;
use crate::{
event::Event,
@@ -30,7 +29,7 @@ pub(crate) struct Parser<'t> {
edition: Edition,
}
-static PARSER_STEP_LIMIT: Limit = Limit::new(15_000_000);
+const PARSER_STEP_LIMIT: usize = 15_000_000;
impl<'t> Parser<'t> {
pub(super) fn new(inp: &'t Input, edition: Edition) -> Parser<'t> {
@@ -54,7 +53,7 @@ impl<'t> Parser<'t> {
assert!(n <= 3);
let steps = self.steps.get();
-assert!(PARSER_STEP_LIMIT.check(steps as usize).is_ok(), "the parser seems stuck");
+assert!((steps as usize) < PARSER_STEP_LIMIT, "the parser seems stuck");
self.steps.set(steps + 1);
self.inp.kind(self.pos + n)
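The parser's anti-stuck guard keeps the same shape: the step counter stays a `u32` in a `Cell` and is widened for the comparison against the new const. A standalone sketch with the surrounding parser state omitted:

use std::cell::Cell;

const PARSER_STEP_LIMIT: usize = 15_000_000;

fn bump_step(steps: &Cell<u32>) {
    let n = steps.get();
    // Mirrors the new assertion: panic once the step budget is exhausted.
    assert!((n as usize) < PARSER_STEP_LIMIT, "the parser seems stuck");
    steps.set(n + 1);
}

fn main() {
    let steps = Cell::new(0u32);
    bump_step(&steps);
    assert_eq!(steps.get(), 1);
}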