Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-29 02:52:11 +00:00)

Merge pull request #19688 from ChayimFriedman2/less-unused

internal: More `shrink_to_fit()` and upgrade dashmap and hashbrown

Commit 71a3888d07. 14 changed files with 167 additions and 176 deletions.

Cargo.lock (generated): 19 changes
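The changes below follow one theme: collections that are built once and then kept alive for a long time get an explicit `shrink_to_fit()` once they reach their final size, so they stop carrying the slack capacity left over from growth. As a minimal standalone sketch of what that buys (std types only, not code from this PR):

use std::collections::HashMap;

fn main() {
    let mut v: Vec<u32> = Vec::with_capacity(1024);
    v.extend([1, 2, 3]);
    assert!(v.capacity() >= 1024);
    // Once the collection has reached its final size, drop the spare capacity.
    v.shrink_to_fit();
    assert!(v.capacity() >= v.len()); // now close to 3, not 1024

    // HashMap grows in steps too, so long-lived maps benefit the same way.
    let mut m: HashMap<u32, u32> = HashMap::with_capacity(1024);
    m.insert(1, 2);
    m.shrink_to_fit();
    assert!(m.capacity() >= m.len());
}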
@@ -79,7 +79,7 @@ name = "base-db"
 version = "0.0.0"
 dependencies = [
  "cfg",
- "dashmap 5.5.3",
+ "dashmap",
  "intern",
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "query-group-macro",

@@ -323,19 +323,6 @@ dependencies = [
  "windows-sys 0.59.0",
 ]
 
-[[package]]
-name = "dashmap"
-version = "5.5.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
-dependencies = [
- "cfg-if",
- "hashbrown 0.14.5",
- "lock_api",
- "once_cell",
- "parking_lot_core",
-]
-
 [[package]]
 name = "dashmap"
 version = "6.1.0"

@@ -1051,7 +1038,7 @@ dependencies = [
 name = "intern"
 version = "0.0.0"
 dependencies = [
- "dashmap 5.5.3",
+ "dashmap",
  "hashbrown 0.14.5",
  "rustc-hash 2.1.1",
  "triomphe",

@@ -2045,7 +2032,7 @@ checksum = "1be22155f8d9732518b2db2bf379fe6f0b2375e76b08b7c8fe6c1b887d548c24"
 dependencies = [
  "boxcar",
  "crossbeam-queue",
- "dashmap 6.1.0",
+ "dashmap",
  "hashbrown 0.15.2",
  "hashlink",
  "indexmap",
@@ -158,11 +158,10 @@ triomphe = { version = "0.1.14", default-features = false, features = ["std"] }
 url = "2.5.4"
 xshell = "0.2.7"
 
-
 # We need to freeze the version of the crate, as the raw-api feature is considered unstable
-dashmap = { version = "=5.5.3", features = ["raw-api"] }
+dashmap = { version = "=6.1.0", features = ["raw-api", "inline"] }
 # We need to freeze the version of the crate, as it needs to match with dashmap
-hashbrown = { version = "=0.14.5", features = [
+hashbrown = { version = "0.14.0", features = [
     "inline-more",
 ], default-features = false }
 
@@ -422,8 +422,8 @@ impl CrateGraphBuilder {
         edition: Edition,
         display_name: Option<CrateDisplayName>,
         version: Option<String>,
-        cfg_options: CfgOptions,
-        potential_cfg_options: Option<CfgOptions>,
+        mut cfg_options: CfgOptions,
+        mut potential_cfg_options: Option<CfgOptions>,
         mut env: Env,
         origin: CrateOrigin,
         is_proc_macro: bool,

@@ -431,6 +431,10 @@ impl CrateGraphBuilder {
         ws_data: Arc<CrateWorkspaceData>,
     ) -> CrateBuilderId {
         env.entries.shrink_to_fit();
+        cfg_options.shrink_to_fit();
+        if let Some(potential_cfg_options) = &mut potential_cfg_options {
+            potential_cfg_options.shrink_to_fit();
+        }
         self.arena.alloc(CrateBuilder {
             basic: CrateData {
                 root_file_id,
@@ -110,6 +110,11 @@ impl CfgOptions {
         enabled.sort_unstable();
         HashableCfgOptions { _enabled: enabled }
     }
+
+    #[inline]
+    pub fn shrink_to_fit(&mut self) {
+        self.enabled.shrink_to_fit();
+    }
 }
 
 impl Extend<CfgAtom> for CfgOptions {
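The two hunks above work together: the crate-graph constructor now takes `cfg_options` and `potential_cfg_options` by value as `mut` so it can shrink them in place before storing them, and `CfgOptions` gains a small `shrink_to_fit()` that forwards to its backing storage. A hedged miniature of the same pattern (the `Options` type and `build` function are invented for illustration):

struct Options {
    enabled: Vec<String>,
}

impl Options {
    #[inline]
    fn shrink_to_fit(&mut self) {
        self.enabled.shrink_to_fit();
    }
}

// Taking ownership with `mut` lets the callee normalize the value
// (here: shrink it) before it is stored for the long term.
fn build(mut opts: Options, mut extra: Option<Options>) -> (Options, Option<Options>) {
    opts.shrink_to_fit();
    if let Some(extra) = &mut extra {
        extra.shrink_to_fit();
    }
    (opts, extra)
}

fn main() {
    let mut enabled = Vec::with_capacity(64);
    enabled.push("test".to_owned());
    let (opts, _) = build(Options { enabled }, None);
    assert_eq!(opts.enabled.len(), 1);
}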
@@ -223,7 +223,10 @@ impl ItemTree {
     }
 
     fn shrink_to_fit(&mut self) {
-        if let Some(data) = &mut self.data {
+        let ItemTree { top_level, attrs, data } = self;
+        top_level.shrink_to_fit();
+        attrs.shrink_to_fit();
+        if let Some(data) = data {
             let ItemTreeData {
                 uses,
                 extern_crates,
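Destructuring `self` first, rather than reaching through `self.data` alone, splits the borrow into one independent mutable borrow per field, so `top_level` and `attrs` can be shrunk alongside `data`. A tiny self-contained sketch of that pattern (the `Tree` type is invented for illustration):

struct Tree {
    top_level: Vec<u32>,
    attrs: Vec<u32>,
    data: Option<Vec<u32>>,
}

impl Tree {
    fn shrink_to_fit(&mut self) {
        // One destructuring pattern yields one `&mut` per field.
        let Tree { top_level, attrs, data } = self;
        top_level.shrink_to_fit();
        attrs.shrink_to_fit();
        if let Some(data) = data {
            data.shrink_to_fit();
        }
    }
}

fn main() {
    let mut t = Tree { top_level: Vec::with_capacity(32), attrs: vec![1], data: None };
    t.shrink_to_fit();
    assert_eq!(t.attrs.len(), 1);
}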
@@ -493,7 +493,7 @@ pub struct InferenceResult {
     /// ```
     /// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
     pub binding_modes: ArenaMap<PatId, BindingMode>,
-    pub expr_adjustments: FxHashMap<ExprId, Vec<Adjustment>>,
+    pub expr_adjustments: FxHashMap<ExprId, Box<[Adjustment]>>,
     pub(crate) closure_info: FxHashMap<ClosureId, (Vec<CapturedItem>, FnTrait)>,
     // FIXME: remove this field
     pub mutated_bindings_in_closure: FxHashSet<BindingId>,
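Storing `Box<[Adjustment]>` instead of `Vec<Adjustment>` shaves one `usize` (the capacity field) off every entry and guarantees no slack capacity is retained. A quick check of the layouts:

use std::mem::size_of;

fn main() {
    // Vec<T> = (pointer, capacity, length); Box<[T]> = (pointer, length).
    assert_eq!(size_of::<Vec<u32>>(), 3 * size_of::<usize>());
    assert_eq!(size_of::<Box<[u32]>>(), 2 * size_of::<usize>());
    // Thanks to the non-null niche, wrapping in Option stays two words.
    assert_eq!(size_of::<Option<Box<[u32]>>>(), 2 * size_of::<usize>());
}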
@@ -785,8 +785,8 @@ impl<'a> InferenceContext<'a> {
         // Comment from rustc:
         // Even though coercion casts provide type hints, we check casts after fallback for
         // backwards compatibility. This makes fallback a stronger type hint than a cast coercion.
-        let mut apply_adjustments = |expr, adj| {
-            expr_adjustments.insert(expr, adj);
+        let mut apply_adjustments = |expr, adj: Vec<_>| {
+            expr_adjustments.insert(expr, adj.into_boxed_slice());
         };
         let mut set_coercion_cast = |expr| {
             coercion_casts.insert(expr);
@@ -808,22 +808,27 @@ impl<'a> InferenceContext<'a> {
             *ty = table.resolve_completely(ty.clone());
             *has_errors = *has_errors || ty.contains_unknown();
         }
+        type_of_expr.shrink_to_fit();
         for ty in type_of_pat.values_mut() {
             *ty = table.resolve_completely(ty.clone());
             *has_errors = *has_errors || ty.contains_unknown();
         }
+        type_of_pat.shrink_to_fit();
         for ty in type_of_binding.values_mut() {
             *ty = table.resolve_completely(ty.clone());
             *has_errors = *has_errors || ty.contains_unknown();
         }
+        type_of_binding.shrink_to_fit();
         for ty in type_of_rpit.values_mut() {
             *ty = table.resolve_completely(ty.clone());
             *has_errors = *has_errors || ty.contains_unknown();
         }
+        type_of_rpit.shrink_to_fit();
         for ty in type_of_for_iterator.values_mut() {
             *ty = table.resolve_completely(ty.clone());
             *has_errors = *has_errors || ty.contains_unknown();
         }
+        type_of_for_iterator.shrink_to_fit();
 
         *has_errors |= !type_mismatches.is_empty();
 

@@ -838,6 +843,7 @@ impl<'a> InferenceContext<'a> {
                 )
                 .is_ok()
             });
+        type_mismatches.shrink_to_fit();
         diagnostics.retain_mut(|diagnostic| {
             use InferenceDiagnostic::*;
             match diagnostic {

@@ -866,24 +872,29 @@ impl<'a> InferenceContext<'a> {
             }
             true
         });
+        diagnostics.shrink_to_fit();
         for (_, subst) in method_resolutions.values_mut() {
             *subst = table.resolve_completely(subst.clone());
             *has_errors =
                 *has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown());
         }
+        method_resolutions.shrink_to_fit();
         for (_, subst) in assoc_resolutions.values_mut() {
             *subst = table.resolve_completely(subst.clone());
             *has_errors =
                 *has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown());
         }
+        assoc_resolutions.shrink_to_fit();
         for adjustment in expr_adjustments.values_mut().flatten() {
             adjustment.target = table.resolve_completely(adjustment.target.clone());
             *has_errors = *has_errors || adjustment.target.contains_unknown();
         }
+        expr_adjustments.shrink_to_fit();
         for adjustment in pat_adjustments.values_mut().flatten() {
             *adjustment = table.resolve_completely(adjustment.clone());
             *has_errors = *has_errors || adjustment.contains_unknown();
         }
+        pat_adjustments.shrink_to_fit();
         result.tuple_field_access_types = tuple_field_accesses_rev
             .into_iter()
             .enumerate()

@@ -893,6 +904,7 @@ impl<'a> InferenceContext<'a> {
                     *has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown());
                 })
                 .collect();
+        result.tuple_field_access_types.shrink_to_fit();
 
         result.diagnostics = diagnostics;
 
@@ -1261,7 +1273,7 @@ impl<'a> InferenceContext<'a> {
         self.result.type_of_expr.insert(expr, ty);
     }
 
-    fn write_expr_adj(&mut self, expr: ExprId, adjustments: Vec<Adjustment>) {
+    fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment]>) {
         if adjustments.is_empty() {
             return;
         }
@@ -869,8 +869,8 @@ impl CapturedItemWithoutTy {
 impl InferenceContext<'_> {
     fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
         let r = self.place_of_expr_without_adjust(tgt_expr)?;
-        let default = vec![];
-        let adjustments = self.result.expr_adjustments.get(&tgt_expr).unwrap_or(&default);
+        let adjustments =
+            self.result.expr_adjustments.get(&tgt_expr).map(|it| &**it).unwrap_or_default();
         apply_adjusts_to_place(&mut self.current_capture_span_stack, r, adjustments)
     }
 

@@ -1701,7 +1701,7 @@ impl InferenceContext<'_> {
         for (derefed_callee, callee_ty, params, expr) in exprs {
             if let &Expr::Call { callee, .. } = &self.body[expr] {
                 let mut adjustments =
-                    self.result.expr_adjustments.remove(&callee).unwrap_or_default();
+                    self.result.expr_adjustments.remove(&callee).unwrap_or_default().into_vec();
                 self.write_fn_trait_method_resolution(
                     kind,
                     &derefed_callee,

@@ -1710,7 +1710,7 @@ impl InferenceContext<'_> {
                     &params,
                     expr,
                 );
-                self.result.expr_adjustments.insert(callee, adjustments);
+                self.result.expr_adjustments.insert(callee, adjustments.into_boxed_slice());
             }
         }
     }
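Where the inference code still needs to mutate an adjustment list, it round-trips through `Vec` with `into_vec()` and converts back with `into_boxed_slice()`. The round trip is asymmetric, as this small sketch shows:

fn main() {
    let b: Box<[i32]> = vec![1, 2, 3].into_boxed_slice();
    // Box<[T]> -> Vec<T> is free: the allocation is reused, capacity == length.
    let mut v = b.into_vec();
    v.push(4);
    // Vec<T> -> Box<[T]> first shrinks to the length, which may reallocate
    // and copy if there is excess capacity.
    let b = v.into_boxed_slice();
    assert_eq!(*b, [1, 2, 3, 4]);
}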
@@ -148,11 +148,11 @@ impl CoerceMany {
         if let (Ok(result1), Ok(result2)) = (result1, result2) {
             ctx.table.register_infer_ok(InferOk { value: (), goals: result1.goals });
             for &e in &self.expressions {
-                ctx.write_expr_adj(e, result1.value.0.clone());
+                ctx.write_expr_adj(e, result1.value.0.clone().into_boxed_slice());
             }
             ctx.table.register_infer_ok(InferOk { value: (), goals: result2.goals });
             if let Some(expr) = expr {
-                ctx.write_expr_adj(expr, result2.value.0);
+                ctx.write_expr_adj(expr, result2.value.0.into_boxed_slice());
                 self.expressions.push(expr);
             }
             return self.final_ty = Some(target_ty);

@@ -182,7 +182,7 @@ impl CoerceMany {
         {
             self.final_ty = Some(res);
             for &e in &self.expressions {
-                ctx.write_expr_adj(e, adjustments.clone());
+                ctx.write_expr_adj(e, adjustments.clone().into_boxed_slice());
             }
         } else {
             match cause {

@@ -263,7 +263,7 @@ impl InferenceContext<'_> {
     ) -> Result<Ty, TypeError> {
         let (adjustments, ty) = self.coerce_inner(from_ty, to_ty, coerce_never)?;
         if let Some(expr) = expr {
-            self.write_expr_adj(expr, adjustments);
+            self.write_expr_adj(expr, adjustments.into_boxed_slice());
         }
         Ok(ty)
     }
@@ -812,7 +812,7 @@ impl InferenceContext<'_> {
             self_ty.clone(),
             self.table.new_lifetime_var(),
         ));
-        self.write_expr_adj(*base, adj);
+        self.write_expr_adj(*base, adj.into_boxed_slice());
         if let Some(func) = self
             .db
             .trait_items(index_trait)

@@ -1356,10 +1356,10 @@ impl InferenceContext<'_> {
                 if let TyKind::Ref(mtbl, lt, _) = p_left.kind(Interner) {
                     self.write_expr_adj(
                         lhs,
-                        vec![Adjustment {
+                        Box::new([Adjustment {
                             kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)),
                             target: p_left.clone(),
-                        }],
+                        }]),
                     );
                 }
             }

@@ -1368,10 +1368,10 @@ impl InferenceContext<'_> {
                 if let TyKind::Ref(mtbl, lt, _) = p_right.kind(Interner) {
                     self.write_expr_adj(
                         rhs,
-                        vec![Adjustment {
+                        Box::new([Adjustment {
                             kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)),
                             target: p_right.clone(),
-                        }],
+                        }]),
                     );
                 }
             }

@@ -1627,7 +1627,7 @@ impl InferenceContext<'_> {
 
         match self.lookup_field(&receiver_ty, name) {
             Some((ty, field_id, adjustments, is_public)) => {
-                self.write_expr_adj(receiver, adjustments);
+                self.write_expr_adj(receiver, adjustments.into_boxed_slice());
                 self.result.field_resolutions.insert(tgt_expr, field_id);
                 if !is_public {
                     if let Either::Left(field) = field_id {

@@ -1662,7 +1662,7 @@ impl InferenceContext<'_> {
             Some((adjust, func, _)) => {
                 let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
                 let substs = self.substs_for_method_call(tgt_expr, func.into(), None);
-                self.write_expr_adj(receiver, adjustments);
+                self.write_expr_adj(receiver, adjustments.into_boxed_slice());
                 self.write_method_resolution(tgt_expr, func, substs.clone());
 
                 self.check_method_call(

@@ -1725,7 +1725,7 @@ impl InferenceContext<'_> {
                     tgt_expr,
                 );
             }
-            self.write_expr_adj(callee, adjustments);
+            self.write_expr_adj(callee, adjustments.into_boxed_slice());
             (params, ret_ty)
         }
         None => {

@@ -1809,7 +1809,7 @@ impl InferenceContext<'_> {
         }
 
         let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
-        self.write_expr_adj(receiver, adjustments);
+        self.write_expr_adj(receiver, adjustments.into_boxed_slice());
 
         let substs = self.substs_for_method_call(tgt_expr, func.into(), generic_args);
         self.write_method_resolution(tgt_expr, func, substs.clone());

@@ -1828,7 +1828,7 @@ impl InferenceContext<'_> {
         let field_with_same_name_exists = match self.lookup_field(&receiver_ty, method_name)
         {
             Some((ty, field_id, adjustments, _public)) => {
-                self.write_expr_adj(receiver, adjustments);
+                self.write_expr_adj(receiver, adjustments.into_boxed_slice());
                 self.result.field_resolutions.insert(tgt_expr, field_id);
                 Some(ty)
             }
@@ -43,7 +43,7 @@ impl_internable!(
     InternedWrapper<ConstData>,
     InternedWrapper<ConstScalar>,
     InternedWrapper<Vec<CanonicalVarKind>>,
-    InternedWrapper<Vec<ProgramClause>>,
+    InternedWrapper<Box<[ProgramClause]>>,
     InternedWrapper<Vec<QuantifiedWhereClause>>,
     InternedWrapper<SmallVec<[Variance; 16]>>,
 );

@@ -60,7 +60,7 @@ impl chalk_ir::interner::Interner for Interner {
     type InternedGoal = Arc<GoalData>;
     type InternedGoals = Vec<Goal>;
     type InternedSubstitution = Interned<InternedWrapper<SmallVec<[GenericArg; 2]>>>;
-    type InternedProgramClauses = Interned<InternedWrapper<Vec<ProgramClause>>>;
+    type InternedProgramClauses = Interned<InternedWrapper<Box<[ProgramClause]>>>;
     type InternedProgramClause = ProgramClauseData;
     type InternedQuantifiedWhereClauses = Interned<InternedWrapper<Vec<QuantifiedWhereClause>>>;
     type InternedVariableKinds = Interned<InternedWrapper<Vec<VariableKind>>>;
@@ -3,22 +3,20 @@
 //! Eventually this should probably be replaced with salsa-based interning.
 
 use std::{
+    borrow::Borrow,
     fmt::{self, Debug, Display},
-    hash::{BuildHasherDefault, Hash, Hasher},
+    hash::{BuildHasher, BuildHasherDefault, Hash, Hasher},
     ops::Deref,
     sync::OnceLock,
 };
 
 use dashmap::{DashMap, SharedValue};
-use hashbrown::{HashMap, hash_map::RawEntryMut};
+use hashbrown::raw::RawTable;
 use rustc_hash::FxHasher;
 use triomphe::Arc;
 
 type InternMap<T> = DashMap<Arc<T>, (), BuildHasherDefault<FxHasher>>;
-type Guard<T> = dashmap::RwLockWriteGuard<
-    'static,
-    HashMap<Arc<T>, SharedValue<()>, BuildHasherDefault<FxHasher>>,
->;
+type Guard<T> = dashmap::RwLockWriteGuard<'static, RawTable<(Arc<T>, SharedValue<()>)>>;
 
 mod symbol;
 pub use self::symbol::{Symbol, symbols as sym};

@@ -28,54 +26,61 @@ pub struct Interned<T: Internable + ?Sized> {
 }
 
 impl<T: Internable> Interned<T> {
+    #[inline]
     pub fn new(obj: T) -> Self {
-        let (mut shard, hash) = Self::select(&obj);
-        // Atomically,
-        // - check if `obj` is already in the map
-        // - if so, clone its `Arc` and return it
-        // - if not, box it up, insert it, and return a clone
-        // This needs to be atomic (locking the shard) to avoid races with other thread, which could
-        // insert the same object between us looking it up and inserting it.
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, &obj) {
-            RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
-            RawEntryMut::Vacant(vac) => Self {
-                arc: vac.insert_hashed_nocheck(hash, Arc::new(obj), SharedValue::new(())).0.clone(),
-            },
-        }
+        Self::new_generic(obj)
     }
 }
 
 impl Interned<str> {
+    #[inline]
     pub fn new_str(s: &str) -> Self {
-        let (mut shard, hash) = Self::select(s);
-        // Atomically,
-        // - check if `obj` is already in the map
-        // - if so, clone its `Arc` and return it
-        // - if not, box it up, insert it, and return a clone
-        // This needs to be atomic (locking the shard) to avoid races with other thread, which could
-        // insert the same object between us looking it up and inserting it.
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, s) {
-            RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
-            RawEntryMut::Vacant(vac) => Self {
-                arc: vac.insert_hashed_nocheck(hash, Arc::from(s), SharedValue::new(())).0.clone(),
-            },
-        }
+        Self::new_generic(s)
     }
 }
 
 impl<T: Internable + ?Sized> Interned<T> {
     #[inline]
-    fn select(obj: &T) -> (Guard<T>, u64) {
+    pub fn new_generic<U>(obj: U) -> Self
+    where
+        U: Borrow<T>,
+        Arc<T>: From<U>,
+    {
         let storage = T::storage().get();
-        let hash = {
-            let mut hasher = std::hash::BuildHasher::build_hasher(storage.hasher());
-            obj.hash(&mut hasher);
-            hasher.finish()
-        };
+        let (mut shard, hash) = Self::select(storage, obj.borrow());
+        // Atomically,
+        // - check if `obj` is already in the map
+        // - if so, clone its `Arc` and return it
+        // - if not, box it up, insert it, and return a clone
+        // This needs to be atomic (locking the shard) to avoid races with other thread, which could
+        // insert the same object between us looking it up and inserting it.
+        let bucket = match shard.find_or_find_insert_slot(
+            hash,
+            |(other, _)| **other == *obj.borrow(),
+            |(x, _)| Self::hash(storage, x),
+        ) {
+            Ok(bucket) => bucket,
+            // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then.
+            Err(insert_slot) => unsafe {
+                shard.insert_in_slot(hash, insert_slot, (Arc::from(obj), SharedValue::new(())))
+            },
+        };
+        // SAFETY: We just retrieved/inserted this bucket.
+        unsafe { Self { arc: bucket.as_ref().0.clone() } }
+    }
+
+    #[inline]
+    fn select(storage: &'static InternMap<T>, obj: &T) -> (Guard<T>, u64) {
+        let hash = Self::hash(storage, obj);
         let shard_idx = storage.determine_shard(hash as usize);
         let shard = &storage.shards()[shard_idx];
         (shard.write(), hash)
     }
+
+    #[inline]
+    fn hash(storage: &'static InternMap<T>, obj: &T) -> u64 {
+        storage.hasher().hash_one(obj)
+    }
 }
 
 impl<T: Internable + ?Sized> Drop for Interned<T> {

@@ -93,21 +98,20 @@ impl<T: Internable + ?Sized> Drop for Interned<T> {
 impl<T: Internable + ?Sized> Interned<T> {
     #[cold]
     fn drop_slow(&mut self) {
-        let (mut shard, hash) = Self::select(&self.arc);
+        let storage = T::storage().get();
+        let (mut shard, hash) = Self::select(storage, &self.arc);
 
         if Arc::count(&self.arc) != 2 {
            // Another thread has interned another copy
            return;
         }
 
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, &self.arc) {
-            RawEntryMut::Occupied(occ) => occ.remove(),
-            RawEntryMut::Vacant(_) => unreachable!(),
-        };
+        shard.remove_entry(hash, |(other, _)| **other == *self.arc);
 
         // Shrink the backing storage if the shard is less than 50% occupied.
         if shard.len() * 2 < shard.capacity() {
-            shard.shrink_to_fit();
+            let len = shard.len();
+            shard.shrink_to(len, |(x, _)| Self::hash(storage, x));
         }
     }
 }
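With dashmap 6, the interner drives each shard as a hashbrown `RawTable` through `find_or_find_insert_slot()` and `insert_in_slot()` instead of the old raw-entry map API, but the invariant is unchanged: the lookup and the insert must happen under one shard lock so two threads cannot both intern the same value. A rough single-lock sketch of that invariant using only safe std types (not the sharded dashmap implementation above):

use std::collections::HashMap;
use std::sync::{Arc, Mutex, OnceLock};

static INTERNER: OnceLock<Mutex<HashMap<Arc<str>, ()>>> = OnceLock::new();

fn intern(s: &str) -> Arc<str> {
    let mut map = INTERNER.get_or_init(Default::default).lock().unwrap();
    // Both steps happen under the same lock, so a racing thread cannot
    // insert a second copy between the lookup and the insert.
    if let Some((existing, _)) = map.get_key_value(s) {
        return existing.clone();
    }
    let arc: Arc<str> = Arc::from(s);
    map.insert(arc.clone(), ());
    arc
}

fn main() {
    let (a, b) = (intern("hello"), intern("hello"));
    assert!(Arc::ptr_eq(&a, &b)); // same allocation, interned once
}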
@@ -2,16 +2,15 @@
 //! supporting compile time declaration of symbols that will never be freed.
 
 use std::{
-    borrow::Borrow,
     fmt,
-    hash::{BuildHasherDefault, Hash, Hasher},
+    hash::{BuildHasher, BuildHasherDefault, Hash},
     mem::{self, ManuallyDrop},
     ptr::NonNull,
     sync::OnceLock,
 };
 
 use dashmap::{DashMap, SharedValue};
-use hashbrown::{HashMap, hash_map::RawEntryMut};
+use hashbrown::raw::RawTable;
 use rustc_hash::FxHasher;
 use triomphe::Arc;

@@ -127,31 +126,39 @@ impl fmt::Debug for Symbol {
 const _: () = assert!(size_of::<Symbol>() == size_of::<NonNull<()>>());
 const _: () = assert!(align_of::<Symbol>() == align_of::<NonNull<()>>());
 
-static MAP: OnceLock<DashMap<SymbolProxy, (), BuildHasherDefault<FxHasher>>> = OnceLock::new();
+type Map = DashMap<Symbol, (), BuildHasherDefault<FxHasher>>;
+static MAP: OnceLock<Map> = OnceLock::new();
 
 impl Symbol {
     pub fn intern(s: &str) -> Self {
-        let (mut shard, hash) = Self::select_shard(s);
+        let storage = MAP.get_or_init(symbols::prefill);
+        let (mut shard, hash) = Self::select_shard(storage, s);
         // Atomically,
         // - check if `obj` is already in the map
         // - if so, copy out its entry, conditionally bumping the backing Arc and return it
         // - if not, put it into a box and then into an Arc, insert it, bump the ref-count and return the copy
         // This needs to be atomic (locking the shard) to avoid races with other thread, which could
         // insert the same object between us looking it up and inserting it.
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, s) {
-            RawEntryMut::Occupied(occ) => Self { repr: increase_arc_refcount(occ.key().0) },
-            RawEntryMut::Vacant(vac) => Self {
-                repr: increase_arc_refcount(
-                    vac.insert_hashed_nocheck(
-                        hash,
-                        SymbolProxy(TaggedArcPtr::arc(Arc::new(Box::<str>::from(s)))),
-                        SharedValue::new(()),
-                    )
-                    .0
-                    .0,
-                ),
-            },
-        }
+        let bucket = match shard.find_or_find_insert_slot(
+            hash,
+            |(other, _)| other.as_str() == s,
+            |(x, _)| Self::hash(storage, x.as_str()),
+        ) {
+            Ok(bucket) => bucket,
+            // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then.
+            Err(insert_slot) => unsafe {
+                shard.insert_in_slot(
+                    hash,
+                    insert_slot,
+                    (
+                        Symbol { repr: TaggedArcPtr::arc(Arc::new(Box::<str>::from(s))) },
+                        SharedValue::new(()),
+                    ),
+                )
+            },
+        };
+        // SAFETY: We just retrieved/inserted this bucket.
+        unsafe { bucket.as_ref().0.clone() }
     }
 
     pub fn integer(i: usize) -> Self {

@@ -180,38 +187,34 @@ impl Symbol {
         symbols::__empty.clone()
     }
 
+    #[inline]
     pub fn as_str(&self) -> &str {
         self.repr.as_str()
     }
 
     #[inline]
     fn select_shard(
+        storage: &'static Map,
         s: &str,
-    ) -> (
-        dashmap::RwLockWriteGuard<
-            'static,
-            HashMap<SymbolProxy, SharedValue<()>, BuildHasherDefault<FxHasher>>,
-        >,
-        u64,
-    ) {
-        let storage = MAP.get_or_init(symbols::prefill);
-        let hash = {
-            let mut hasher = std::hash::BuildHasher::build_hasher(storage.hasher());
-            s.hash(&mut hasher);
-            hasher.finish()
-        };
+    ) -> (dashmap::RwLockWriteGuard<'static, RawTable<(Symbol, SharedValue<()>)>>, u64) {
+        let hash = Self::hash(storage, s);
         let shard_idx = storage.determine_shard(hash as usize);
         let shard = &storage.shards()[shard_idx];
         (shard.write(), hash)
     }
 
+    #[inline]
+    fn hash(storage: &'static Map, s: &str) -> u64 {
+        storage.hasher().hash_one(s)
+    }
+
     #[cold]
     fn drop_slow(arc: &Arc<Box<str>>) {
-        let (mut shard, hash) = Self::select_shard(arc);
+        let storage = MAP.get_or_init(symbols::prefill);
+        let (mut shard, hash) = Self::select_shard(storage, arc);
 
         match Arc::count(arc) {
-            0 => unreachable!(),
-            1 => unreachable!(),
+            0 | 1 => unreachable!(),
             2 => (),
             _ => {
                 // Another thread has interned another copy

@@ -219,19 +222,17 @@ impl Symbol {
             }
         }
 
-        let ptr = match shard.raw_entry_mut().from_key_hashed_nocheck::<str>(hash, arc.as_ref()) {
-            RawEntryMut::Occupied(occ) => occ.remove_entry(),
-            RawEntryMut::Vacant(_) => unreachable!(),
-        }
-        .0
-        .0;
+        let s = &***arc;
+        let (ptr, _) = shard.remove_entry(hash, |(x, _)| x.as_str() == s).unwrap();
+        let ptr = ManuallyDrop::new(ptr);
         // SAFETY: We're dropping, we have ownership.
-        ManuallyDrop::into_inner(unsafe { ptr.try_as_arc_owned().unwrap() });
+        ManuallyDrop::into_inner(unsafe { ptr.repr.try_as_arc_owned().unwrap() });
         debug_assert_eq!(Arc::count(arc), 1);
 
         // Shrink the backing storage if the shard is less than 50% occupied.
         if shard.len() * 2 < shard.capacity() {
-            shard.shrink_to_fit();
+            let len = shard.len();
+            shard.shrink_to(len, |(x, _)| Self::hash(storage, x.as_str()));
         }
     }
 }

@@ -276,22 +277,6 @@ impl fmt::Display for Symbol {
     }
 }
 
-// only exists so we can use `from_key_hashed_nocheck` with a &str
-#[derive(Debug, PartialEq, Eq)]
-struct SymbolProxy(TaggedArcPtr);
-
-impl Hash for SymbolProxy {
-    fn hash<H: Hasher>(&self, state: &mut H) {
-        self.0.as_str().hash(state);
-    }
-}
-
-impl Borrow<str> for SymbolProxy {
-    fn borrow(&self) -> &str {
-        self.0.as_str()
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -1,15 +1,12 @@
 //! Module defining all known symbols required by the rest of rust-analyzer.
 #![allow(non_upper_case_globals)]
 
-use std::hash::{BuildHasherDefault, Hash as _, Hasher as _};
+use std::hash::{BuildHasher, BuildHasherDefault};
 
 use dashmap::{DashMap, SharedValue};
 use rustc_hash::FxHasher;
 
-use crate::{
-    Symbol,
-    symbol::{SymbolProxy, TaggedArcPtr},
-};
+use crate::{Symbol, symbol::TaggedArcPtr};
 
 macro_rules! define_symbols {
     (@WITH_NAME: $($alias:ident = $value:literal,)* @PLAIN: $($name:ident,)*) => {

@@ -28,28 +25,23 @@ macro_rules! define_symbols {
         )*
 
 
-        pub(super) fn prefill() -> DashMap<SymbolProxy, (), BuildHasherDefault<FxHasher>> {
-            let mut dashmap_ = <DashMap<SymbolProxy, (), BuildHasherDefault<FxHasher>>>::with_hasher(BuildHasherDefault::default());
+        pub(super) fn prefill() -> DashMap<Symbol, (), BuildHasherDefault<FxHasher>> {
+            let mut dashmap_ = <DashMap<Symbol, (), BuildHasherDefault<FxHasher>>>::with_hasher(BuildHasherDefault::default());
 
-            let hash_thing_ = |hasher_: &BuildHasherDefault<FxHasher>, it_: &SymbolProxy| {
-                let mut hasher_ = std::hash::BuildHasher::build_hasher(hasher_);
-                it_.hash(&mut hasher_);
-                hasher_.finish()
-            };
+            let hasher_ = dashmap_.hasher().clone();
+            let hash_one = |it_: &str| hasher_.hash_one(it_);
             {
                 $(
-                    let proxy_ = SymbolProxy($name.repr);
-                    let hash_ = hash_thing_(dashmap_.hasher(), &proxy_);
+                    let s = stringify!($name);
+                    let hash_ = hash_one(s);
                     let shard_idx_ = dashmap_.determine_shard(hash_ as usize);
-                    dashmap_.shards_mut()[shard_idx_].get_mut().raw_entry_mut().from_hash(hash_, |k| k == &proxy_).insert(proxy_, SharedValue::new(()));
+                    dashmap_.shards_mut()[shard_idx_].get_mut().insert(hash_, ($name, SharedValue::new(())), |(x, _)| hash_one(x.as_str()));
                 )*
                 $(
-                    let proxy_ = SymbolProxy($alias.repr);
-                    let hash_ = hash_thing_(dashmap_.hasher(), &proxy_);
+                    let s = $value;
+                    let hash_ = hash_one(s);
                     let shard_idx_ = dashmap_.determine_shard(hash_ as usize);
-                    dashmap_.shards_mut()[shard_idx_].get_mut().raw_entry_mut().from_hash(hash_, |k| k == &proxy_).insert(proxy_, SharedValue::new(()));
+                    dashmap_.shards_mut()[shard_idx_].get_mut().insert(hash_, ($alias, SharedValue::new(())), |(x, _)| hash_one(x.as_str()));
                 )*
             }
             dashmap_
@@ -132,7 +132,7 @@ pub struct AstIdMap {
     /// Maps stable id to unstable ptr.
     arena: Arena<SyntaxNodePtr>,
     /// Reverse: map ptr to id.
-    map: hashbrown::HashMap<Idx<SyntaxNodePtr>, (), ()>,
+    map: hashbrown::HashTable<Idx<SyntaxNodePtr>>,
 }
 
 impl fmt::Debug for AstIdMap {

@@ -169,13 +169,13 @@ impl AstIdMap {
                 TreeOrder::DepthFirst
             }
         });
-        res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ());
+        res.map = hashbrown::HashTable::with_capacity(res.arena.len());
         for (idx, ptr) in res.arena.iter() {
             let hash = hash_ptr(ptr);
-            match res.map.raw_entry_mut().from_hash(hash, |idx2| *idx2 == idx) {
-                hashbrown::hash_map::RawEntryMut::Occupied(_) => unreachable!(),
-                hashbrown::hash_map::RawEntryMut::Vacant(entry) => {
-                    entry.insert_with_hasher(hash, idx, (), |&idx| hash_ptr(&res.arena[idx]));
+            match res.map.entry(hash, |&idx2| idx2 == idx, |&idx| hash_ptr(&res.arena[idx])) {
+                hashbrown::hash_table::Entry::Occupied(_) => unreachable!(),
+                hashbrown::hash_table::Entry::Vacant(entry) => {
+                    entry.insert(idx);
                 }
             }
         }

@@ -196,8 +196,8 @@ impl AstIdMap {
     pub fn ast_id_for_ptr<N: AstIdNode>(&self, ptr: AstPtr<N>) -> FileAstId<N> {
         let ptr = ptr.syntax_node_ptr();
         let hash = hash_ptr(&ptr);
-        match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) {
-            Some((&raw, &())) => FileAstId {
+        match self.map.find(hash, |&idx| self.arena[idx] == ptr) {
+            Some(&raw) => FileAstId {
                 raw: ErasedFileAstId(raw.into_raw().into_u32()),
                 covariant: PhantomData,
             },

@@ -221,8 +221,8 @@ impl AstIdMap {
     fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
         let ptr = SyntaxNodePtr::new(item);
         let hash = hash_ptr(&ptr);
-        match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) {
-            Some((&idx, &())) => ErasedFileAstId(idx.into_raw().into_u32()),
+        match self.map.find(hash, |&idx| self.arena[idx] == ptr) {
+            Some(&idx) => ErasedFileAstId(idx.into_raw().into_u32()),
             None => panic!(
                 "Can't find {:?} in AstIdMap:\n{:?}\n source text: {}",
                 item,
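`AstIdMap` moves from the hasher-less `hashbrown::HashMap<_, (), ()>` trick (raw-entry API, removed in newer hashbrown) to the purpose-built `hashbrown::HashTable`, which stores bare values and takes the hash and equality functions at each call site. A minimal sketch of that API, assuming a recent hashbrown with `HashTable` (e.g. 0.15):

use std::hash::{BuildHasher, RandomState};

use hashbrown::HashTable;

fn main() {
    let data = ["foo", "bar"];
    let s = RandomState::new();
    // The table stores only indices; the keys live in `data`, and hashing
    // and equality are supplied per call, as in AstIdMap above.
    let mut table: HashTable<usize> = HashTable::with_capacity(data.len());
    for (i, name) in data.iter().enumerate() {
        let hash = s.hash_one(*name);
        table.insert_unique(hash, i, |&i| s.hash_one(data[i]));
    }
    let found = table.find(s.hash_one("bar"), |&i| data[i] == "bar");
    assert_eq!(found, Some(&1));
}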