Fix clippy issues

andrzej.gluszak 2022-08-24 18:43:29 +02:00
parent dc696726cf
commit 73102b1e8e
36 changed files with 69 additions and 97 deletions

.gitignore vendored (1 change)
View file

@@ -3,3 +3,4 @@
 Cargo.lock
 TAGS
 nikom
+.idea

View file

@@ -3,6 +3,7 @@ use std::sync::{Arc, Mutex};
 use salsa::DebugWithDb;
 
 // ANCHOR: db_struct
+#[derive(Default)]
 #[salsa::db(crate::Jar)]
 pub(crate) struct Database {
     storage: salsa::Storage<Self>,
@@ -34,17 +35,6 @@ impl Database {
     }
 }
 
-// ANCHOR: default_impl
-impl Default for Database {
-    fn default() -> Self {
-        Self {
-            storage: Default::default(),
-            logs: None,
-        }
-    }
-}
-// ANCHOR_END: default_impl
-
 // ANCHOR: db_impl
 impl salsa::Database for Database {
     fn salsa_runtime(&self) -> &salsa::Runtime {
@@ -54,15 +44,11 @@ impl salsa::Database for Database {
     fn salsa_event(&self, event: salsa::Event) {
         // Log interesting events, if logging is enabled
         if let Some(logs) = &self.logs {
-            match event.kind {
-                salsa::EventKind::WillExecute { .. } => {
-                    logs.lock()
-                        .unwrap()
-                        .push(format!("Event: {:?}", event.debug(self)));
-                }
-                _ => {
-                    // don't log boring events
-                }
+            // don't log boring events
+            if let salsa::EventKind::WillExecute { .. } = event.kind {
+                logs.lock()
+                    .unwrap()
+                    .push(format!("Event: {:?}", event.debug(self)));
             }
         }
     }
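The two changes above are standard clippy cleanups: a hand-written `Default` impl that only fills in default values is replaced by `#[derive(Default)]`, and a `match` with one interesting arm becomes an `if let`. A minimal, self-contained sketch of both patterns (the `Logger`/`EventKind` names are illustrative, not code from this commit):

// Illustrative only; `Logger` and `EventKind` are stand-ins, not salsa types.
#[derive(Default)] // replaces a hand-written `impl Default` that only used default values
struct Logger {
    lines: Vec<String>,
}

enum EventKind {
    WillExecute,
    Other,
}

fn log_event(logger: &mut Logger, kind: EventKind) {
    // clippy::single_match: one interesting arm plus a do-nothing `_` arm
    // reads better as `if let`.
    if let EventKind::WillExecute = kind {
        logger.lines.push("will execute".to_string());
    }
}

fn main() {
    let mut logger = Logger::default();
    log_event(&mut logger, EventKind::WillExecute);
    log_event(&mut logger, EventKind::Other);
    assert_eq!(logger.lines.len(), 1); // only the interesting event was logged
}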

View file

@@ -1,6 +1,7 @@
+#![allow(clippy::needless_borrow)]
 use derive_new::new;
 use ordered_float::OrderedFloat;
 use salsa::debug::DebugWithDb;
 
 // ANCHOR: input
 #[salsa::input]

View file

@@ -25,7 +25,7 @@ pub fn parse_statements(db: &dyn crate::Db, source: SourceProgram) -> Program {
         parser.skip_whitespace();
 
         // If there are no more tokens, break
-        if let None = parser.peek() {
+        if parser.peek().is_none() {
             break;
         }
@@ -166,9 +166,9 @@ impl Parser<'_> {
     }
 
     fn low_op(&mut self) -> Option<Op> {
-        if let Some(_) = self.ch('+') {
+        if self.ch('+').is_some() {
             Some(Op::Add)
-        } else if let Some(_) = self.ch('-') {
+        } else if self.ch('-').is_some() {
             Some(Op::Subtract)
         } else {
             None
@@ -183,9 +183,9 @@ impl Parser<'_> {
     }
 
     fn high_op(&mut self) -> Option<Op> {
-        if let Some(_) = self.ch('*') {
+        if self.ch('*').is_some() {
             Some(Op::Multiply)
-        } else if let Some(_) = self.ch('/') {
+        } else if self.ch('/').is_some() {
             Some(Op::Divide)
         } else {
             None
@@ -217,7 +217,7 @@ impl Parser<'_> {
     fn parse_expression2(&mut self) -> Option<Expression> {
         let start_position = self.skip_whitespace();
         if let Some(w) = self.word() {
-            if let Some(_) = self.ch('(') {
+            if self.ch('(').is_some() {
                 let f = FunctionId::new(self.db, w);
                 let args = self.parse_expressions()?;
                 self.ch(')')?;
@@ -237,7 +237,7 @@ impl Parser<'_> {
                 self.span_from(start_position),
                 ExpressionData::Number(OrderedFloat::from(n)),
             ))
-        } else if let Some(_) = self.ch('(') {
+        } else if self.ch('(').is_some() {
             let expr = self.parse_expression()?;
             self.ch(')')?;
             Some(expr)
@@ -297,11 +297,9 @@ impl Parser<'_> {
         // In this loop, if we consume any characters, we always
         // return `Some`.
         let mut s = String::new();
-        let position = self.position;
+        let _position = self.position;
         while let Some(ch) = self.peek() {
-            if ch.is_alphabetic() || ch == '_' {
-                s.push(ch);
-            } else if !s.is_empty() && ch.is_numeric() {
+            if ch.is_alphabetic() || ch == '_' || (!s.is_empty() && ch.is_numeric()) {
                 s.push(ch);
             } else {
                 break;
@@ -321,7 +319,7 @@ impl Parser<'_> {
     ///
     /// Even on failure, only skips whitespace.
     fn number(&mut self) -> Option<f64> {
-        let start_position = self.skip_whitespace();
+        let _start_position = self.skip_whitespace();
         self.probe(|this| {
             // 👆 We need the call to `probe` here because we could consume
@@ -329,9 +327,7 @@ impl Parser<'_> {
             // still return `None`.
             let mut s = String::new();
             while let Some(ch) = this.peek() {
-                if ch.is_numeric() {
-                    s.push(ch);
-                } else if ch == '.' {
+                if ch.is_numeric() || ch == '.' {
                     s.push(ch);
                 } else {
                     break;
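The parser changes are all instances of `clippy::redundant_pattern_matching`: `if let Some(_) = expr` and `if let None = expr` only ask whether a value is present, so `expr.is_some()` / `expr.is_none()` states that directly. A small sketch under that assumption (`next_op` is a made-up helper, not part of the calc parser):

// Illustrative only; `next_op` is a stand-in for the parser's lookahead helpers.
fn next_op(tokens: &[char]) -> Option<char> {
    tokens.first().copied().filter(|c| "+-*/".contains(*c))
}

fn main() {
    let tokens = ['+', '1'];

    // Before: `if let Some(_) = next_op(&tokens) { ... }`
    if next_op(&tokens).is_some() {
        println!("found an operator");
    }

    // Before: `if let None = next_op(&[]) { ... }`
    if next_op(&[]).is_none() {
        println!("no operator at the start");
    }
}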

View file

@@ -58,7 +58,7 @@ impl CheckExpression<'_> {
                 }
             }
             crate::ir::ExpressionData::Call(f, args) => {
-                if let None = self.find_function(*f) {
+                if self.find_function(*f).is_none() {
                     self.report_error(
                         expression.span,
                         format!("the function `{}` is not declared", f.text(self.db)),
@@ -91,8 +91,6 @@ fn check_string(
     expected_diagnostics: expect_test::Expect,
     edits: &[(&str, expect_test::Expect, expect_test::Expect)],
 ) {
-    use salsa::debug::DebugWithDb;
-
     use crate::{db::Database, ir::SourceProgram, parser::parse_statements};
 
     // Create the database

View file

@@ -127,7 +127,7 @@ pub(crate) fn jar_impl(
 pub(crate) fn jar_struct(input: &ItemStruct) -> ItemStruct {
     let mut output_struct = input.clone();
-    output_struct.fields = generate_fields(&input).into();
+    output_struct.fields = generate_fields(input).into();
     if output_struct.semi_token.is_none() {
         output_struct.semi_token = Some(Token![;](input.struct_token.span));
     }

View file

@@ -102,7 +102,7 @@ impl<A: AllowedOptions> Options<A> {
             return jar_ty.clone();
         }
 
-        return parse_quote! {crate::Jar};
+        parse_quote! {crate::Jar}
     }
 
     pub(crate) fn should_backdate(&self) -> bool {

View file

@@ -365,6 +365,7 @@ impl SalsaStruct {
     }
 }
 
+#[allow(clippy::type_complexity)]
 pub(crate) const FIELD_OPTION_ATTRIBUTES: &[(&str, fn(&syn::Attribute, &mut SalsaField))] = &[
     ("id", |_, ef| ef.has_id_attr = true),
     ("return_ref", |_, ef| ef.has_ref_attr = true),

View file

@@ -10,7 +10,7 @@ pub(crate) fn tracked(
         syn::Item::Fn(item) => crate::tracked_fn::tracked(args, item),
         _ => syn::Error::new(
             item.span(),
-            &format!("tracked can be applied to structs and functions only"),
+            &"tracked can be applied to structs and functions only".to_string(),
         )
         .into_compile_error()
         .into(),

View file

@@ -16,7 +16,7 @@ pub trait Accumulator {
     type Data: Clone;
     type Jar;
 
-    fn accumulator_ingredient<'db, Db>(db: &'db Db) -> &'db AccumulatorIngredient<Self::Data>
+    fn accumulator_ingredient<Db>(db: &Db) -> &AccumulatorIngredient<Self::Data>
     where
         Db: ?Sized + HasJar<Self::Jar>;
 }

View file

@@ -21,7 +21,7 @@ where
         // consumers must be aware of. Becoming *more* durable
         // is not. See the test `constant_to_non_constant`.
         if revisions.durability >= old_memo.revisions.durability
-            && C::should_backdate_value(old_value, &value)
+            && C::should_backdate_value(old_value, value)
         {
             log::debug!(
                 "value is equal, back-dating to {:?}",

View file

@@ -24,6 +24,7 @@ where
         // two list are in sorted order, we can merge them in linear time.
         while let (Some(&old_output), Some(&new_output)) = (old_outputs.peek(), new_outputs.peek())
         {
+            #[allow(clippy::comparison_chain)]
             if old_output < new_output {
                 // Output that was generated but is no longer.
                 Self::report_stale_output(db, key, old_output);

View file

@@ -36,7 +36,7 @@ where
         db.salsa_event(Event {
             runtime_id: runtime.id(),
             kind: EventKind::WillExecute {
-                database_key: database_key_index.into(),
+                database_key: database_key_index,
             },
         });
@@ -87,7 +87,7 @@ where
         // old value.
         if let Some(old_memo) = &opt_old_memo {
             self.backdate_if_appropriate(old_memo, &mut revisions, &value);
-            self.diff_outputs(db, database_key_index, &old_memo, &revisions);
+            self.diff_outputs(db, database_key_index, old_memo, &revisions);
         }
 
         let value = self

View file

@@ -75,14 +75,12 @@ where
         // This time we can do a *deep* verify. Because this can recurse, don't hold the arcswap guard.
         let opt_old_memo = self.memo_map.get(key).map(Guard::into_inner);
         if let Some(old_memo) = &opt_old_memo {
-            if old_memo.value.is_some() {
-                if self.deep_verify_memo(db, old_memo, &active_query) {
-                    let value = unsafe {
-                        // Unsafety invariant: memo is present in memo_map.
-                        self.extend_memo_lifetime(old_memo).unwrap()
-                    };
-                    return Some(old_memo.revisions.stamped_value(value));
-                }
+            if old_memo.value.is_some() && self.deep_verify_memo(db, old_memo, &active_query) {
+                let value = unsafe {
+                    // Unsafety invariant: memo is present in memo_map.
+                    self.extend_memo_lifetime(old_memo).unwrap()
+                };
+                return Some(old_memo.revisions.stamped_value(value));
             }
         }

View file

@@ -61,7 +61,6 @@ impl<K: AsId, V> MemoMap<K, V> {
                 // assigned as output of another query
                 // or those with untracked inputs
                 // as their values cannot be reconstructed.
-                return;
             }
             QueryOrigin::Derived(_) => {
@@ -123,7 +122,7 @@ impl<V> Memo<V> {
         db.salsa_event(Event {
             runtime_id: runtime.id(),
             kind: EventKind::DidValidateMemoizedValue {
-                database_key: database_key_index.into(),
+                database_key: database_key_index,
             },
         });

View file

@@ -88,6 +88,5 @@ impl AsId for () {
     fn from_id(id: Id) -> Self {
         assert_eq!(0, id.as_u32());
-        ()
     }
 }

View file

@@ -257,6 +257,7 @@ pub struct IdentityInterner<Id: AsId> {
 }
 
 impl<Id: AsId> IdentityInterner<Id> {
+    #[allow(clippy::new_without_default)]
     pub fn new() -> Self {
         IdentityInterner { data: PhantomData }
     }

View file

@@ -51,6 +51,7 @@ pub struct Routes<DB: HasJars> {
     /// Vector indexed by ingredient index. Yields the `DynRoute`,
    /// a function which can be applied to the `DB::Jars` to yield
     /// the `dyn Ingredient.
+    #[allow(clippy::type_complexity)]
     routes: Vec<(Box<DynRoute<DB>>, Box<DynMutRoute<DB>>)>,
 
     /// Indices of routes which need a 'reset' call.

View file

@@ -55,6 +55,8 @@ pub(crate) struct StampedValue<V> {
 }
 
 impl<V> StampedValue<V> {
+    // FIXME: Use or remove this.
+    #[allow(dead_code)]
     pub(crate) fn merge_revision_info<U>(&mut self, other: &StampedValue<U>) {
         self.durability = self.durability.min(other.durability);
         self.changed_at = self.changed_at.max(other.changed_at);
@@ -284,7 +286,7 @@ impl Runtime {
             runtime_id: self.id(),
             kind: EventKind::WillBlockOn {
                 other_runtime_id: other_id,
-                database_key: database_key.into(),
+                database_key,
             },
         });
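The `.into()` calls dropped here (and in execute and memo handling above) are `clippy::useless_conversion`: the value already has the target type, so the conversion is a no-op, and where the field name matches the variable the field-init shorthand applies as well. A hedged, standalone illustration with a stand-in `DatabaseKeyIndex`:

// Illustrative only; this `DatabaseKeyIndex` is a stand-in struct, not salsa's.
#[derive(Clone, Copy, Debug, PartialEq)]
struct DatabaseKeyIndex(u32);

struct Event {
    database_key: DatabaseKeyIndex,
}

fn main() {
    let database_key = DatabaseKeyIndex(7);

    // Before: `database_key: database_key.into()` converted a value into its own type.
    // After: the no-op conversion is gone and the field-init shorthand is used.
    let event = Event { database_key };

    assert_eq!(event.database_key, DatabaseKeyIndex(7));
}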

View file

@@ -104,7 +104,7 @@ impl ActiveQuery {
             self.dependencies
                 .iter()
                 .copied()
-                .chain(self.outputs.iter().map(|&o| o.into()))
+                .chain(self.outputs.iter().copied())
                 .collect()
         };

View file

@@ -200,7 +200,7 @@ impl LocalState {
     pub(super) fn is_output(&self, entity: DatabaseKeyIndex) -> bool {
         self.with_query_stack(|stack| {
             if let Some(top_query) = stack.last_mut() {
-                top_query.is_output(entity.into())
+                top_query.is_output(entity)
             } else {
                 false
             }
@@ -259,6 +259,8 @@ impl LocalState {
     /// Update the top query on the stack to act as though it read a value
     /// of durability `durability` which changed in `revision`.
+    // FIXME: Use or remove this.
+    #[allow(dead_code)]
     pub(super) fn report_synthetic_read(&self, durability: Durability, revision: Revision) {
         self.with_query_stack(|stack| {
             if let Some(top_query) = stack.last_mut() {

View file

@@ -183,9 +183,9 @@ pub trait DbWithJar<J>: HasJar<J> + Database {
 }
 
 pub trait JarFromJars<J>: HasJars {
-    fn jar_from_jars<'db>(jars: &Self::Jars) -> &J;
+    fn jar_from_jars(jars: &Self::Jars) -> &J;
 
-    fn jar_from_jars_mut<'db>(jars: &mut Self::Jars) -> &mut J;
+    fn jar_from_jars_mut(jars: &mut Self::Jars) -> &mut J;
 }
 
 pub trait HasJar<J> {

View file

@@ -152,11 +152,10 @@ where
     fn mark_validated_output(
         &self,
         _db: &DB,
-        executor: DatabaseKeyIndex,
-        output_key: Option<crate::Id>,
+        _executor: DatabaseKeyIndex,
+        _output_key: Option<crate::Id>,
     ) {
         // FIXME
-        drop((executor, output_key));
     }
 
     fn remove_stale_output(

View file

@@ -206,7 +206,7 @@ pub(crate) fn database(args: TokenStream, input: TokenStream) -> TokenStream {
     if std::env::var("SALSA_DUMP").is_ok() {
         println!("~~~ database_storage");
-        println!("{}", output.to_string());
+        println!("{}", output);
         println!("~~~ database_storage");
     }

View file

@@ -658,7 +658,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
     if std::env::var("SALSA_DUMP").is_ok() {
         println!("~~~ query_group");
-        println!("{}", output.to_string());
+        println!("{}", output);
         println!("~~~ query_group");
     }

View file

@@ -18,7 +18,7 @@ pub trait HasLogger {
     /// clearing the logged events. This takes `&mut self` because
     /// it is meant to be run from outside any tracked functions.
     fn assert_logs(&mut self, expected: expect_test::Expect) {
-        let logs = std::mem::replace(&mut *self.logger().logs.lock().unwrap(), vec![]);
+        let logs = std::mem::take(&mut *self.logger().logs.lock().unwrap());
         expected.assert_eq(&format!("{:#?}", logs));
     }
@@ -26,7 +26,7 @@ pub trait HasLogger {
     /// clearing the logged events. This takes `&mut self` because
     /// it is meant to be run from outside any tracked functions.
     fn assert_logs_len(&mut self, expected: usize) {
-        let logs = std::mem::replace(&mut *self.logger().logs.lock().unwrap(), vec![]);
+        let logs = std::mem::take(&mut *self.logger().logs.lock().unwrap());
         assert_eq!(logs.len(), expected);
     }
 }
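`std::mem::replace(&mut x, vec![])` swaps in an explicitly constructed empty vector; `std::mem::take(&mut x)` does the same thing via the type's `Default` and states the intent directly, which is what `clippy::mem_replace_with_default` suggests. A minimal sketch:

use std::mem;

fn main() {
    let mut logs: Vec<String> = vec!["event a".into(), "event b".into()];

    // Before: `let drained = mem::replace(&mut logs, vec![]);`
    // `take` swaps in `Default::default()` (an empty Vec) and returns the old contents.
    let drained = mem::take(&mut logs);

    assert_eq!(drained.len(), 2);
    assert!(logs.is_empty());
}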

View file

@@ -96,7 +96,7 @@ fn basic() {
     // Creates 3 tracked structs
     let input = MyInput::new(&mut db, 3);
-    assert_eq!(final_result(&db, input), 2 * 2 + 1 * 2 + 0 * 2);
+    assert_eq!(final_result(&db, input), 2 * 2 + 2);
     db.assert_logs(expect![[r#"
         [
             "final_result(MyInput(Id { value: 1 }))",
@@ -118,7 +118,7 @@ fn basic() {
     // * the `copy_field` result
     input.set_field(&mut db).to(2);
-    assert_eq!(final_result(&db, input), 1 * 2 + 0 * 2);
+    assert_eq!(final_result(&db, input), 2);
     db.assert_logs(expect![[r#"
         [
             "intermediate_result(MyInput(Id { value: 1 }))",

View file

@@ -89,7 +89,7 @@ fn basic() {
     // Creates 3 tracked structs
     let input = MyInput::new(&mut db, 3);
-    assert_eq!(final_result(&db, input), 2 * 2 + 1 * 2 + 0 * 2);
+    assert_eq!(final_result(&db, input), 2 * 2 + 2);
     db.assert_logs(expect![[r#"
         [
             "final_result(MyInput(Id { value: 1 }))",
@@ -104,7 +104,7 @@ fn basic() {
     // * the struct's field
     // * the `contribution_from_struct` result
     input.set_field(&mut db).to(2);
-    assert_eq!(final_result(&db, input), 1 * 2 + 0 * 2);
+    assert_eq!(final_result(&db, input), 2);
     db.assert_logs(expect![[r#"
         [
             "intermediate_result(MyInput(Id { value: 1 }))",

View file

@@ -45,7 +45,7 @@ impl<K, V> TableEntry<K, V> {
     }
 }
 
-impl<'d, Q> DebugQueryTable for QueryTable<'_, Q>
+impl<Q> DebugQueryTable for QueryTable<'_, Q>
 where
     Q: Query,
     Q::Storage: QueryStorageOps<Q>,

View file

@@ -107,7 +107,7 @@ where
         DatabaseKeyIndex {
             group_index: self.group_index,
             query_index: Q::QUERY_INDEX,
-            key_index: key_index,
+            key_index,
         }
     }

View file

@@ -49,11 +49,7 @@ where
         K: Borrow<S>,
     {
         // Common case: get an existing key
-        if let Some(v) = self.index_map.get(key) {
-            Some(*v)
-        } else {
-            None
-        }
+        self.index_map.get(key).map(|v| *v)
     }
 
     pub(super) fn key_for_key_index(&self, key_index: DerivedKeyIndex) -> K {
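This is `clippy::manual_map`: an `if let Some(..) { Some(..) } else { None }` block is just a `map` over the `Option`. A self-contained sketch with a stand-in `HashMap` (clippy would further accept `.copied()` in place of `.map(|v| *v)`):

use std::collections::HashMap;

// Illustrative only; this `index_map` is a plain HashMap, not the interner's map.
fn key_index_for(index_map: &HashMap<String, u32>, key: &str) -> Option<u32> {
    // Before:
    //     if let Some(v) = index_map.get(key) { Some(*v) } else { None }
    // After: map over the Option instead of re-matching it by hand.
    index_map.get(key).map(|v| *v)
}

fn main() {
    let mut index_map = HashMap::new();
    index_map.insert("a".to_string(), 0_u32);

    assert_eq!(key_index_for(&index_map, "a"), Some(0));
    assert_eq!(key_index_for(&index_map, "b"), None);
}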

View file

@@ -39,7 +39,7 @@ impl<V> MemoMap<V> {
         // but I can't see a clean way to encapsulate it otherwise. I suppose
         // it could take a closure, but it seems silly.
         match self.map.entry(key) {
-            Entry::Vacant(_) => return,
+            Entry::Vacant(_) => (),
             Entry::Occupied(entry) => {
                 let memo = entry.get().load();

View file

@@ -157,10 +157,9 @@ where
         slots
             .values()
             .map(|slot| {
-                let value = match &*slot.stamped_value.read() {
-                    Some(stamped_value) => Some(stamped_value.value.clone()),
-                    None => None,
-                };
+                let value = (*slot.stamped_value.read())
+                    .as_ref()
+                    .map(|stamped_value| stamped_value.value.clone());
                 TableEntry::new(slot.key.clone(), value)
             })
             .collect()
@@ -239,7 +238,7 @@ where
         // (Otherwise, someone else might write a *newer* revision
         // into the same cell while we block on the lock.)
         let stamped_value = StampedValue {
-            value: value,
+            value,
             durability,
             changed_at: next_revision,
         };

View file

@@ -49,6 +49,7 @@ struct Error {
     cycle: Vec<String>,
 }
 
+#[derive(Default)]
 #[salsa::database(GroupStruct)]
 struct DatabaseImpl {
     storage: salsa::Storage<Self>,
@@ -64,16 +65,6 @@ impl ParallelDatabase for DatabaseImpl {
     }
 }
 
-impl Default for DatabaseImpl {
-    fn default() -> Self {
-        let res = DatabaseImpl {
-            storage: salsa::Storage::default(),
-        };
-        res
-    }
-}
-
 /// The queries A, B, and C in `Database` can be configured
 /// to invoke one another in arbitrary ways using this
 /// enum.

View file

@@ -39,6 +39,7 @@ fn c(db: &dyn QueryGroup, x: u32) -> u32 {
 struct Database {
     storage: salsa::Storage<Self>,
     external_state: HashMap<u32, u32>,
+    #[allow(clippy::type_complexity)]
     on_event: Option<Box<dyn Fn(&Database, salsa::Event)>>,
 }

View file

@@ -46,7 +46,7 @@ impl<T> WithValue<T> for Cell<T> {
     fn with_value<R>(&self, value: T, closure: impl FnOnce() -> R) -> R {
         let old_value = self.replace(value);
-        let result = catch_unwind(AssertUnwindSafe(|| closure()));
+        let result = catch_unwind(AssertUnwindSafe(closure));
         self.set(old_value);
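The closure wrapper removed here is `clippy::redundant_closure`: `|| closure()` only forwards to `closure`, so the callable can be passed directly. An illustrative sketch (the `run` helper is made up, standing in for `catch_unwind`):

// Illustrative only; `run` is a stand-in for any function taking a callable.
fn run<R>(f: impl FnOnce() -> R) -> R {
    f()
}

fn main() {
    let closure = || 40 + 2;

    // Before: `run(|| closure())` wrapped the callable in another closure.
    // After: the callable is passed directly.
    let result = run(closure);

    assert_eq!(result, 42);
}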