mirror of
https://github.com/salsa-rs/salsa.git
synced 2025-02-02 09:46:06 +00:00
Merge #343
343: Whoops r=nikomatsakis a=nikomatsakis Revert a bunch of commits I apparently pushed by accident! Co-authored-by: Niko Matsakis <niko@alum.mit.edu>
This commit is contained in:
commit
367c0bfe11
23 changed files with 72 additions and 635 deletions
|
@ -136,16 +136,10 @@ fn has_jars_dyn_impl(input: &syn::ItemStruct, storage: &syn::Ident) -> syn::Item
|
|||
fn inputs(
|
||||
&self,
|
||||
index: salsa::DatabaseKeyIndex,
|
||||
) -> Option<salsa::runtime::local_state::QueryEdges> {
|
||||
) -> Option<salsa::runtime::local_state::QueryInputs> {
|
||||
let ingredient = self.#storage.ingredient(index.ingredient_index());
|
||||
ingredient.inputs(index.key_index())
|
||||
}
|
||||
|
||||
fn remove_stale_output(&self, executor: salsa::DatabaseKeyIndex, stale_output: salsa::key::DependencyIndex) {
|
||||
let ingredient = self.#storage.ingredient(stale_output.ingredient_index());
|
||||
ingredient.remove_stale_output(executor, stale_output.key_index());
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -415,7 +415,7 @@ fn specify_fn(
|
|||
|
||||
let (__jar, __runtime) = <_ as salsa::storage::HasJar<#jar_ty>>::jar(#db_var);
|
||||
let __ingredients = <_ as salsa::storage::HasIngredientsFor<#config_ty>>::ingredient(__jar);
|
||||
__ingredients.function.specify_and_record(#db_var, #(#arg_names,)* #value_arg)
|
||||
__ingredients.function.specify(#db_var, #(#arg_names,)* #value_arg)
|
||||
}
|
||||
},
|
||||
}))
|
||||
|
|
|
@ -3,7 +3,7 @@ use crate::{
|
|||
hash::FxDashMap,
|
||||
ingredient::{Ingredient, MutIngredient},
|
||||
key::DependencyIndex,
|
||||
runtime::{local_state::QueryEdges, StampedValue},
|
||||
runtime::{local_state::QueryInputs, StampedValue},
|
||||
storage::HasJar,
|
||||
DatabaseKeyIndex, Durability, IngredientIndex, Revision, Runtime,
|
||||
};
|
||||
|
@ -78,13 +78,9 @@ where
|
|||
CycleRecoveryStrategy::Panic
|
||||
}
|
||||
|
||||
fn inputs(&self, _key_index: crate::Id) -> Option<QueryEdges> {
|
||||
fn inputs(&self, _key_index: crate::Id) -> Option<QueryInputs> {
|
||||
None
|
||||
}
|
||||
|
||||
fn remove_stale_output(&self, executor: DatabaseKeyIndex, stale_output_key: Option<crate::Id>) {
|
||||
// FIXME
|
||||
}
|
||||
}
|
||||
|
||||
impl<DB: ?Sized, Data> MutIngredient<DB> for AccumulatorIngredient<Data>
|
||||
|
|
|
@ -7,8 +7,8 @@ pub trait Database: HasJarsDyn + AsSalsaDatabase {
|
|||
///
|
||||
/// By default, the event is logged at level debug using
|
||||
/// the standard `log` facade.
|
||||
fn salsa_event(&self, event: Event) {
|
||||
log::debug!("salsa_event: {:?}", event.debug(self));
|
||||
fn salsa_event(&self, event_fn: Event) {
|
||||
log::debug!("salsa_event: {:?}", event_fn.debug(self));
|
||||
}
|
||||
|
||||
fn salsa_runtime(&self) -> &Runtime;
|
||||
|
|
|
@ -1,6 +1,4 @@
|
|||
use crate::{
|
||||
debug::DebugWithDb, key::DependencyIndex, runtime::RuntimeId, Database, DatabaseKeyIndex,
|
||||
};
|
||||
use crate::{debug::DebugWithDb, runtime::RuntimeId, Database, DatabaseKeyIndex};
|
||||
use std::fmt;
|
||||
|
||||
/// The `Event` struct identifies various notable things that can
|
||||
|
@ -76,15 +74,6 @@ pub enum EventKind {
|
|||
/// Indicates that `unwind_if_cancelled` was called and salsa will check if
|
||||
/// the current revision has been cancelled.
|
||||
WillCheckCancellation,
|
||||
|
||||
/// Discovered that a query used to output a given output but no longer does.
|
||||
WillDiscardStaleOutput {
|
||||
/// Key for the query that is executing and which no longer outputs the given value.
|
||||
execute_key: DatabaseKeyIndex,
|
||||
|
||||
/// Key for the query that is no longer output
|
||||
output_key: DependencyIndex,
|
||||
},
|
||||
}
|
||||
|
||||
impl fmt::Debug for EventKind {
|
||||
|
@ -107,14 +96,6 @@ impl fmt::Debug for EventKind {
|
|||
.field("database_key", database_key)
|
||||
.finish(),
|
||||
EventKind::WillCheckCancellation => fmt.debug_struct("WillCheckCancellation").finish(),
|
||||
EventKind::WillDiscardStaleOutput {
|
||||
execute_key,
|
||||
output_key,
|
||||
} => fmt
|
||||
.debug_struct("WillDiscardStaleOutput")
|
||||
.field("execute_key", &execute_key)
|
||||
.field("output_key", &output_key)
|
||||
.finish(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -142,14 +123,6 @@ where
|
|||
.field("database_key", &database_key.debug(db))
|
||||
.finish(),
|
||||
EventKind::WillCheckCancellation => fmt.debug_struct("WillCheckCancellation").finish(),
|
||||
EventKind::WillDiscardStaleOutput {
|
||||
execute_key,
|
||||
output_key,
|
||||
} => fmt
|
||||
.debug_struct("WillDiscardStaleOutput")
|
||||
.field("execute_key", &execute_key.debug(db))
|
||||
.field("output_key", &output_key.debug(db))
|
||||
.finish(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,7 +8,7 @@ use crate::{
|
|||
ingredient::MutIngredient,
|
||||
jar::Jar,
|
||||
key::{DatabaseKeyIndex, DependencyIndex},
|
||||
runtime::local_state::QueryEdges,
|
||||
runtime::local_state::QueryInputs,
|
||||
salsa_struct::SalsaStructInDb,
|
||||
Cycle, DbWithJar, Id, Revision,
|
||||
};
|
||||
|
@ -17,7 +17,6 @@ use super::{ingredient::Ingredient, routes::IngredientIndex, AsId};
|
|||
|
||||
mod accumulated;
|
||||
mod backdate;
|
||||
mod diff_outputs;
|
||||
mod execute;
|
||||
mod fetch;
|
||||
mod inputs;
|
||||
|
@ -199,15 +198,10 @@ where
|
|||
C::CYCLE_STRATEGY
|
||||
}
|
||||
|
||||
fn inputs(&self, key_index: Id) -> Option<QueryEdges> {
|
||||
fn inputs(&self, key_index: Id) -> Option<QueryInputs> {
|
||||
let key = C::key_from_id(key_index);
|
||||
self.inputs(key)
|
||||
}
|
||||
|
||||
fn remove_stale_output(&self, executor: DatabaseKeyIndex, stale_output_key: Option<crate::Id>) {
|
||||
let stale_output_key = C::key_from_id(stale_output_key.unwrap());
|
||||
// FIXME
|
||||
}
|
||||
}
|
||||
|
||||
impl<DB, C> MutIngredient<DB> for FunctionIngredient<C>
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use crate::{
|
||||
hash::FxHashSet,
|
||||
key::DependencyIndex,
|
||||
runtime::local_state::QueryEdges,
|
||||
runtime::local_state::QueryInputs,
|
||||
storage::{HasJar, HasJarsDyn},
|
||||
Database, DatabaseKeyIndex,
|
||||
};
|
||||
|
@ -55,7 +55,7 @@ impl Stack {
|
|||
self.v.pop()
|
||||
}
|
||||
|
||||
fn extend(&mut self, inputs: Option<QueryEdges>) {
|
||||
fn extend(&mut self, inputs: Option<QueryInputs>) {
|
||||
let inputs = match inputs {
|
||||
None => return,
|
||||
Some(v) => v,
|
||||
|
@ -64,7 +64,7 @@ impl Stack {
|
|||
for DependencyIndex {
|
||||
ingredient_index,
|
||||
key_index,
|
||||
} in inputs.inputs().iter().copied()
|
||||
} in inputs.tracked.iter().copied()
|
||||
{
|
||||
if let Some(key_index) = key_index {
|
||||
let i = DatabaseKeyIndex {
|
||||
|
|
|
@ -1,65 +0,0 @@
|
|||
use crate::{
|
||||
key::DependencyIndex, runtime::local_state::QueryRevisions, storage::HasJarsDyn, Database,
|
||||
DatabaseKeyIndex, Event, EventKind,
|
||||
};
|
||||
|
||||
use super::{memo::Memo, Configuration, DynDb, FunctionIngredient};
|
||||
|
||||
impl<C> FunctionIngredient<C>
|
||||
where
|
||||
C: Configuration,
|
||||
{
|
||||
/// Compute the old and new outputs and invoke the `clear_stale_output` callback
|
||||
/// for each output that was generated before but is not generated now.
|
||||
pub(super) fn diff_outputs(
|
||||
&self,
|
||||
db: &DynDb<'_, C>,
|
||||
key: DatabaseKeyIndex,
|
||||
old_memo: &Memo<C::Value>,
|
||||
revisions: &QueryRevisions,
|
||||
) {
|
||||
let mut old_outputs = old_memo
|
||||
.revisions
|
||||
.edges
|
||||
.outputs()
|
||||
.iter()
|
||||
.copied()
|
||||
.peekable();
|
||||
let mut new_outputs = revisions.edges.outputs().iter().copied().peekable();
|
||||
|
||||
// two list are in sorted order, we can merge them in linear time.
|
||||
while let (Some(&old_output), Some(&new_output)) = (old_outputs.peek(), new_outputs.peek())
|
||||
{
|
||||
if old_output < new_output {
|
||||
// Output that was generated but is no longer.
|
||||
Self::report_stale_output(db, key, old_output);
|
||||
old_outputs.next();
|
||||
} else if new_output < old_output {
|
||||
// This is a new output that was not generated before.
|
||||
// No action needed.
|
||||
new_outputs.next();
|
||||
} else {
|
||||
// Output generated both times.
|
||||
old_outputs.next();
|
||||
new_outputs.next();
|
||||
}
|
||||
}
|
||||
|
||||
for old_output in old_outputs {
|
||||
Self::report_stale_output(db, key, old_output);
|
||||
}
|
||||
}
|
||||
|
||||
fn report_stale_output(db: &DynDb<'_, C>, key: DatabaseKeyIndex, output: DependencyIndex) {
|
||||
let runtime_id = db.salsa_runtime().id();
|
||||
db.salsa_event(Event {
|
||||
runtime_id,
|
||||
kind: EventKind::WillDiscardStaleOutput {
|
||||
execute_key: key,
|
||||
output_key: output,
|
||||
},
|
||||
});
|
||||
|
||||
db.remove_stale_output(key, output);
|
||||
}
|
||||
}
|
|
@ -87,7 +87,6 @@ where
|
|||
// old value.
|
||||
if let Some(old_memo) = &opt_old_memo {
|
||||
self.backdate_if_appropriate(old_memo, &mut revisions, &value);
|
||||
self.diff_outputs(db, database_key_index, &old_memo, &revisions);
|
||||
}
|
||||
|
||||
let value = self
|
||||
|
|
|
@ -93,7 +93,7 @@ where
|
|||
if let Some(memo) = self.memo_map.get(key) {
|
||||
// Careful: we can't evict memos with untracked inputs
|
||||
// as their values cannot be reconstructed.
|
||||
if memo.revisions.edges.untracked {
|
||||
if memo.revisions.inputs.untracked {
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use crate::runtime::local_state::QueryEdges;
|
||||
use crate::runtime::local_state::QueryInputs;
|
||||
|
||||
use super::{Configuration, FunctionIngredient};
|
||||
|
||||
|
@ -6,7 +6,7 @@ impl<C> FunctionIngredient<C>
|
|||
where
|
||||
C: Configuration,
|
||||
{
|
||||
pub(super) fn inputs(&self, key: C::Key) -> Option<QueryEdges> {
|
||||
self.memo_map.get(key).map(|m| m.revisions.edges.clone())
|
||||
pub(super) fn inputs(&self, key: C::Key) -> Option<QueryInputs> {
|
||||
self.memo_map.get(key).map(|m| m.revisions.inputs.clone())
|
||||
}
|
||||
}
|
||||
|
|
|
@ -158,13 +158,13 @@ where
|
|||
return true;
|
||||
}
|
||||
|
||||
if old_memo.revisions.edges.untracked {
|
||||
if old_memo.revisions.inputs.untracked {
|
||||
// Untracked inputs? Have to assume that it changed.
|
||||
return false;
|
||||
}
|
||||
|
||||
let last_verified_at = old_memo.verified_at.load();
|
||||
for &input in old_memo.revisions.edges.inputs().iter() {
|
||||
for &input in old_memo.revisions.inputs.tracked.iter() {
|
||||
if db.maybe_changed_after(input, last_verified_at) {
|
||||
return false;
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use crossbeam::atomic::AtomicCell;
|
||||
|
||||
use crate::{
|
||||
runtime::local_state::{QueryEdges, QueryRevisions},
|
||||
runtime::local_state::{QueryInputs, QueryRevisions},
|
||||
tracked_struct::TrackedStructInDb,
|
||||
Database,
|
||||
};
|
||||
|
@ -26,8 +26,8 @@ where
|
|||
None => panic!("can only use `set` with an active query"),
|
||||
};
|
||||
|
||||
let database_key_index = key.database_key_index(db);
|
||||
if !runtime.is_output_of_active_query(database_key_index) {
|
||||
let entity_index = key.database_key_index(db);
|
||||
if !runtime.was_entity_created(entity_index) {
|
||||
panic!("can only use `set` on entities created during current query");
|
||||
}
|
||||
|
||||
|
@ -49,22 +49,20 @@ where
|
|||
//
|
||||
// - a result that is verified in the current revision, because it was set, which will use the set value
|
||||
// - a result that is NOT verified and has untracked inputs, which will re-execute (and likely panic)
|
||||
let edges = QueryEdges {
|
||||
let inputs = QueryInputs {
|
||||
untracked: false,
|
||||
separator: 0,
|
||||
input_outputs: runtime.empty_dependencies(),
|
||||
tracked: runtime.empty_dependencies(),
|
||||
};
|
||||
|
||||
let revision = runtime.current_revision();
|
||||
let mut revisions = QueryRevisions {
|
||||
changed_at: current_deps.changed_at,
|
||||
durability: current_deps.durability,
|
||||
edges,
|
||||
inputs,
|
||||
};
|
||||
|
||||
if let Some(old_memo) = self.memo_map.get(key) {
|
||||
self.backdate_if_appropriate(&old_memo, &mut revisions, &value);
|
||||
self.diff_outputs(db, database_key_index, &old_memo, &revisions);
|
||||
}
|
||||
|
||||
let memo = Memo {
|
||||
|
@ -73,20 +71,6 @@ where
|
|||
revisions,
|
||||
};
|
||||
|
||||
log::debug!("specify: about to add memo {:#?} for key {:?}", memo, key);
|
||||
self.insert_memo(key, memo);
|
||||
}
|
||||
|
||||
/// Specify the value for `key` *and* record that we did so.
|
||||
/// Used for explicit calls to `specify`, but not needed for pre-declared tracked struct fields.
|
||||
pub fn specify_and_record<'db>(&self, db: &'db DynDb<'db, C>, key: C::Key, value: C::Value)
|
||||
where
|
||||
C::Key: TrackedStructInDb<DynDb<'db, C>>,
|
||||
{
|
||||
self.specify(db, key, value);
|
||||
|
||||
// Record that the current query *specified* a value for this cell.
|
||||
let database_key_index = self.database_key_index(key);
|
||||
db.salsa_runtime().add_output(database_key_index);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,7 +4,7 @@ use crossbeam::atomic::AtomicCell;
|
|||
|
||||
use crate::{
|
||||
durability::Durability,
|
||||
runtime::local_state::{QueryEdges, QueryRevisions},
|
||||
runtime::local_state::{QueryInputs, QueryRevisions},
|
||||
Runtime,
|
||||
};
|
||||
|
||||
|
@ -28,10 +28,9 @@ where
|
|||
revisions: QueryRevisions {
|
||||
changed_at: revision,
|
||||
durability,
|
||||
edges: QueryEdges {
|
||||
inputs: QueryInputs {
|
||||
untracked: false,
|
||||
separator: 0,
|
||||
input_outputs: runtime.empty_dependencies(),
|
||||
tracked: runtime.empty_dependencies(),
|
||||
},
|
||||
},
|
||||
};
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
use crate::{
|
||||
cycle::CycleRecoveryStrategy, key::DependencyIndex, runtime::local_state::QueryEdges,
|
||||
DatabaseKeyIndex, Id,
|
||||
cycle::CycleRecoveryStrategy, key::DependencyIndex, runtime::local_state::QueryInputs, Id,
|
||||
};
|
||||
|
||||
use super::Revision;
|
||||
|
@ -22,13 +21,7 @@ pub trait Ingredient<DB: ?Sized> {
|
|||
fn maybe_changed_after(&self, db: &DB, input: DependencyIndex, revision: Revision) -> bool;
|
||||
|
||||
/// What were the inputs (if any) that were used to create the value at `key_index`.
|
||||
fn inputs(&self, key_index: Id) -> Option<QueryEdges>;
|
||||
|
||||
/// Invoked when the value `stale_output` was output by `executor` in a previous
|
||||
/// revision, but was NOT output in the current revision.
|
||||
///
|
||||
/// This hook is used to clear out the stale value so others cannot read it.
|
||||
fn remove_stale_output(&self, executor: DatabaseKeyIndex, stale_output_key: Option<Id>);
|
||||
fn inputs(&self, key_index: Id) -> Option<QueryInputs>;
|
||||
}
|
||||
|
||||
/// Optional trait for ingredients that wish to be notified when new revisions are
|
||||
|
|
|
@ -2,7 +2,7 @@ use crate::{
|
|||
cycle::CycleRecoveryStrategy,
|
||||
ingredient::Ingredient,
|
||||
key::{DatabaseKeyIndex, DependencyIndex},
|
||||
runtime::{local_state::QueryEdges, Runtime},
|
||||
runtime::{local_state::QueryInputs, Runtime},
|
||||
AsId, IngredientIndex, Revision,
|
||||
};
|
||||
|
||||
|
@ -58,11 +58,7 @@ where
|
|||
CycleRecoveryStrategy::Panic
|
||||
}
|
||||
|
||||
fn inputs(&self, _key_index: crate::Id) -> Option<QueryEdges> {
|
||||
fn inputs(&self, _key_index: crate::Id) -> Option<QueryInputs> {
|
||||
None
|
||||
}
|
||||
|
||||
fn remove_stale_output(&self, executor: DatabaseKeyIndex, stale_output_key: Option<crate::Id>) {
|
||||
unreachable!("input cannot be the output of a tracked function");
|
||||
}
|
||||
}
|
||||
|
|
|
@ -6,9 +6,8 @@ use std::marker::PhantomData;
|
|||
use crate::durability::Durability;
|
||||
use crate::id::AsId;
|
||||
use crate::key::DependencyIndex;
|
||||
use crate::runtime::local_state::QueryEdges;
|
||||
use crate::runtime::local_state::QueryInputs;
|
||||
use crate::runtime::Runtime;
|
||||
use crate::DatabaseKeyIndex;
|
||||
|
||||
use super::hash::FxDashMap;
|
||||
use super::ingredient::Ingredient;
|
||||
|
@ -195,13 +194,9 @@ where
|
|||
crate::cycle::CycleRecoveryStrategy::Panic
|
||||
}
|
||||
|
||||
fn inputs(&self, _key_index: crate::Id) -> Option<QueryEdges> {
|
||||
fn inputs(&self, _key_index: crate::Id) -> Option<QueryInputs> {
|
||||
None
|
||||
}
|
||||
|
||||
fn remove_stale_output(&self, executor: DatabaseKeyIndex, stale_output_key: Option<crate::Id>) {
|
||||
unreachable!("interned ids are not outputs");
|
||||
}
|
||||
}
|
||||
|
||||
pub struct IdentityInterner<Id: AsId> {
|
||||
|
|
|
@ -144,15 +144,15 @@ impl Runtime {
|
|||
}
|
||||
}
|
||||
|
||||
/// Adds `key` to the list of output created by the current query
|
||||
/// (if not already present).
|
||||
pub(crate) fn add_output(&self, key: DatabaseKeyIndex) {
|
||||
self.local_state.add_output(key);
|
||||
/// Adds `entity` to the lits of entities created by the current query.
|
||||
/// Panics if `entity` was already added.
|
||||
pub(crate) fn add_entity_created(&self, entity: DatabaseKeyIndex) {
|
||||
self.local_state.add_entity_created(entity);
|
||||
}
|
||||
|
||||
/// Check whether `entity` is contained the list of outputs written by the current query.
|
||||
pub(super) fn is_output_of_active_query(&self, entity: DatabaseKeyIndex) -> bool {
|
||||
self.local_state.is_output(entity)
|
||||
/// Check whether `entity` is contained the list of entities created by the current query.
|
||||
pub(super) fn was_entity_created(&self, entity: DatabaseKeyIndex) -> bool {
|
||||
self.local_state.was_entity_created(entity)
|
||||
}
|
||||
|
||||
/// Called when the active queries creates an index from the
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
use std::collections::BTreeSet;
|
||||
|
||||
use crate::{
|
||||
durability::Durability,
|
||||
hash::{FxHashSet, FxIndexMap, FxIndexSet},
|
||||
|
@ -8,7 +6,7 @@ use crate::{
|
|||
Cycle, Revision, Runtime,
|
||||
};
|
||||
|
||||
use super::local_state::{QueryEdges, QueryRevisions};
|
||||
use super::local_state::{QueryInputs, QueryRevisions};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(super) struct ActiveQuery {
|
||||
|
@ -36,15 +34,8 @@ pub(super) struct ActiveQuery {
|
|||
/// Otherwise it is 1 more than the current value (which is incremented).
|
||||
pub(super) disambiguator_map: FxIndexMap<u64, Disambiguator>,
|
||||
|
||||
/// Tracks values written by this query. Could be...
|
||||
///
|
||||
/// * tracked structs created
|
||||
/// * invocations of `specify`
|
||||
/// * accumulators pushed to
|
||||
///
|
||||
/// We use a btree-set because we want to be able to
|
||||
/// extract the keys in sorted order.
|
||||
pub(super) outputs: BTreeSet<DatabaseKeyIndex>,
|
||||
/// Tracks entities created by this query.
|
||||
pub(super) entities_created: FxHashSet<DatabaseKeyIndex>,
|
||||
}
|
||||
|
||||
impl ActiveQuery {
|
||||
|
@ -57,7 +48,7 @@ impl ActiveQuery {
|
|||
untracked_read: false,
|
||||
cycle: None,
|
||||
disambiguator_map: Default::default(),
|
||||
outputs: Default::default(),
|
||||
entities_created: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -84,38 +75,28 @@ impl ActiveQuery {
|
|||
self.changed_at = self.changed_at.max(revision);
|
||||
}
|
||||
|
||||
/// Adds a key to our list of outputs.
|
||||
pub(super) fn add_output(&mut self, key: DatabaseKeyIndex) {
|
||||
self.outputs.insert(key);
|
||||
pub(super) fn add_entity_created(&mut self, entity: DatabaseKeyIndex) {
|
||||
let is_new = self.entities_created.insert(entity);
|
||||
assert!(is_new);
|
||||
}
|
||||
|
||||
/// True if the given key was output by this query.
|
||||
pub(super) fn is_output(&self, key: DatabaseKeyIndex) -> bool {
|
||||
self.outputs.contains(&key)
|
||||
pub(super) fn was_entity_created(&self, entity: DatabaseKeyIndex) -> bool {
|
||||
self.entities_created.contains(&entity)
|
||||
}
|
||||
|
||||
pub(crate) fn revisions(&self, runtime: &Runtime) -> QueryRevisions {
|
||||
let separator = u32::try_from(self.dependencies.len()).unwrap();
|
||||
|
||||
let input_outputs = if self.dependencies.is_empty() && self.outputs.is_empty() {
|
||||
runtime.empty_dependencies()
|
||||
} else {
|
||||
self.dependencies
|
||||
.iter()
|
||||
.copied()
|
||||
.chain(self.outputs.iter().map(|&o| o.into()))
|
||||
.collect()
|
||||
};
|
||||
|
||||
let edges = QueryEdges {
|
||||
let inputs = QueryInputs {
|
||||
untracked: self.untracked_read,
|
||||
separator,
|
||||
input_outputs,
|
||||
tracked: if self.dependencies.is_empty() {
|
||||
runtime.empty_dependencies()
|
||||
} else {
|
||||
self.dependencies.iter().copied().collect()
|
||||
},
|
||||
};
|
||||
|
||||
QueryRevisions {
|
||||
changed_at: self.changed_at,
|
||||
edges,
|
||||
inputs,
|
||||
durability: self.durability,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -40,7 +40,7 @@ pub(crate) struct QueryRevisions {
|
|||
pub(crate) durability: Durability,
|
||||
|
||||
/// The inputs that went into our query, if we are tracking them.
|
||||
pub(crate) edges: QueryEdges,
|
||||
pub(crate) inputs: QueryInputs,
|
||||
}
|
||||
|
||||
impl QueryRevisions {
|
||||
|
@ -53,53 +53,18 @@ impl QueryRevisions {
|
|||
}
|
||||
}
|
||||
|
||||
/// The edges between a memoized value and other queries in the dependency graph.
|
||||
/// These edges include both dependency edges
|
||||
/// e.g., when creating the memoized value for Q0 executed another function Q1)
|
||||
/// and output edges
|
||||
/// (e.g., when Q0 specified the value for another query Q2).
|
||||
/// Every input.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct QueryEdges {
|
||||
/// The list of outgoing edges from this node.
|
||||
/// This list combines *both* inputs and outputs.
|
||||
/// The inputs are defined from the indices `0..S` where
|
||||
/// `S` is the value of the `separator` field.
|
||||
///
|
||||
/// Note that we always track input dependencies even when there are untracked reads.
|
||||
/// Untracked reads mean that we can't verify values, so we don't use the list of inputs for that,
|
||||
/// but we still use it for finding the transitive inputs to an accumulator.
|
||||
///
|
||||
/// You can access the input/output list via the methods [`inputs`] and [`outputs`] respectively.
|
||||
///
|
||||
/// Important:
|
||||
///
|
||||
/// * The inputs must be in **execution order** for the red-green algorithm to work.
|
||||
/// * The outputs must be in **sorted order** so that we can easily "diff" them between revisions.
|
||||
pub(crate) input_outputs: Arc<[DependencyIndex]>,
|
||||
|
||||
/// The index that separates inputs from outputs in the `tracked` field.
|
||||
pub(crate) separator: u32,
|
||||
pub struct QueryInputs {
|
||||
/// Inputs that are fully known.
|
||||
/// We track these even if there are unknown inputs so that the accumulator code
|
||||
/// can walk all the inputs even for tracked functions that read untracked values.
|
||||
pub(crate) tracked: Arc<[DependencyIndex]>,
|
||||
|
||||
/// Where there any *unknown* inputs?
|
||||
pub(crate) untracked: bool,
|
||||
}
|
||||
|
||||
impl QueryEdges {
|
||||
/// Returns the (tracked) inputs that were executed in computing this memoized value.
|
||||
///
|
||||
/// These will always be in execution order.
|
||||
pub(crate) fn inputs(&self) -> &[DependencyIndex] {
|
||||
&self.input_outputs[0..self.separator as usize]
|
||||
}
|
||||
|
||||
/// Returns the queries whose values were assigned while computing this memoized value.
|
||||
///
|
||||
/// These will always be in sorted order.
|
||||
pub(crate) fn outputs(&self) -> &[DependencyIndex] {
|
||||
&self.input_outputs[self.separator as usize..]
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for LocalState {
|
||||
fn default() -> Self {
|
||||
LocalState {
|
||||
|
@ -150,18 +115,18 @@ impl LocalState {
|
|||
})
|
||||
}
|
||||
|
||||
pub(super) fn add_output(&self, entity: DatabaseKeyIndex) {
|
||||
pub(super) fn add_entity_created(&self, entity: DatabaseKeyIndex) {
|
||||
self.with_query_stack(|stack| {
|
||||
if let Some(top_query) = stack.last_mut() {
|
||||
top_query.add_output(entity)
|
||||
top_query.add_entity_created(entity)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub(super) fn is_output(&self, entity: DatabaseKeyIndex) -> bool {
|
||||
pub(super) fn was_entity_created(&self, entity: DatabaseKeyIndex) -> bool {
|
||||
self.with_query_stack(|stack| {
|
||||
if let Some(top_query) = stack.last_mut() {
|
||||
top_query.is_output(entity)
|
||||
top_query.was_entity_created(entity)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
|
|
|
@ -6,7 +6,7 @@ use crate::cycle::CycleRecoveryStrategy;
|
|||
use crate::ingredient::Ingredient;
|
||||
use crate::jar::Jar;
|
||||
use crate::key::DependencyIndex;
|
||||
use crate::runtime::local_state::QueryEdges;
|
||||
use crate::runtime::local_state::QueryInputs;
|
||||
use crate::runtime::Runtime;
|
||||
use crate::{Database, DatabaseKeyIndex, IngredientIndex};
|
||||
|
||||
|
@ -178,9 +178,7 @@ pub trait HasJarsDyn {
|
|||
|
||||
fn cycle_recovery_strategy(&self, input: IngredientIndex) -> CycleRecoveryStrategy;
|
||||
|
||||
fn inputs(&self, input: DatabaseKeyIndex) -> Option<QueryEdges>;
|
||||
|
||||
fn remove_stale_output(&self, executor: DatabaseKeyIndex, stale_output: DependencyIndex);
|
||||
fn inputs(&self, input: DatabaseKeyIndex) -> Option<QueryInputs>;
|
||||
}
|
||||
|
||||
pub trait HasIngredientsFor<I>
|
||||
|
|
|
@ -3,7 +3,7 @@ use crate::{
|
|||
ingredient::{Ingredient, MutIngredient},
|
||||
interned::{InternedData, InternedId, InternedIngredient},
|
||||
key::{DatabaseKeyIndex, DependencyIndex},
|
||||
runtime::{local_state::QueryEdges, Runtime},
|
||||
runtime::{local_state::QueryInputs, Runtime},
|
||||
salsa_struct::SalsaStructInDb,
|
||||
Database, IngredientIndex, Revision,
|
||||
};
|
||||
|
@ -76,7 +76,7 @@ where
|
|||
data,
|
||||
};
|
||||
let result = self.interned.intern(runtime, entity_key);
|
||||
runtime.add_output(self.database_key_index(result));
|
||||
runtime.add_entity_created(self.database_key_index(result));
|
||||
result
|
||||
}
|
||||
|
||||
|
@ -114,14 +114,9 @@ where
|
|||
<_ as Ingredient<DB>>::cycle_recovery_strategy(&self.interned)
|
||||
}
|
||||
|
||||
fn inputs(&self, _key_index: crate::Id) -> Option<QueryEdges> {
|
||||
fn inputs(&self, _key_index: crate::Id) -> Option<QueryInputs> {
|
||||
None
|
||||
}
|
||||
|
||||
fn remove_stale_output(&self, executor: DatabaseKeyIndex, stale_output_key: Option<crate::Id>) {
|
||||
let key: Id = Id::from_id(stale_output_key.unwrap());
|
||||
// FIXME -- we can delete this entity
|
||||
}
|
||||
}
|
||||
|
||||
impl<DB: ?Sized, Id, Data> MutIngredient<DB> for TrackedStructIngredient<Id, Data>
|
||||
|
|
|
@ -1,360 +0,0 @@
|
|||
//! Test that a `tracked` fn on a `salsa::input`
|
||||
//! compiles and executes successfully.
|
||||
|
||||
use expect_test::expect;
|
||||
use salsa::DebugWithDb;
|
||||
use salsa_2022_tests::{HasLogger, Logger};
|
||||
use test_log::test;
|
||||
|
||||
#[salsa::jar(db = Db)]
|
||||
struct Jar(
|
||||
MyInput,
|
||||
MyTracked,
|
||||
maybe_specified,
|
||||
read_maybe_specified,
|
||||
create_tracked,
|
||||
final_result,
|
||||
);
|
||||
|
||||
trait Db: salsa::DbWithJar<Jar> + HasLogger {}
|
||||
|
||||
#[salsa::input]
|
||||
struct MyInput {
|
||||
field: u32,
|
||||
}
|
||||
|
||||
#[salsa::tracked]
|
||||
struct MyTracked {
|
||||
input: MyInput,
|
||||
}
|
||||
|
||||
/// If the input is in the range 0..10, this is specified to return 10.
|
||||
/// Otherwise, the default occurs, and it returns the input.
|
||||
#[salsa::tracked(specify)]
|
||||
fn maybe_specified(db: &dyn Db, tracked: MyTracked) -> u32 {
|
||||
db.push_log(format!("maybe_specified({:?})", tracked));
|
||||
tracked.input(db).field(db)
|
||||
}
|
||||
|
||||
/// Reads maybe-specified and multiplies it by 10.
|
||||
/// This is here to show whether we can detect when `maybe_specified` has changed
|
||||
/// and control down-stream work accordingly.
|
||||
#[salsa::tracked]
|
||||
fn read_maybe_specified(db: &dyn Db, tracked: MyTracked) -> u32 {
|
||||
db.push_log(format!("read_maybe_specified({:?})", tracked));
|
||||
maybe_specified(db, tracked) * 10
|
||||
}
|
||||
|
||||
/// Create a tracked value and *maybe* specify a value for
|
||||
/// `maybe_specified`
|
||||
#[salsa::tracked(jar = Jar)]
|
||||
fn create_tracked(db: &dyn Db, input: MyInput) -> MyTracked {
|
||||
db.push_log(format!("create_tracked({:?})", input));
|
||||
let tracked = MyTracked::new(db, input);
|
||||
if input.field(db) < 10 {
|
||||
maybe_specified::specify(db, tracked, 10);
|
||||
}
|
||||
tracked
|
||||
}
|
||||
|
||||
#[salsa::tracked]
|
||||
fn final_result(db: &dyn Db, input: MyInput) -> u32 {
|
||||
db.push_log(format!("final_result({:?})", input));
|
||||
let tracked = create_tracked(db, input);
|
||||
read_maybe_specified(db, tracked)
|
||||
}
|
||||
|
||||
#[salsa::db(Jar)]
|
||||
#[derive(Default)]
|
||||
struct Database {
|
||||
storage: salsa::Storage<Self>,
|
||||
logger: Logger,
|
||||
}
|
||||
|
||||
impl salsa::Database for Database {
|
||||
fn salsa_runtime(&self) -> &salsa::Runtime {
|
||||
self.storage.runtime()
|
||||
}
|
||||
|
||||
fn salsa_event(&self, event: salsa::Event) {
|
||||
self.push_log(format!("{:?}", event.debug(self)));
|
||||
}
|
||||
}
|
||||
|
||||
impl Db for Database {}
|
||||
|
||||
impl HasLogger for Database {
|
||||
fn logger(&self) -> &Logger {
|
||||
&self.logger
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_run_0() {
|
||||
let mut db = Database::default();
|
||||
|
||||
let input = MyInput::new(&mut db, 0);
|
||||
assert_eq!(final_result(&db, input), 100);
|
||||
db.assert_logs(expect![[r#"
|
||||
[
|
||||
"Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
|
||||
"Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(7), key_index: Some(Id { value: 1 }) } } }",
|
||||
"final_result(MyInput(Id { value: 1 }))",
|
||||
"Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
|
||||
"Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(6), key_index: Some(Id { value: 1 }) } } }",
|
||||
"create_tracked(MyInput(Id { value: 1 }))",
|
||||
"Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
|
||||
"Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
|
||||
"Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(5), key_index: Some(Id { value: 1 }) } } }",
|
||||
"read_maybe_specified(MyTracked(Id { value: 1 }))",
|
||||
"Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
|
||||
]"#]]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_run_5() {
|
||||
let mut db = Database::default();
|
||||
|
||||
let input = MyInput::new(&mut db, 5);
|
||||
assert_eq!(final_result(&db, input), 100);
|
||||
db.assert_logs(expect![[r#"
|
||||
[
|
||||
"Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
|
||||
"Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(7), key_index: Some(Id { value: 1 }) } } }",
|
||||
"final_result(MyInput(Id { value: 1 }))",
|
||||
"Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
|
||||
"Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(6), key_index: Some(Id { value: 1 }) } } }",
|
||||
"create_tracked(MyInput(Id { value: 1 }))",
|
||||
"Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
|
||||
"Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
|
||||
"Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(5), key_index: Some(Id { value: 1 }) } } }",
|
||||
"read_maybe_specified(MyTracked(Id { value: 1 }))",
|
||||
"Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
|
||||
]"#]]);
|
||||
}
|
||||
|
||||
/// Runs `final_result` on an input of 10 and snapshots the event log.
///
/// Unlike the field == 5 case, the logged events show that `maybe_specified`
/// (ingredient 4) executes in addition to `read_maybe_specified`
/// (ingredient 5).
#[test]
fn test_run_10() {
    let mut db = Database::default();

    let input = MyInput::new(&mut db, 10);
    assert_eq!(final_result(&db, input), 100);
    db.assert_logs(expect![[r#"
        [
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(7), key_index: Some(Id { value: 1 }) } } }",
            "final_result(MyInput(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(6), key_index: Some(Id { value: 1 }) } } }",
            "create_tracked(MyInput(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(5), key_index: Some(Id { value: 1 }) } } }",
            "read_maybe_specified(MyTracked(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(4), key_index: Some(Id { value: 1 }) } } }",
            "maybe_specified(MyTracked(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
        ]"#]]);
}
|
||||
|
||||
/// Runs `final_result` on an input of 20 and snapshots the event log.
///
/// The expected result is 200 (vs. 100 for smaller inputs); the event
/// sequence matches the field == 10 case, including execution of
/// `maybe_specified` (ingredient 4).
#[test]
fn test_run_20() {
    let mut db = Database::default();

    let input = MyInput::new(&mut db, 20);
    assert_eq!(final_result(&db, input), 200);
    db.assert_logs(expect![[r#"
        [
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(7), key_index: Some(Id { value: 1 }) } } }",
            "final_result(MyInput(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(6), key_index: Some(Id { value: 1 }) } } }",
            "create_tracked(MyInput(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(5), key_index: Some(Id { value: 1 }) } } }",
            "read_maybe_specified(MyTracked(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(4), key_index: Some(Id { value: 1 }) } } }",
            "maybe_specified(MyTracked(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
        ]"#]]);
}
|
||||
|
||||
/// Creates an input of 0, then a *second* input of 5, then mutates the
/// second input's field to 20, snapshotting the event log after each
/// `final_result` call.
///
/// The second input gets `Id { value: 2 }`; the snapshots show full
/// re-execution for it. After `set_field(20)`, the memoized values are
/// revalidated rather than recomputed — the FIXMEs below record known
/// incorrect behavior (result should be 20, and `maybe_specified` should
/// be invoked).
#[test]
fn test_run_0_then_5_then_20() {
    let mut db = Database::default();

    let input = MyInput::new(&mut db, 0);
    assert_eq!(final_result(&db, input), 100);
    db.assert_logs(expect![[r#"
        [
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(7), key_index: Some(Id { value: 1 }) } } }",
            "final_result(MyInput(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(6), key_index: Some(Id { value: 1 }) } } }",
            "create_tracked(MyInput(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(5), key_index: Some(Id { value: 1 }) } } }",
            "read_maybe_specified(MyTracked(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
        ]"#]]);

    // FIXME: read_maybe_specified should not re-execute
    let input = MyInput::new(&mut db, 5);
    assert_eq!(final_result(&db, input), 100);
    db.assert_logs(expect![[r#"
        [
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(7), key_index: Some(Id { value: 2 }) } } }",
            "final_result(MyInput(Id { value: 2 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(6), key_index: Some(Id { value: 2 }) } } }",
            "create_tracked(MyInput(Id { value: 2 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(5), key_index: Some(Id { value: 2 }) } } }",
            "read_maybe_specified(MyTracked(Id { value: 2 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
        ]"#]]);

    input.set_field(&mut db, 20);
    assert_eq!(final_result(&db, input), 100); // FIXME: Should be 20.
    db.assert_logs(expect![[r#"
        [
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(6), key_index: Some(Id { value: 2 }) } } }",
            "create_tracked(MyInput(Id { value: 2 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillDiscardStaleOutput { execute_key: DependencyIndex { ingredient_index: IngredientIndex(6), key_index: Some(Id { value: 2 }) }, output_key: DependencyIndex { ingredient_index: IngredientIndex(4), key_index: Some(Id { value: 2 }) } } }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: DependencyIndex { ingredient_index: IngredientIndex(4), key_index: Some(Id { value: 2 }) } } }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: DependencyIndex { ingredient_index: IngredientIndex(5), key_index: Some(Id { value: 2 }) } } }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: DependencyIndex { ingredient_index: IngredientIndex(7), key_index: Some(Id { value: 2 }) } } }",
        ]"#]]); // FIXME: should invoke maybe_specified
}
|
||||
|
||||
/// Creates an input of 0, then steps its field through 5, 10, and 20 via
/// `set_field`, snapshotting the event log after each `final_result` call.
///
/// Each mutation triggers re-execution of `create_tracked` followed by
/// revalidation of the downstream memos; the 5 -> 10 step additionally logs a
/// `WillDiscardStaleOutput` for ingredient 4. The FIXMEs below record known
/// incorrect behavior (which queries should or should not re-execute).
#[test]
fn test_run_0_then_5_then_10_then_20() {
    let mut db = Database::default();

    let input = MyInput::new(&mut db, 0);
    assert_eq!(final_result(&db, input), 100);
    db.assert_logs(expect![[r#"
        [
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(7), key_index: Some(Id { value: 1 }) } } }",
            "final_result(MyInput(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(6), key_index: Some(Id { value: 1 }) } } }",
            "create_tracked(MyInput(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(5), key_index: Some(Id { value: 1 }) } } }",
            "read_maybe_specified(MyTracked(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
        ]"#]]);

    // FIXME: `read_maybe_specified` should not re-execute
    input.set_field(&mut db, 5);
    assert_eq!(final_result(&db, input), 100);
    db.assert_logs(expect![[r#"
        [
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(6), key_index: Some(Id { value: 1 }) } } }",
            "create_tracked(MyInput(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: DependencyIndex { ingredient_index: IngredientIndex(5), key_index: Some(Id { value: 1 }) } } }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: DependencyIndex { ingredient_index: IngredientIndex(7), key_index: Some(Id { value: 1 }) } } }",
        ]"#]]);

    // FIXME: should execute `maybe_specified` but not `read_maybe_specified`
    input.set_field(&mut db, 10);
    assert_eq!(final_result(&db, input), 100);
    db.assert_logs(expect![[r#"
        [
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(6), key_index: Some(Id { value: 1 }) } } }",
            "create_tracked(MyInput(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillDiscardStaleOutput { execute_key: DependencyIndex { ingredient_index: IngredientIndex(6), key_index: Some(Id { value: 1 }) }, output_key: DependencyIndex { ingredient_index: IngredientIndex(4), key_index: Some(Id { value: 1 }) } } }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: DependencyIndex { ingredient_index: IngredientIndex(4), key_index: Some(Id { value: 1 }) } } }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: DependencyIndex { ingredient_index: IngredientIndex(5), key_index: Some(Id { value: 1 }) } } }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: DependencyIndex { ingredient_index: IngredientIndex(7), key_index: Some(Id { value: 1 }) } } }",
        ]"#]]);

    // FIXME: should execute `maybe_specified` but not `read_maybe_specified`
    input.set_field(&mut db, 20);
    assert_eq!(final_result(&db, input), 100);
    db.assert_logs(expect![[r#"
        [
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(6), key_index: Some(Id { value: 1 }) } } }",
            "create_tracked(MyInput(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: DependencyIndex { ingredient_index: IngredientIndex(4), key_index: Some(Id { value: 1 }) } } }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: DependencyIndex { ingredient_index: IngredientIndex(5), key_index: Some(Id { value: 1 }) } } }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: DependencyIndex { ingredient_index: IngredientIndex(7), key_index: Some(Id { value: 1 }) } } }",
        ]"#]]);
}
|
||||
|
||||
/// Creates an input of 5, then mutates its field to 20, snapshotting the
/// event log after each `final_result` call.
///
/// After `set_field(20)`, the log shows `create_tracked` re-executing, a
/// `WillDiscardStaleOutput` for ingredient 4, and the downstream memos being
/// revalidated rather than recomputed. The FIXMEs record known incorrect
/// behavior (result should be 20; `maybe_specified` should be invoked).
#[test]
fn test_run_5_then_20() {
    let mut db = Database::default();

    let input = MyInput::new(&mut db, 5);
    assert_eq!(final_result(&db, input), 100);
    db.assert_logs(expect![[r#"
        [
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(7), key_index: Some(Id { value: 1 }) } } }",
            "final_result(MyInput(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(6), key_index: Some(Id { value: 1 }) } } }",
            "create_tracked(MyInput(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(5), key_index: Some(Id { value: 1 }) } } }",
            "read_maybe_specified(MyTracked(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
        ]"#]]);

    input.set_field(&mut db, 20);
    assert_eq!(final_result(&db, input), 100); // FIXME: Should be 20.
    db.assert_logs(expect![[r#"
        [
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillExecute { database_key: DependencyIndex { ingredient_index: IngredientIndex(6), key_index: Some(Id { value: 1 }) } } }",
            "create_tracked(MyInput(Id { value: 1 }))",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillDiscardStaleOutput { execute_key: DependencyIndex { ingredient_index: IngredientIndex(6), key_index: Some(Id { value: 1 }) }, output_key: DependencyIndex { ingredient_index: IngredientIndex(4), key_index: Some(Id { value: 1 }) } } }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: WillCheckCancellation }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: DependencyIndex { ingredient_index: IngredientIndex(4), key_index: Some(Id { value: 1 }) } } }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: DependencyIndex { ingredient_index: IngredientIndex(5), key_index: Some(Id { value: 1 }) } } }",
            "Event { runtime_id: RuntimeId { counter: 0 }, kind: DidValidateMemoizedValue { database_key: DependencyIndex { ingredient_index: IngredientIndex(7), key_index: Some(Id { value: 1 }) } } }",
        ]"#]]); // FIXME: should invoke maybe_specified
}
|
Loading…
Reference in a new issue