mirror of https://github.com/salsa-rs/salsa.git
synced 2025-01-22 21:05:11 +00:00

Send + Sync requirements

parent a1651c89d9
commit 65118a0fe6

16 changed files with 83 additions and 323 deletions
@@ -108,7 +108,8 @@ impl DbMacro {
     fn add_salsa_view_method(&self, input: &mut syn::ItemTrait) -> syn::Result<()> {
         input.items.push(parse_quote! {
-            fn __salsa_add_view__(&self);
+            #[doc(hidden)]
+            fn zalsa_add_view(&self);
         });

         Ok(())
     }
@@ -121,8 +122,10 @@ impl DbMacro {
             ));
         };
         input.items.push(parse_quote! {
-            fn __salsa_add_view__(&self) {
-                salsa::storage::views(self).add::<dyn #TraitPath>(|t| t, |t| t);
+            #[doc(hidden)]
+            #[allow(uncommon_codepoints)]
+            fn zalsa_add_view(&self) {
+                salsa::storage::views(self).add::<Self, dyn #TraitPath>(|t| t, |t| t);
             }
         });

         Ok(())
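
To make the macro change above easier to read, here is a rough hand-expansion of the generated item, written as a standalone sketch. The trait `MyJarDb`, the `registry` module, and `register` are invented for illustration; only the `#[doc(hidden)] fn zalsa_add_view` shape and the `views(self).add::<Self, dyn Trait>` registration call reflect the diff.

// Illustrative hand-expansion (not actual macro output).
mod registry {
    // Stand-in for `salsa::storage::views(db).add::<Self, dyn Trait>(...)`.
    pub fn register<DbView: ?Sized>() {
        println!("registered view: {}", std::any::type_name::<DbView>());
    }
}

trait MyJarDb {
    // ...user-defined methods would go here...

    // The macro now appends this hidden method to the trait.
    #[doc(hidden)]
    fn zalsa_add_view(&self);
}

struct MyDatabase;

impl MyJarDb for MyDatabase {
    fn zalsa_add_view(&self) {
        // In the real expansion this line is
        // `salsa::storage::views(self).add::<Self, dyn MyJarDb>(|t| t, |t| t);`
        registry::register::<dyn MyJarDb>();
    }
}

fn main() {
    MyDatabase.zalsa_add_view();
}
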
@@ -1,184 +0,0 @@
-use proc_macro2::extra::DelimSpan;
-use proc_macro2::{Delimiter, Group, Literal, TokenStream};
-use syn::punctuated::Punctuated;
-use syn::spanned::Spanned;
-use syn::visit_mut::VisitMut;
-use syn::{Field, FieldsUnnamed, Ident, ItemStruct, Path, Token};
-
-use crate::options::Options;
-use crate::xform::ChangeLt;
-
-// Source:
-//
-// #[salsa::jar(db = Jar0Db)]
-// pub struct Jar0(Entity0, Ty0, EntityComponent0, my_func);
-
-pub(crate) fn jar(
-    args: proc_macro::TokenStream,
-    input: proc_macro::TokenStream,
-) -> proc_macro::TokenStream {
-    let options = syn::parse_macro_input!(args as Args);
-    let db_path = match options.db_path {
-        Some(v) => v,
-        None => panic!("no `db` specified"),
-    };
-    let input = syn::parse_macro_input!(input as ItemStruct);
-    jar_struct_and_friends(&db_path, &input).into()
-}
-
-type Args = Options<Jar>;
-
-struct Jar;
-
-impl crate::options::AllowedOptions for Jar {
-    const RETURN_REF: bool = false;
-
-    const SPECIFY: bool = false;
-
-    const NO_EQ: bool = false;
-
-    const SINGLETON: bool = false;
-
-    const JAR: bool = false;
-
-    const DATA: bool = false;
-
-    const DB: bool = true;
-
-    const RECOVERY_FN: bool = false;
-
-    const LRU: bool = false;
-
-    const CONSTRUCTOR_NAME: bool = false;
-}
-
-pub(crate) fn jar_struct_and_friends(
-    jar_trait: &Path,
-    input: &ItemStruct,
-) -> proc_macro2::TokenStream {
-    let output_struct = jar_struct(input);
-
-    let jar_struct = &input.ident;
-
-    // for each field, we need to generate an impl of `HasIngredientsFor`
-    let has_ingredients_for_impls: Vec<_> = input
-        .fields
-        .iter()
-        .zip(0..)
-        .map(|(field, index)| has_ingredients_for_impl(jar_struct, field, index))
-        .collect();
-
-    let jar_impl = jar_impl(jar_struct, jar_trait, input);
-
-    quote! {
-        #output_struct
-
-        #(#has_ingredients_for_impls)*
-
-        #jar_impl
-    }
-}
-
-pub(crate) fn has_ingredients_for_impl(
-    jar_struct: &Ident,
-    field: &Field,
-    index: u32,
-) -> proc_macro2::TokenStream {
-    let field_ty = &field.ty;
-    let index = Literal::u32_unsuffixed(index);
-    quote! {
-        impl salsa::storage::HasIngredientsFor<#field_ty> for #jar_struct {
-            fn ingredient(&self) -> &<#field_ty as salsa::storage::IngredientsFor>::Ingredients {
-                &self.#index
-            }
-
-            fn ingredient_mut(&mut self) -> &mut <#field_ty as salsa::storage::IngredientsFor>::Ingredients {
-                &mut self.#index
-            }
-        }
-    }
-}
-
-pub(crate) fn jar_impl(
-    jar_struct: &Ident,
-    jar_trait: &Path,
-    input: &ItemStruct,
-) -> proc_macro2::TokenStream {
-    let field_tys: Vec<_> = input.fields.iter().map(|f| &f.ty).collect();
-    let field_var_names: &Vec<_> = &input
-        .fields
-        .iter()
-        .zip(0..)
-        .map(|(f, i)| syn::LitInt::new(&format!("{}", i), f.ty.span()))
-        .collect();
-    // ANCHOR: init_jar
-    quote! {
-        unsafe impl salsa::jar::Jar for #jar_struct {
-            type DynDb = dyn #jar_trait;
-
-            unsafe fn init_jar<DB>(place: *mut Self, routes: &mut salsa::routes::Routes<DB>)
-            where
-                DB: salsa::storage::JarFromJars<Self> + salsa::storage::DbWithJar<Self>,
-            {
-                #(
-                    unsafe {
-                        std::ptr::addr_of_mut!((*place).#field_var_names)
-                            .write(<#field_tys as salsa::storage::IngredientsFor>::create_ingredients(routes));
-                    }
-                )*
-            }
-        }
-    }
-    // ANCHOR_END: init_jar
-}
-
-pub(crate) fn jar_struct(input: &ItemStruct) -> ItemStruct {
-    let mut output_struct = input.clone();
-    output_struct.fields = generate_fields(input).into();
-    if output_struct.semi_token.is_none() {
-        output_struct.semi_token = Some(Token![;](input.struct_token.span));
-    }
-    output_struct
-}
-
-fn generate_fields(input: &ItemStruct) -> FieldsUnnamed {
-    // Generate the
-    let mut output_fields = Punctuated::new();
-    for field in input.fields.iter() {
-        let mut field = field.clone();
-
-        // Convert to anonymous fields
-        field.ident = None;
-
-        // Convert ty to reference static and not `'_`
-        ChangeLt::elided_to_static().visit_type_mut(&mut field.ty);
-
-        let field_ty = &field.ty;
-        field.ty =
-            syn::parse2(quote!(< #field_ty as salsa::storage::IngredientsFor >::Ingredients))
-                .unwrap();
-
-        output_fields.push(field);
-    }
-
-    let paren_token = match &input.fields {
-        syn::Fields::Named(f) => syn::token::Paren {
-            span: f.brace_token.span,
-        },
-        syn::Fields::Unnamed(f) => f.paren_token,
-        syn::Fields::Unit => syn::token::Paren {
-            span: to_delim_span(input),
-        },
-    };
-
-    FieldsUnnamed {
-        paren_token,
-        unnamed: output_fields,
-    }
-}
-
-fn to_delim_span(s: &impl Spanned) -> DelimSpan {
-    let mut group = Group::new(Delimiter::None, TokenStream::new());
-    group.set_span(s.span());
-    group.delim_span()
-}
@@ -46,7 +46,6 @@ mod debug;
 mod debug_with_db;
 mod input;
 mod interned;
-mod jar;
 mod options;
 mod salsa_struct;
 mod tracked;
@@ -60,21 +59,11 @@ pub fn accumulator(args: TokenStream, input: TokenStream) -> TokenStream {
     accumulator::accumulator(args, input)
 }

-#[proc_macro_attribute]
-pub fn jar(args: TokenStream, input: TokenStream) -> TokenStream {
-    jar::jar(args, input)
-}
-
 #[proc_macro_attribute]
 pub fn db(args: TokenStream, input: TokenStream) -> TokenStream {
     db::db(args, input)
 }

-#[proc_macro_attribute]
-pub fn db_view(args: TokenStream, input: TokenStream) -> TokenStream {
-    db_view::db_view(args, input)
-}
-
 #[proc_macro_attribute]
 pub fn interned(args: TokenStream, input: TokenStream) -> TokenStream {
     interned::interned(args, input)
@@ -19,7 +19,7 @@ use crate::{
 pub trait Accumulator: Jar {
     const DEBUG_NAME: &'static str;

-    type Data: Clone + Debug;
+    type Data: Clone + Debug + Send + Sync;
 }

 pub struct AccumulatorJar<A: Accumulator> {
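
A minimal sketch of what the new `Send + Sync` bound on `Data` means for accumulator users. The `Diagnostics` type and the local `Accumulator` trait (which drops the `Jar` supertrait) are stand-ins for illustration only.

use std::fmt::Debug;

// Local stand-in mirroring the bound added in the diff (the real trait also
// requires `Jar`).
trait Accumulator {
    type Data: Clone + Debug + Send + Sync;
}

struct Diagnostics;

impl Accumulator for Diagnostics {
    // `String` satisfies the new bound; something like `Rc<String>` would
    // now be rejected because it is neither Send nor Sync.
    type Data = String;
}

// Compile-time check that accumulated data can cross thread boundaries.
fn assert_threadsafe<T: Send + Sync>() {}

fn main() {
    assert_threadsafe::<<Diagnostics as Accumulator>::Data>();
    println!("Diagnostics::Data is Send + Sync");
}
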
@@ -5,6 +5,7 @@ use parking_lot::Mutex;

 use crate::{storage::DatabaseGen, Durability, Event};

+#[salsa_macros::db]
 pub trait Database: DatabaseGen {
     /// This function is invoked at key points in the salsa
     /// runtime. It permits the database to be customized and to
@@ -37,21 +38,6 @@ pub trait Database: DatabaseGen {
     }
 }

-/// The database view trait allows you to define your own views on the database.
-/// This lets you add extra context beyond what is stored in the salsa database itself.
-pub trait DatabaseView<Dyn: ?Sized + Any>: Database {
-    /// Registers this database view in the database.
-    /// This is normally invoked automatically by tracked functions that require a given view.
-    fn add_view_to_db(&self);
-}
-
-impl<Db: Database> DatabaseView<dyn Database> for Db {
-    fn add_view_to_db(&self) {
-        let upcasts = self.views_of_self();
-        upcasts.add::<dyn Database>(|t| t, |t| t);
-    }
-}
-
 /// Indicates a database that also supports parallel query
 /// evaluation. All of Salsa's base query support is capable of
 /// parallel execution, but for it to work, your query key/value types
@@ -194,10 +180,6 @@ impl AttachedDatabase {
     }
 }

-unsafe impl Send for AttachedDatabase where dyn Database: Sync {}
-
-unsafe impl Sync for AttachedDatabase where dyn Database: Sync {}
-
 struct AttachedDb<'db, Db: ?Sized + Database> {
     db: &'db Db,
     previous: AttachedDatabase,
@@ -42,10 +42,10 @@ pub trait Configuration: 'static {
     type SalsaStruct<'db>: SalsaStructInDb<Self::DbView>;

     /// The input to the function
-    type Input<'db>;
+    type Input<'db>: Send + Sync;

     /// The value computed by the function.
-    type Value<'db>: fmt::Debug;
+    type Value<'db>: fmt::Debug + Send + Sync;

     /// Determines whether this function can recover from being a participant in a cycle
     /// (and, if so, how).
@@ -4,8 +4,8 @@ use std::{
 };

 use crate::{
-    cycle::CycleRecoveryStrategy, key::DependencyIndex, runtime::local_state::QueryOrigin,
-    storage::IngredientIndex, Database, DatabaseKeyIndex, Id,
+    cycle::CycleRecoveryStrategy, runtime::local_state::QueryOrigin, storage::IngredientIndex,
+    Database, DatabaseKeyIndex, Id,
 };

 use super::Revision;
@@ -18,7 +18,7 @@ pub trait Jar: Any {
     fn create_ingredients(&self, first_index: IngredientIndex) -> Vec<Box<dyn Ingredient>>;
 }

-pub trait Ingredient: Any + std::fmt::Debug {
+pub trait Ingredient: Any + std::fmt::Debug + Send + Sync {
     /// Has the value for `input` in this ingredient changed after `revision`?
     fn maybe_changed_after<'db>(
         &'db self,
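
A small sketch of why the `Send + Sync` supertraits on `Ingredient` matter: trait objects implement their supertraits, so `Box<dyn Ingredient>` becomes `Send + Sync` and the storage holding the ingredients can be shared across threads. The `Dummy` type and `spawn_with` helper are invented for this sketch.

use std::any::Any;
use std::fmt::Debug;

// Simplified local stand-in for the trait in the diff.
trait Ingredient: Any + Debug + Send + Sync {}

#[derive(Debug)]
struct Dummy;
impl Ingredient for Dummy {}

fn spawn_with(ingredients: Vec<Box<dyn Ingredient>>) {
    // The Vec can move to another thread because every boxed ingredient is
    // guaranteed Send + Sync by the supertrait bounds.
    std::thread::spawn(move || {
        println!("moved {} ingredient(s) to another thread", ingredients.len());
    })
    .join()
    .unwrap();
}

fn main() {
    let one: Box<dyn Ingredient> = Box::new(Dummy);
    spawn_with(vec![one]);
}
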
src/input.rs (51 changed lines)
@@ -1,35 +1,31 @@
 use std::{
+    any::Any,
     fmt,
     sync::atomic::{AtomicU32, Ordering},
 };

 use crate::{
     cycle::CycleRecoveryStrategy,
-    id::FromId,
+    id::{AsId, FromId},
     ingredient::{fmt_index, Ingredient, IngredientRequiresReset},
-    key::{DatabaseKeyIndex, DependencyIndex},
+    key::DatabaseKeyIndex,
     runtime::{local_state::QueryOrigin, Runtime},
     storage::IngredientIndex,
     Database, Revision,
 };

-pub trait InputId: FromId + 'static {}
-impl<T: FromId + 'static> InputId for T {}
+pub trait Configuration: Any {
+    type Id: FromId + 'static + Send + Sync;
+}

-pub struct InputIngredient<Id>
-where
-    Id: InputId,
-{
+pub struct InputIngredient<C: Configuration> {
     ingredient_index: IngredientIndex,
     counter: AtomicU32,
     debug_name: &'static str,
-    _phantom: std::marker::PhantomData<Id>,
+    _phantom: std::marker::PhantomData<C::Id>,
 }

-impl<Id> InputIngredient<Id>
-where
-    Id: InputId,
-{
+impl<C: Configuration> InputIngredient<C> {
     pub fn new(index: IngredientIndex, debug_name: &'static str) -> Self {
         Self {
             ingredient_index: index,
@@ -39,37 +35,34 @@ where
         }
     }

-    pub fn database_key_index(&self, id: Id) -> DatabaseKeyIndex {
+    pub fn database_key_index(&self, id: C::Id) -> DatabaseKeyIndex {
         DatabaseKeyIndex {
             ingredient_index: self.ingredient_index,
             key_index: id.as_id(),
         }
     }

-    pub fn new_input(&self, _runtime: &Runtime) -> Id {
+    pub fn new_input(&self, _runtime: &Runtime) -> C::Id {
         let next_id = self.counter.fetch_add(1, Ordering::Relaxed);
-        Id::from_id(crate::Id::from_u32(next_id))
+        C::Id::from_id(crate::Id::from_u32(next_id))
     }

-    pub fn new_singleton_input(&self, _runtime: &Runtime) -> Id {
+    pub fn new_singleton_input(&self, _runtime: &Runtime) -> C::Id {
         // when one exists already, panic
         if self.counter.load(Ordering::Relaxed) >= 1 {
             panic!("singleton struct may not be duplicated");
         }
         // fresh new ingredient
         self.counter.store(1, Ordering::Relaxed);
-        Id::from_id(crate::Id::from_u32(0))
+        C::Id::from_id(crate::Id::from_u32(0))
     }

-    pub fn get_singleton_input(&self, _runtime: &Runtime) -> Option<Id> {
-        (self.counter.load(Ordering::Relaxed) > 0).then(|| Id::from_id(crate::Id::from_u32(0)))
+    pub fn get_singleton_input(&self, _runtime: &Runtime) -> Option<C::Id> {
+        (self.counter.load(Ordering::Relaxed) > 0).then(|| C::Id::from_id(crate::Id::from_u32(0)))
     }
 }

-impl<Id> Ingredient for InputIngredient<Id>
-where
-    Id: InputId,
-{
+impl<C: Configuration> Ingredient for InputIngredient<C> {
     fn ingredient_index(&self) -> IngredientIndex {
         self.ingredient_index
     }
@@ -132,17 +125,11 @@ where
     }
 }

-impl<Id> IngredientRequiresReset for InputIngredient<Id>
-where
-    Id: InputId,
-{
+impl<C: Configuration> IngredientRequiresReset for InputIngredient<C> {
     const RESET_ON_NEW_REVISION: bool = false;
 }

-impl<Id> std::fmt::Debug for InputIngredient<Id>
-where
-    Id: InputId,
-{
+impl<C: Configuration> std::fmt::Debug for InputIngredient<C> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.debug_struct(std::any::type_name::<Self>())
             .field("index", &self.ingredient_index)
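
The rewrite of `src/input.rs` replaces the bare `Id` parameter with a `Configuration` trait. Below is a self-contained sketch of that shape using local stand-ins (`FromId` here takes a plain `u32`, and `MyInputId`/`MyInputConfig` are hypothetical); only the `Configuration { type Id: ... + Send + Sync }` pattern itself comes from the diff.

use std::any::Any;
use std::marker::PhantomData;
use std::sync::atomic::{AtomicU32, Ordering};

// Stand-in for salsa's `FromId` (the real trait converts from `salsa::Id`).
trait FromId: Sized {
    fn from_id(raw: u32) -> Self;
}

// Mirrors the new trait from the diff: the ingredient is generic over a
// configuration whose associated `Id` must now be Send + Sync.
trait Configuration: Any {
    type Id: FromId + 'static + Send + Sync;
}

struct InputIngredient<C: Configuration> {
    counter: AtomicU32,
    _phantom: PhantomData<C::Id>,
}

impl<C: Configuration> InputIngredient<C> {
    fn new_input(&self) -> C::Id {
        let next = self.counter.fetch_add(1, Ordering::Relaxed);
        C::Id::from_id(next)
    }
}

// Hypothetical input struct id and its configuration.
#[derive(Copy, Clone, Debug)]
struct MyInputId(u32);

impl FromId for MyInputId {
    fn from_id(raw: u32) -> Self {
        MyInputId(raw)
    }
}

struct MyInputConfig;

impl Configuration for MyInputConfig {
    type Id = MyInputId;
}

fn main() {
    let ingredient = InputIngredient::<MyInputConfig> {
        counter: AtomicU32::new(0),
        _phantom: PhantomData,
    };
    println!("{:?}", ingredient.new_input()); // MyInputId(0)
}
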
@@ -1,7 +1,7 @@
 use crate::cycle::CycleRecoveryStrategy;
 use crate::id::{AsId, FromId};
 use crate::ingredient::{fmt_index, Ingredient, IngredientRequiresReset};
-use crate::key::DependencyIndex;
+use crate::input::Configuration;
 use crate::plumbing::transmute_lifetime;
 use crate::runtime::local_state::QueryOrigin;
 use crate::runtime::StampedValue;
@@ -10,7 +10,9 @@ use crate::{Database, DatabaseKeyIndex, Durability, Id, Revision, Runtime};
 use dashmap::mapref::entry::Entry;
 use dashmap::DashMap;
 use std::fmt;
-use std::hash::Hash;
+
+pub trait InputFieldData: Send + Sync + 'static {}
+impl<T: Send + Sync + 'static> InputFieldData for T {}

 /// Ingredient used to represent the fields of a `#[salsa::input]`.
 ///
@@ -21,16 +23,16 @@ use std::hash::Hash;
 /// a shared reference, so some locking is required.
 /// Altogether this makes the implementation somewhat simpler than tracked
 /// structs.
-pub struct InputFieldIngredient<K, F> {
+pub struct InputFieldIngredient<C: Configuration, F: InputFieldData> {
     index: IngredientIndex,
-    map: DashMap<K, Box<StampedValue<F>>>,
+    map: DashMap<C::Id, Box<StampedValue<F>>>,
     debug_name: &'static str,
 }

-impl<K, F> InputFieldIngredient<K, F>
+impl<C, F> InputFieldIngredient<C, F>
 where
-    K: Eq + Hash + AsId + 'static,
-    F: 'static,
+    C: Configuration,
+    F: InputFieldData,
 {
     pub fn new(index: IngredientIndex, debug_name: &'static str) -> Self {
         Self {
@@ -43,7 +45,7 @@ where
     pub fn store_mut(
         &mut self,
         runtime: &Runtime,
-        key: K,
+        key: C::Id,
         value: F,
         durability: Durability,
     ) -> Option<F> {
@@ -62,7 +64,7 @@ where
     /// Set the field of a new input.
     ///
     /// This function panics if the field has ever been set before.
-    pub fn store_new(&self, runtime: &Runtime, key: K, value: F, durability: Durability) {
+    pub fn store_new(&self, runtime: &Runtime, key: C::Id, value: F, durability: Durability) {
         let revision = runtime.current_revision();
         let stamped_value = Box::new(StampedValue {
             value,
@@ -80,7 +82,7 @@ where
         }
     }

-    pub fn fetch<'db>(&'db self, runtime: &'db Runtime, key: K) -> &F {
+    pub fn fetch<'db>(&'db self, runtime: &'db Runtime, key: C::Id) -> &F {
         let StampedValue {
             value,
             durability,
@@ -100,7 +102,7 @@ where
         unsafe { transmute_lifetime(self, value) }
     }

-    fn database_key_index(&self, key: K) -> DatabaseKeyIndex {
+    fn database_key_index(&self, key: C::Id) -> DatabaseKeyIndex {
         DatabaseKeyIndex {
             ingredient_index: self.index,
             key_index: key.as_id(),
@@ -108,10 +110,10 @@ where
     }
 }

-impl<K, F> Ingredient for InputFieldIngredient<K, F>
+impl<C, F> Ingredient for InputFieldIngredient<C, F>
 where
-    K: FromId + 'static,
-    F: 'static,
+    C: Configuration,
+    F: InputFieldData,
 {
     fn ingredient_index(&self) -> IngredientIndex {
         self.index
@@ -127,7 +129,7 @@ where
         input: Option<Id>,
         revision: Revision,
     ) -> bool {
-        let key = K::from_id(input.unwrap());
+        let key = C::Id::from_id(input.unwrap());
         self.map.get(&key).unwrap().changed_at > revision
     }
@@ -164,16 +166,18 @@ where
     }
 }

-impl<K, F> IngredientRequiresReset for InputFieldIngredient<K, F>
+impl<C, F> IngredientRequiresReset for InputFieldIngredient<C, F>
 where
-    K: AsId,
+    C: Configuration,
+    F: InputFieldData,
 {
     const RESET_ON_NEW_REVISION: bool = false;
 }

-impl<K, F> std::fmt::Debug for InputFieldIngredient<K, F>
+impl<C, F> std::fmt::Debug for InputFieldIngredient<C, F>
 where
-    K: AsId,
+    C: Configuration,
+    F: InputFieldData,
 {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.debug_struct(std::any::type_name::<Self>())
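
The new `InputFieldData` trait is a blanket-implemented alias: any `Send + Sync + 'static` type qualifies automatically, so the ingredient's bounds stay short. A tiny sketch of the same pattern (the `store_field` function is made up for illustration):

pub trait InputFieldData: Send + Sync + 'static {}
impl<T: Send + Sync + 'static> InputFieldData for T {}

// One short bound instead of repeating `Send + Sync + 'static` everywhere.
fn store_field<F: InputFieldData>(value: F) -> F {
    value
}

fn main() {
    let s = store_field(String::from("ok")); // String: Send + Sync + 'static
    // store_field(std::rc::Rc::new(1)); // would not compile: Rc is !Send + !Sync
    println!("{s}");
}
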
@@ -21,8 +21,10 @@ use super::Revision;
 pub trait Configuration: Sized + 'static {
     const DEBUG_NAME: &'static str;

-    type Data<'db>: InternedData;
+    /// The type of data being interned
+    type Data<'db>: InternedData + Send + Sync;

+    /// The end user struct
     type Struct<'db>: Copy;

     /// Create an end-user struct from the underlying raw pointer.
src/jar.rs (24 changed lines)
@@ -1,24 +0,0 @@
-use crate::{
-    storage::{HasJar, JarFromJars},
-    Database, DbWithJar,
-};
-
-use super::routes::Routes;
-
-/// Representative trait of a salsa jar
-///
-/// # Safety
-///
-/// `init_jar` must fully initialize the jar
-pub unsafe trait Jar: Sized {
-    type DynDb: ?Sized + HasJar<Self> + Database;
-
-    /// Initializes the jar at `place`
-    ///
-    /// # Safety
-    ///
-    /// `place` must be a valid pointer to this jar
-    unsafe fn init_jar<DB>(place: *mut Self, routes: &mut Routes<DB>)
-    where
-        DB: JarFromJars<Self> + DbWithJar<Self>;
-}
@@ -28,7 +28,6 @@ mod views;
 pub use self::cancelled::Cancelled;
 pub use self::cycle::Cycle;
 pub use self::database::Database;
-pub use self::database::DatabaseView;
 pub use self::database::ParallelDatabase;
 pub use self::database::Snapshot;
 pub use self::durability::Durability;
@@ -43,7 +42,6 @@ pub use salsa_macros::accumulator;
 pub use salsa_macros::db;
 pub use salsa_macros::input;
 pub use salsa_macros::interned;
-pub use salsa_macros::jar;
 pub use salsa_macros::tracked;
 pub use salsa_macros::DebugWithDb;
 pub use salsa_macros::Update;
@@ -1,25 +1,26 @@
 use crate::id::AsId;
-use crate::input_field::InputFieldIngredient;
+use crate::input::Configuration;
+use crate::input_field::{InputFieldData, InputFieldIngredient};
 use crate::{Durability, Runtime};
 use std::hash::Hash;

 #[must_use]
-pub struct Setter<'setter, K, F> {
+pub struct Setter<'setter, C: Configuration, F: InputFieldData> {
     runtime: &'setter mut Runtime,
-    key: K,
-    ingredient: &'setter mut InputFieldIngredient<K, F>,
+    key: C::Id,
+    ingredient: &'setter mut InputFieldIngredient<C, F>,
     durability: Durability,
 }

-impl<'setter, K, F> Setter<'setter, K, F>
+impl<'setter, C, F> Setter<'setter, C, F>
 where
-    K: Eq + Hash + AsId + 'static,
-    F: 'static,
+    C: Configuration,
+    F: InputFieldData,
 {
     pub fn new(
         runtime: &'setter mut Runtime,
-        key: K,
-        ingredient: &'setter mut InputFieldIngredient<K, F>,
+        key: C::Id,
+        ingredient: &'setter mut InputFieldIngredient<C, F>,
     ) -> Self {
         Setter {
             runtime,
@@ -25,7 +25,7 @@ pub fn views<Db: ?Sized + Database>(db: &Db) -> &Views {
 ///
 /// This trait is meant to be implemented by our procedural macro.
 /// We need to document any non-obvious conditions that it satisfies.
-pub unsafe trait DatabaseGen: Any + Send + Sync {
+pub unsafe trait DatabaseGen: Any {
     /// Upcast to a `dyn Database`.
     ///
     /// Only required because upcasts not yet stabilized (*grr*).
@@ -80,9 +80,9 @@ pub unsafe trait DatabaseGen: Any + Send + Sync {
 ///
 /// The `storage` field must be an owned field of
 /// the implementing struct.
-pub unsafe trait HasStorage: Database + Sized + Any + Send + Sync {
+pub unsafe trait HasStorage: Database + Sized + Any {
     fn storage(&self) -> &Storage<Self>;
-    fn storage_mut(&self) -> &mut Storage<Self>;
+    fn storage_mut(&mut self) -> &mut Storage<Self>;
 }

 unsafe impl<T: HasStorage> DatabaseGen for T {
@@ -98,13 +98,6 @@ unsafe impl<T: HasStorage> DatabaseGen for T {
         &self.storage().shared.upcasts
     }

-    fn views_of_self(&self) -> &ViewsOf<Self>
-    where
-        Self: Sized + Database,
-    {
-        &self.storage().shared.upcasts
-    }
-
     fn nonce(&self) -> Nonce<StorageNonce> {
         self.storage().shared.nonce
     }
@@ -388,10 +381,19 @@ where
     cached_data: std::sync::OnceLock<(Nonce<StorageNonce>, *const I)>,
 }

+unsafe impl<I> Sync for IngredientCache<I> where I: Ingredient + Sync {}
+
 impl<I> IngredientCache<I>
 where
     I: Ingredient,
 {
+    /// Create a new cache
+    pub const fn new() -> Self {
+        Self {
+            cached_data: std::sync::OnceLock::new(),
+        }
+    }
+
     /// Get a reference to the ingredient in the database.
     /// If the ingredient is not already in the cache, it will be created.
     pub fn get_or_create<'s>(
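
The added `unsafe impl Sync for IngredientCache` is needed because the cache stores a raw pointer, which suppresses the automatic `Send`/`Sync` impls even when the pointed-to ingredient is thread-safe. A simplified sketch of the same situation (this is a stand-in, not the real cache, and the safety argument is an assumption of the sketch):

use std::sync::OnceLock;

// Simplified stand-in: the cache holds a raw pointer, and raw pointers are
// neither Send nor Sync, so the struct is not automatically Sync.
struct IngredientCache<I> {
    cached_data: OnceLock<*const I>,
}

// SAFETY (assumption of this sketch, mirroring the bound in the diff): the
// pointer is only read after initialization and points at an `I` that is
// itself Sync, so sharing `&IngredientCache<I>` across threads is fine.
unsafe impl<I> Sync for IngredientCache<I> where I: Sync {}

impl<I> IngredientCache<I> {
    const fn new() -> Self {
        Self {
            cached_data: OnceLock::new(),
        }
    }
}

// A `static` requires the type to be Sync; without the manual impl above,
// the compiler would reject this declaration.
static CACHE: IngredientCache<u32> = IngredientCache::new();

fn main() {
    assert!(CACHE.cached_data.get().is_none());
}
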
@@ -31,13 +31,13 @@ pub trait Configuration: Jar + Sized + 'static {
     const FIELD_DEBUG_NAMES: &'static [&'static str];

     /// A (possibly empty) tuple of the fields for this struct.
-    type Fields<'db>;
+    type Fields<'db>: Send + Sync;

     /// A array of [`Revision`][] values, one per each of the value fields.
     /// When a struct is re-recreated in a new revision, the corresponding
     /// entries for each field are updated to the new revision if their
     /// values have changed (or if the field is marked as `#[no_eq]`).
-    type Revisions;
+    type Revisions: Send + Sync;

     type Struct<'db>: Copy;
@@ -68,7 +68,7 @@ impl Views {
     }

     /// Add a new upcast from `Db` to `T`, given the upcasting function `func`.
-    fn add<Db: Database, DbView: ?Sized + Any>(
+    pub fn add<Db: Database, DbView: ?Sized + Any>(
         &self,
         func: fn(&Db) -> &DbView,
         func_mut: fn(&mut Db) -> &mut DbView,