//#![cfg_attr(all(nightly, feature="nightly"), feature(never_type))]
#![allow(dead_code)]
use std::sync::atomic::{
self,
AtomicBool,
AtomicUsize,
};
use std::cmp::Ordering;
use std::mem::{self, MaybeUninit};
use std::cell::UnsafeCell;
use std::ops::Drop;
pub mod iter;
/* XXX: We don't need this. We can just use `()`
#[cfg(all(nightly, feature="nightly"))]
type Void = !;
#[cfg(not(all(nightly, feature="nightly")))]
type Void = std::convert::Infallible;
*/
/// Like `PhantomData`, but for a lifetime. Essentially `PhantomData<&'a ()>`.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Copy)]
#[repr(transparent)]
struct PhantomLifetime<'a>(std::marker::PhantomData<&'a ()>);
impl<'a> PhantomLifetime<'a>
{
#[inline(always)]
pub const fn new() -> Self { Self (std::marker::PhantomData) }
}
unsafe impl<'a> Send for PhantomLifetime<'a>{}
unsafe impl<'a> Sync for PhantomLifetime<'a>{}
mod private
{
pub(crate) trait Sealed{}
}
/// A parallel, atomic populator of items
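///
/// # Example
/// A usage sketch (not compiled as a doctest; import paths are assumed): slots are
/// filled concurrently from scoped threads, then the finished slice is taken with
/// [`complete`](Populator::complete).
/// ```ignore
/// let pop = Populator::new(4);
/// std::thread::scope(|s| {
///     for i in 0..4 {
///         let pop = &pop;
///         s.spawn(move || { pop.insert(i, i * 10); });
///     }
/// });
/// assert!(pop.is_full());
/// let values: Box<[usize]> = pop.complete();
/// assert_eq!(&values[..], &[0, 10, 20, 30]);
/// ```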
#[derive(Debug)]
pub struct Populator<'a, T: 'a>
{
values: UnsafeCell<Box<[MaybeUninit<T>]>>, // value slots; slot `i` is initialised iff `populates[i]` is true
populates: Box<[AtomicBool]>, // per-slot flags: true once the value at the same index has been written
populated: AtomicUsize, // number of populated items
_lt: PhantomLifetime<'a>,
}
#[derive(Debug)] // PartialEq, PartialOrd
pub struct Ref<'re, 'a, T: 'a>
{
pop: &'re Populator<'a, T>,
idx: usize,
//TODO: Maybe add inserted bool, or state representing if this Ref has made a change to the populator. The value will be loaded on creation of the Ref, and will be used as a cached version of `completes[idx].load()`
//TODO: OR: Hold a reference to the actual AtomicBool at `idx` itself?
}
#[inline(always)]
/// Check whether two references (possibly of different types) point to the same starting address.
unsafe fn address_eq_overlap<'t, 'u, T, U>(a: &'t T, b: &'u U) -> bool
{
std::ptr::eq(a as *const _, b as *const _ as *const T)
}
#[inline(always)]
/// Check whether two references of the same type point to the same address.
fn address_eq<'a, 'b, T: ?Sized>(a: &'a T, b: &'b T) -> bool
{
std::ptr::eq(a as *const _, b as *const _)
}
impl<'re, 'a, T: 'a> PartialEq for Ref<'re, 'a, T>
{
#[inline]
fn eq(&self, other: &Self) -> bool
{
address_eq(self.pop, other.pop) && self.idx == other.idx
}
}
impl<'re, 'a, T: 'a> PartialOrd for Ref<'re, 'a, T>
{
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
if address_eq(self.pop, other.pop) {
self.idx.partial_cmp(&other.idx)
} else {
None
}
}
}
impl<'re, 'a, T: 'a> Ref<'re, 'a, T>
{
/// Checks if the referenced item currently exists.
#[inline]
pub fn exists(&self) -> bool
{
self.pop.exists(self.idx)
}
//TODO: Rest, including insertions, etc.
}
//TODO: RefEx: Exclusive reference, holds &'ref mut Populator<'a, T>
impl<'a, T: 'a> Populator<'a, T>
{
#[inline(always)]
fn values_mut(&mut self) -> &mut [MaybeUninit<T>]
{
self.values.get_mut()
}
#[inline(always)]
fn values_ref(&self) -> &[MaybeUninit<T>]
{
let ptr = self.values.get() as *const Box<[_]>;
unsafe {
&(*ptr)[..]
}
}
#[inline(always)]
fn get_mut_ptr(&self, idx: usize) -> *mut MaybeUninit<T>
{
let ptr = self.values.get();
unsafe {
&mut (*ptr)[idx] as *mut _
}
}
}
impl<'a, T> Drop for Populator<'a, T>
{
fn drop(&mut self)
{
if mem::needs_drop::<T>() {
let len = self.values_ref().len();
if *self.populated.get_mut() == len {
// Fully populated, drop whole slice in place
unsafe {
std::ptr::drop_in_place( self.values_mut() as *mut [MaybeUninit<T>] as *mut [T])
}
} else if len > 0 { // If len is 0, the values have already been taken out (e.g. by `[try_]complete()`).
// Partially populated, drop individual parts
for value in self.values.get_mut().iter_mut()
.zip(self.populates.iter()
.map(|x| x.load(atomic::Ordering::Acquire)))
.filter_map(|(v, prod)|
prod.then(move ||
v.as_mut_ptr()))
{
unsafe {
std::ptr::drop_in_place(value)
}
}
}
}
// Both boxes are deallocated after this; the values themselves have already been dropped above.
}
}
unsafe impl<'a, T: 'a> Send for Populator<'a, T> where Box<T>: Send {}
unsafe impl<'a, T: 'a> Sync for Populator<'a, T> where T: Send {} // `&Populator` lets other threads insert (and eventually drop) `T` values, so `T` must be `Send`
//TODO: Maybe add methods with Arc<Self> receivers?
impl<'a, T> Populator<'a, T>
{
/// Checks if an item exists at this index, using exclusive access.
///
/// Since this is an exclusive reference, no atomic operations are performed.
#[inline]
pub fn exists_exclusive(&mut self, idx: usize) -> bool
{
*self.populates[idx].get_mut()
}
/// Checks if an item currently exists at this index.
#[inline]
pub fn exists(&self, idx: usize) -> bool
{
self.populates[idx].load(atomic::Ordering::SeqCst)
}
/// How many items are populated
///
/// Faster access as this is an exclusive reference and no atomic operations are needed
#[inline]
pub fn populated_exclusive(&mut self) -> usize
{
*self.populated.get_mut()
}
#[inline]
/// How many items are populated
pub fn populated(&self) -> usize
{
self.populated.load(atomic::Ordering::Acquire)
}
/// Is the populator full?
#[inline]
pub fn is_full(&self) -> bool
{
self.populated() == self.len()
}
/// Number of items held by the populator
#[inline]
pub fn len(&self) -> usize
{
self.values_ref().len()
}
/// Number of items held by the populator
///
/// A faster access than normal `len()`, since this is an exclusive reference
#[inline]
pub fn len_exclusive(&mut self) -> usize
{
self.values.get_mut().len()
}
/// Create a new, empty populator with room for `size` items
pub fn new(size: usize) -> Self
{
Self {
// `vec![MaybeUninit::uninit(); size]` would require `T: Copy`, so instead allocate the capacity and extend `len` over it.
// SAFETY: `MaybeUninit<T>` requires no initialisation, so `.set_len(size)` over uninitialised capacity is sound.
values: UnsafeCell::new(unsafe {
let mut uninit = Vec::with_capacity(size);
uninit.set_len(size);
uninit
}.into_boxed_slice()),
populates: std::iter::repeat_with(|| false.into()).take(size).collect(),
populated: 0usize.into(),
_lt: PhantomLifetime::new(),
}
}
/// Try to insert `value` at `idx`.
///
/// If `idx` already has a value, `Err(value)` is returned; otherwise `value` is inserted and the new number of populated items is returned.
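///
/// # Example
/// A sketch (not compiled as a doctest; import paths are assumed):
/// ```ignore
/// let pop = Populator::new(2);
/// assert_eq!(pop.try_insert(0, "first"), Ok(1));
/// // The slot is already taken; the rejected value is handed back.
/// assert_eq!(pop.try_insert(0, "second"), Err("second"));
/// ```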
pub fn try_insert(&self, idx: usize, value: T) -> Result<usize, T>
{
//TODO: XXX: Should we use SeqCst -> Acquire, or Acquire -> Relaxed?
if let Ok(false) = self.populates[idx].compare_exchange(false, true, atomic::Ordering::SeqCst, atomic::Ordering::Acquire) {
// The value at idx hasn't been set
if cfg!(debug_assertions) {
match std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
let ptr = self.get_mut_ptr(idx); //self.values[idx].get();
unsafe {
*ptr = MaybeUninit::new(value);
}
})) {
Err(p) => std::panic::resume_unwind(p),
Ok(_) => (),
}
} else {
// SAFETY: This operation will never panic, since `values` and `populates` are always the same size
// SAFETY: We have already ensured that `values[idx]` does not contain a value.
unsafe {
*self.get_mut_ptr(idx) = MaybeUninit::new(value);
}
}
// Value is inserted, increment `populated`
Ok(self.populated.fetch_add(1, atomic::Ordering::SeqCst) + 1)
} else {
Err(value)
}
}
/// Get a [`Ref`] handle to the slot at `idx`, whether or not a value has been inserted there.
#[inline]
pub fn get_ref(&self, idx: usize) -> Ref<'_, 'a, T>
{
Ref {
pop: self,
idx
}
}
//TODO: get_excusive -> RefEx
/// Try to get an exclusive, mutable reference to an item at `idx` if an item exists there.
///
/// No atomic operations are performed since this is an exclusive reference.
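///
/// # Example
/// A sketch (not compiled as a doctest; import paths are assumed):
/// ```ignore
/// let mut pop = Populator::new(2);
/// pop.insert(0, String::from("hello"));
/// if let Some(s) = pop.try_get_exclusive_mut(0) {
///     s.push_str(", world");
/// }
/// assert_eq!(pop.try_get_exclusive(0), Some(&String::from("hello, world")));
/// assert!(pop.try_get_exclusive_mut(1).is_none());
/// ```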
#[inline]
pub fn try_get_exclusive_mut(&mut self, idx: usize) -> Option<&mut T>
{
if *self.populates[idx].get_mut() {
Some(unsafe{ self.values.get_mut()[idx].assume_init_mut() })
} else {
None
}
}
/// Try to get an exclusive, mutable reference to an item at `idx` if an item exists there.
///
/// No atomic operations are performed since this is an exclusive reference.
#[inline]
pub fn try_get_exclusive(&mut self, idx: usize) -> Option<&T>
{
self.try_get_exclusive_mut(idx).map(|&mut ref a| a)
}
/// Insert `value` into `idx`.
///
/// # Panics
/// If `idx` already has a value inserted.
#[inline] // Maybe?
pub fn insert(&self, idx: usize, value: T) -> usize
{
#[inline(never)]
#[cold]
fn panic_inserted(i: usize) -> !
{
panic!("There is already a value at {}", i)
}
match self.try_insert(idx, value) {
Ok(v) => v,
Err(_) => panic_inserted(idx),
}
}
/// Faster fullness check for when this instance has no other references
#[inline]
pub fn is_full_exclusive(&mut self) -> bool {
*self.populated.get_mut() == self.len()
}
#[inline(always)]
fn take_all(&mut self) -> (Box<[MaybeUninit<T>]>, Box<[AtomicBool]>)
{
let inner = self.values.get_mut();
(mem::replace(inner, vec![].into_boxed_slice()),
mem::replace(&mut self.populates, vec![].into_boxed_slice()))
}
#[inline(always)]
fn take_values(&mut self) -> Box<[MaybeUninit<T>]>
{
let inner = self.values.get_mut();
mem::replace(inner, vec![].into_boxed_slice())
}
/// If all values are populated, convert the populator into a boxed slice of the values and return it; otherwise return `Err(self)`.
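///
/// # Example
/// A sketch (not compiled as a doctest; import paths are assumed):
/// ```ignore
/// let pop = Populator::new(2);
/// pop.insert(0, 1i32);
/// // Not yet full: the populator is handed back unchanged.
/// let pop = pop.try_complete().unwrap_err();
/// pop.insert(1, 2);
/// assert_eq!(&pop.try_complete().unwrap()[..], &[1, 2]);
/// ```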
pub fn try_complete(mut self) -> Result<Box<[T]>, Self>
{
if *self.populated.get_mut() == self.len() {
//let ptr = Box::into_raw(std::mem::replace(&mut self.values, UnsafeCell::new(vec![].into_boxed_slice())).into_inner());
let ptr = {
let inner = self.values.get_mut();
Box::into_raw(mem::replace(inner, vec![].into_boxed_slice()))
};
Ok(unsafe {
Box::from_raw(ptr as *mut [T])
})
} else {
Err(self)
}
}
/// Returns the completed population.
///
/// # Panics
/// If the collection is not fully populated.
#[inline] // Maybe?
pub fn complete(self) -> Box<[T]>
{
#[inline(never)]
#[cold]
fn panic_uncomplete() -> !
{
panic!("Not all values had been populated")
}
match self.try_complete() {
Ok(v) => v,
Err(_) => panic_uncomplete(),
}
}
}
impl<'a, T: 'a> FromIterator<Option<T>> for Populator<'a, T>
{
fn from_iter<I: IntoIterator<Item = Option<T>>>(iter: I) -> Self {
let mut v = 0usize;
let (items, bools): (Vec<_>, Vec<AtomicBool>) = iter.into_iter()
.map(|x| x.map(|item| { v += 1; (MaybeUninit::new(item), true.into()) })
.unwrap_or((MaybeUninit::uninit(), false.into())))
.unzip();
debug_assert_eq!(items.len(), bools.len(), "items and populated flags must have the same length");
Self {
populated: v.into(),
values: UnsafeCell::new(items.into_boxed_slice()),
populates: bools.into_boxed_slice(),
_lt: PhantomLifetime::new(),
}
}
}
impl<'a, T: 'a> IntoIterator for Populator<'a, T>
{
type Item = T;
type IntoIter = iter::IntoIter<'a, T>;
#[inline]
fn into_iter(self) -> Self::IntoIter {
iter::IntoIter::create_from(self)
}
}
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
let result = 2 + 2;
assert_eq!(result, 4);
}
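
    // Hedged usage sketches for the public API above (`try_insert`, `is_full`,
    // `complete`, and the `FromIterator<Option<T>>` impl); the concrete values
    // are illustrative only.
    #[test]
    fn populate_and_complete() {
        let pop = super::Populator::new(3);
        for i in 0..3 {
            // `try_insert` reports the number of populated slots after the insertion.
            assert_eq!(pop.try_insert(i, i as i32), Ok(i + 1));
        }
        assert!(pop.is_full());
        assert_eq!(&pop.complete()[..], &[0, 1, 2]);
    }

    #[test]
    fn from_iter_of_options() {
        // `Some(_)` entries count as populated, `None` entries stay empty.
        let pop: super::Populator<'static, i32> = vec![Some(1), None, Some(3)].into_iter().collect();
        assert_eq!(pop.populated(), 2);
        assert!(pop.exists(0));
        assert!(!pop.exists(1));
    }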
}