Added more _exclusive() functions with no atomic operations. Started adding Ref, which tracks a single slot.
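A rough, hypothetical usage sketch of the new `Ref` handle, based only on the signatures added in this diff (`get_ref()` and `exists()`; insertion through a `Ref` is still a TODO) and assuming `Populator` from this module is in scope:

    let pop = Populator::new(4);
    pop.insert(2, 42);

    // A `Ref` is a cheap (&Populator, index) handle; it may point at an empty slot.
    let filled = pop.get_ref(2);
    let empty = pop.get_ref(0);
    assert!(filled.exists());  // slot 2 was populated above
    assert!(!empty.exists());  // slot 0 is still empty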

Fixed partial iterator's size_hint() not updating when the iterator is consumed.
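The fix is the `self.2-=1;` line in the first hunk below. As a self-contained illustration of the pattern (not parapop's actual `PartialIter`; names here are hypothetical), a wrapper that keeps its remaining-count field in sync with `next()` so `size_hint()`/`len()` stay accurate could look like this:

    // Illustration only: an iterator plus an explicit count of items remaining.
    struct Counted<I>(I, usize);

    impl<I: Iterator> Iterator for Counted<I> {
        type Item = I::Item;
        fn next(&mut self) -> Option<Self::Item> {
            let item = self.0.next()?;
            self.1 -= 1; // without this, size_hint() keeps reporting the original length
            Some(item)
        }
        fn size_hint(&self) -> (usize, Option<usize>) {
            (self.1, Some(self.1))
        }
    }
    impl<I: Iterator> ExactSizeIterator for Counted<I> {}

    fn main() {
        let mut it = Counted(1..=3, 3);
        it.next();
        assert_eq!(it.len(), 2); // stays in sync as the iterator is consumed
    }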

Fortune for parapop's current commit: Future small blessing − 末小吉
master
Avril 3 years ago
parent 49dd362b5b
commit 5ca9459495
Signed by: flanchan
GPG Key ID: 284488987C31F630

@@ -0,0 +1 @@
avril@eientei.11314:1642638923

@@ -33,10 +33,13 @@ impl<T> PartialIter<T> {
let a = self.0.next()?;
let b = if cfg!(debug_assertions) { self.1.next()? } else { unsafe { self.1.next().unwrap_unchecked() } };
self.2-=1;
Some((a, b.into_inner()))
}
}
impl<T> Iterator for PartialIter<T>
{
type Item = T;
@@ -117,7 +120,7 @@ unsafe fn assume_init_boxed<T>(bx: Box<[MaybeUninit<T>]>) -> Box<[T]>
impl<'a, T> IntoIter<'a, T>
{
pub(super) fn create_from(mut pop: Populator<'a, T>) -> Self
{
Self(if pop.is_full_exclusive() {
let values = pop.take_values();
@@ -146,3 +149,29 @@ impl<'a, T> Iterator for IntoIter<'a, T>
}
impl<'a, T> FusedIterator for IntoIter<'a, T>{}
impl<'a, T> ExactSizeIterator for IntoIter<'a, T>{}
#[derive(Debug)]
struct FullIterRef<'a, T>(std::slice::Iter<'a, T>);
impl<'a, T> Iterator for FullIterRef<'a, T>
{
type Item = &'a T;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
self.0.next()
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
}
impl<'a, T> DoubleEndedIterator for FullIterRef<'a, T>
{
fn next_back(&mut self) -> Option<Self::Item> {
self.0.next_back()
}
}
impl<'a, T> FusedIterator for FullIterRef<'a, T>{}
impl<'a, T> ExactSizeIterator for FullIterRef<'a, T>{}

@@ -6,12 +6,25 @@ use std::sync::atomic::{
AtomicBool,
AtomicUsize,
};
use std::cmp::Ordering;
use std::mem::{self, MaybeUninit};
use std::cell::UnsafeCell;
use std::ops::Drop;
pub mod iter;
use iter::*;
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Copy)]
#[repr(transparent)]
struct PhantomLifetime<'a>(std::marker::PhantomData<&'a ()>);
impl<'a> PhantomLifetime<'a>
{
#[inline(always)]
pub const fn new() -> Self { Self (std::marker::PhantomData) }
}
unsafe impl<'a> Send for PhantomLifetime<'a>{}
unsafe impl<'a> Sync for PhantomLifetime<'a>{}
mod private
{
@@ -19,14 +32,71 @@ mod private
}
/// A parallel, atomic populator of items
#[derive(Debug)]
pub struct Populator<'a, T: 'a>
{
values: UnsafeCell<Box<[MaybeUninit<T>]>>,
populates: Box<[AtomicBool]>, //
populated: AtomicUsize, // number of populated items
_lt: PhantomLifetime<'a>,
}
#[derive(Debug)] // PartialEq, PartialOrd
pub struct Ref<'re, 'a, T: 'a>
{
pop: &'re Populator<'a, T>,
idx: usize,
//TODO: Maybe add inserted bool, or state representing if this Ref has made a change to the populator. The value will be loaded on creation of the Ref, and will be used as a cached version of `completes[idx].load()`
//TODO: OR: Hold a reference to the actual AtomicBool at `idx` itself?
}
#[inline(always)]
unsafe fn address_eq_overlap<'t, 'u, T, U>(a: &'t T, b: &'u U) -> bool
{
std::ptr::eq(a as *const _, b as *const _ as *const T)
}
#[inline(always)]
fn address_eq<'a, 'b, T: ?Sized>(a: &'a T, b: &'b T) -> bool
{
std::ptr::eq(a as *const _, b as *const _)
}
impl<'re, 'a, T: 'a> PartialEq for Ref<'re, 'a, T>
{
#[inline]
fn eq(&self, other: &Self) -> bool
{
address_eq(self.pop, other.pop) && self.idx == other.idx
}
}
impl<'re, 'a, T: 'a> PartialOrd for Ref<'re, 'a, T>
{
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
if address_eq(self.pop, other.pop) {
self.idx.partial_cmp(&other.idx)
} else {
None
}
}
}
impl<'re, 'a, T: 'a> Ref<'re, 'a, T>
{
/// Checks if the referenced item currently exists.
#[inline]
pub fn exists(&self) -> bool
{
self.pop.exists(self.idx)
}
//TODO: Rest, including insertions, etc.
}
//TODO: RefEx: Exclusive reference, holds &'ref mut Populator<'a, T>
impl<'a, T: 'a> Populator<'a, T>
{
#[inline(always)]
fn values_mut(&mut self) -> &mut [MaybeUninit<T>]
@@ -53,7 +123,7 @@ impl<T> Populator<T>
}
}
impl<'a, T> Drop for Populator<'a, T>
{
fn drop(&mut self)
@@ -84,18 +154,41 @@ impl<T> Drop for Populator<T>
}
}
unsafe impl<'a, T: 'a> Send for Populator<'a, T> where Box<T>: Send {}
unsafe impl<'a, T: 'a> Sync for Populator<'a, T>{} // Populator is always sync
impl<'a, T> Populator<'a, T>
{
/// Checks if an item exists at this index exclusively.
///
/// Since this is an exclusive reference, no atomic operations are performed.
#[inline]
pub fn exists_exclusive(&mut self, idx: usize) -> bool
{
*self.populates[idx].get_mut()
}
/// Checks if an item exists currently at this index.
#[inline]
pub fn exists(&self, idx: usize) -> bool
{
self.populates[idx].load(atomic::Ordering::SeqCst)
}
/// How many items are populated
///
/// Faster access as this is an exclusive reference and no atomic operations are needed
#[inline]
pub fn populated_exclusive(&mut self) -> usize
{
*self.populated.get_mut()
}
#[inline]
/// How many items are populated
pub fn populated(&self) -> usize
{
self.populated.load(atomic::Ordering::Acquire)
}
/// Is the populator full?
#[inline]
pub fn is_full(&self) -> bool
{
self.populated() == self.len()
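The `*_exclusive()` variants above take `&mut self`, so they can read the population flag and counter via `get_mut()` instead of atomic loads. A hypothetical, doctest-style sketch (assuming `Populator` from this module is in scope; not a tested example):

    let mut pop = Populator::new(3);
    pop.insert(0, "a");

    // `&mut self` rules out concurrent writers, so these reads skip the atomics entirely.
    assert!(pop.exists_exclusive(0));
    assert!(!pop.exists_exclusive(1));
    assert_eq!(pop.populated_exclusive(), 1);

    // The shared-reference counterparts perform the same checks with atomic loads.
    assert!(pop.exists(0));
    assert_eq!(pop.populated(), 1);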
@@ -106,6 +199,16 @@ impl<T> Populator<T>
{
self.values_ref().len()
}
/// Number of items held by the populator
///
/// A faster access than normal `len()`, since this is an exclusive reference
#[inline]
pub fn len_exclusive(&mut self) -> usize
{
self.values.get_mut().len()
}
/// Create a new, empty populator with this size
pub fn new(size: usize) -> Self
{
@@ -118,6 +221,8 @@ impl<T> Populator<T>
}.into_boxed_slice()),
populates: std::iter::repeat_with(|| false.into()).take(size).collect(),
populated: 0usize.into(),
_lt: PhantomLifetime::new(),
}
}
@@ -154,10 +259,43 @@ impl<T> Populator<T>
}
}
/// Get a reference to an item at `idx` whether it exists or not.
#[inline]
pub fn get_ref(&self, idx: usize) -> Ref<'_, 'a, T>
{
Ref {
pop: self,
idx
}
}
/// Try to get an exclusive, mutable reference to an item at `idx` if an item exists there.
///
/// No atomic operations are performed since this is an exclusive reference.
#[inline]
pub fn try_get_exclusive_mut(&mut self, idx: usize) -> Option<&mut T>
{
if *self.populates[idx].get_mut() {
Some(unsafe{ self.values.get_mut()[idx].assume_init_mut() })
} else {
None
}
}
/// Try to get an exclusive reference to an item at `idx` if an item exists there.
///
/// No atomic operations are performed since this is an exclusive reference.
#[inline]
pub fn try_get_exclusive(&mut self, idx: usize) -> Option<&T>
{
self.try_get_exclusive_mut(idx).map(|&mut ref a| a)
}
/// Insert `value` into `idx`.
///
/// # Panics
/// If `idx` already has a value inserted.
#[inline] // Maybe?
pub fn insert(&self, idx: usize, value: T) -> usize
{
#[inline(never)]
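Likewise, `try_get_exclusive_mut()`/`try_get_exclusive()` above only need a plain read of the slot's flag because they hold `&mut self`. A hypothetical sketch of mutating a populated slot in place (again assuming this module is in scope):

    let mut pop = Populator::new(2);
    pop.insert(1, String::from("hello"));

    if let Some(s) = pop.try_get_exclusive_mut(1) {
        s.push('!'); // mutate the populated slot in place
    }
    assert!(pop.try_get_exclusive(0).is_none()); // empty slots yield None
    assert_eq!(pop.try_get_exclusive(1).map(String::as_str), Some("hello!"));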
@@ -215,6 +353,7 @@ impl<T> Populator<T>
///
/// # Panics
/// If the collection is not fully populated.
#[inline] // Maybe?
pub fn complete(self) -> Box<[T]>
{
#[inline(never)]
@@ -231,10 +370,31 @@ impl<T> Populator<T>
}
}
impl<'a, T: 'a> FromIterator<Option<T>> for Populator<'a, T>
{
fn from_iter<I: IntoIterator<Item = Option<T>>>(iter: I) -> Self {
let mut v =0usize;
let (items, bools) : (Vec<_>, Vec<AtomicBool>) = iter.into_iter()
.map(|x| x.map(|item| { v +=1; (MaybeUninit::new(item), true.into()) })
.unwrap_or((MaybeUninit::uninit(),false.into())))
.unzip();
debug_assert_eq!(items.len(), bools.len(), "invalid ");
Self {
populated: v.into(),
values: UnsafeCell::new(items.into_boxed_slice()),
populates: bools.into_boxed_slice(),
_lt: PhantomLifetime::new(),
}
}
}
impl<'a, T: 'a> IntoIterator for Populator<'a, T> // FUCK why do we need to make this 'static???? fuck this... dyn dispatch in rust is so jank. why can't we use 'a!!!
{
type Item = T;
type IntoIter = iter::IntoIter<'a, T>;
#[inline]
fn into_iter(self) -> Self::IntoIter {
