added iterator functions

master
Avril 3 years ago
parent bdba08664e
commit 675cd7cf37
Signed by: flanchan
GPG Key ID: 284488987C31F630

@@ -1,6 +1,6 @@
 [package]
 name = "stackalloc"
-version = "1.0.1"
+version = "1.1.0"
 homepage = "https://git.flanchan.moe/flanchan/stackalloc-rs"
 repository = "https://github.com/notflan/stackalloc-rs"
 keywords = ["alloca", "stack", "stack-allocation", "safe"]

@@ -68,8 +68,8 @@ impl<'a, T> Drop for AVec<'a, T>
             /*
             for x in slice::from_raw_parts_mut(self.inner.stack.buf_ptr, self.fill_ptr())
             {
-                std::ptr::drop_in_place(x.as_mut_ptr());
-            }*/
+                std::ptr::drop_in_place(x.as_mut_ptr());
+            }*/
         }
     }
 }
@@ -90,7 +90,7 @@ impl<'a, T> AVec<'a, T>
     /// Have the elements been moved to the heap?
     pub fn is_allocated(&self) -> bool
     {
-        self.fill_ptr() > self.stack_sz
+        self.fill_ptr() >= self.stack_sz
     }
     /// Create a new `AVec` with this backing buffer.
@@ -109,4 +109,52 @@ impl<'a, T> AVec<'a, T>
             _stack: PhantomData
         }
     }
+    /// Move the elements from the stack buffer into a heap-allocated `Vec`.
+    fn move_to_heap(&mut self)
+    {
+        // SAFETY: The first `fill_ptr()` elements of the stack buffer are initialised.
+        let buf: Vec<T> = unsafe {
+            slice::from_raw_parts(self.inner.stack.buf_ptr as *const MaybeUninit<T>, self.fill_ptr()).iter().map(|x| x.as_ptr().read()).collect()
+        };
+        self.inner = Internal {
+            heap: ManuallyDrop::new(HeapBuffer {
+                _fill_ptr: self.stack_sz,
+                buf,
+            }),
+        };
+    }
+    /// Insert an element into this `AVec`.
+    pub fn push(&mut self, item: T)
+    {
+        if self.is_allocated()
+        {
+            unsafe {
+                (*self.inner.heap).buf.push(item)
+            }
+        } else {
+            unsafe {
+                let ptr = self.inner.stack.fill_ptr;
+                *self.inner.stack.buf_ptr.add(ptr) = MaybeUninit::new(item);
+                self.inner.stack.fill_ptr += 1;
+                if self.is_allocated() {
+                    // The stack buffer is now full; move all items to the heap
+                    self.move_to_heap();
+                }
+            }
+        }
+    }
+    /// The number of elements in this `AVec`.
+    pub fn len(&self) -> usize
+    {
+        if self.is_allocated()
+        {
+            unsafe {
+                self.inner.heap.buf.len()
+            }
+        } else {
+            self.fill_ptr()
+        }
+    }
 }
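A rough usage sketch of the new `push` path follows. The constructor is not visible in this hunk; `AVec::new(buf)` below is assumed from the "Create a new `AVec` with this backing buffer" doc comment and is hypothetical as written:

use stackalloc::{avec::AVec, stackalloc_uninit};

stackalloc_uninit::<u32, _, _>(4, |buf| {
    // Hypothetical constructor, assumed from the doc comment above.
    let mut v = AVec::new(buf);
    for i in 0..4 { v.push(i); }  // fill the four stack slots exactly
    assert!(v.is_allocated());    // fill_ptr >= stack_sz: elements spilled to the heap
    v.push(4);                    // further pushes go to the heap Vec
    assert_eq!(v.len(), 5);
});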

@@ -106,7 +106,7 @@ use std::{
     ptr,
 };
-//TODO: pub mod avec; pub use avec::AVec;
+pub mod avec; pub use avec::AVec;
 mod ffi;
/// Allocate a runtime length uninitialised byte buffer on the stack, call `callback` with this buffer, and then deallocate the buffer.
@@ -228,12 +228,12 @@ use helpers::*;
 where F: FnOnce(&mut [u8]) -> T
 {
     alloca(size, move |buf| {
-        // SAFETY: We zero-initialise the backing slice
-        callback(unsafe {
-            ptr::write_bytes(buf.as_mut_ptr(), 0, buf.len()); // buf.fill(MaybeUninit::zeroed());
-            slice_assume_init_mut(buf)
-        })
+        // SAFETY: We zero-initialise the backing slice
+        callback(unsafe {
+            ptr::write_bytes(buf.as_mut_ptr(), 0, buf.len()); // buf.fill(MaybeUninit::zeroed());
+            slice_assume_init_mut(buf)
+        })
     })
 }
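A minimal usage sketch of `alloca` itself (documented above), mirroring the zeroing pattern in this hunk and assuming the crate is in scope as `stackalloc`:

use std::mem::MaybeUninit;

let all_zero = stackalloc::alloca(32, |buf: &mut [MaybeUninit<u8>]| {
    for b in buf.iter_mut() {
        *b = MaybeUninit::new(0); // zero-initialise every byte by hand
    }
    // Every element is initialised now, so copying the bytes back out is sound.
    buf.iter().all(|b| unsafe { b.assume_init() } == 0)
});
assert!(all_zero);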
@@ -247,12 +247,12 @@ where F: FnOnce(&mut [MaybeUninit<T>]) -> U
 {
     let size_bytes = (std::mem::size_of::<T>() * size) + std::mem::align_of::<T>();
     alloca(size_bytes, move |buf| {
-        let abuf = align_buffer_to::<MaybeUninit<T>>(buf.as_mut_ptr() as *mut u8);
-        debug_assert!(buf.as_ptr_range().contains(&(abuf as *const _ as *const MaybeUninit<u8>)));
-        unsafe {
-            callback(slice::from_raw_parts_mut(abuf, size))
-        }
-    })
+        let abuf = align_buffer_to::<MaybeUninit<T>>(buf.as_mut_ptr() as *mut u8);
+        debug_assert!(buf.as_ptr_range().contains(&(abuf as *const _ as *const MaybeUninit<u8>)));
+        unsafe {
+            callback(slice::from_raw_parts_mut(abuf, size))
+        }
+    })
 }
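A minimal usage sketch of `stackalloc_uninit` (the function this hunk belongs to, as named at its call sites below); the caller must initialise the slots before reading them:

use std::mem::MaybeUninit;

let total: u64 = stackalloc::stackalloc_uninit(8, |buf: &mut [MaybeUninit<u64>]| {
    for (i, slot) in buf.iter_mut().enumerate() {
        *slot = MaybeUninit::new(i as u64 * 2); // initialise every slot
    }
    // u64 needs no drop, so reading the initialised copies back out is sound.
    buf.iter().map(|slot| unsafe { slot.assume_init() }).sum()
});
assert_eq!(total, (0..8u64).map(|i| i * 2).sum::<u64>());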
/// Allocate a runtime length slice of `T` on the stack, fill it by calling `init_with`, call `callback` with this buffer, and then drop and deallocate the buffer.
@@ -262,22 +262,22 @@ where F: FnOnce(&mut [MaybeUninit<T>]) -> U
 /// See `alloca()`.
 #[inline] pub fn stackalloc_with<T, U, F, I>(size: usize, mut init_with: I, callback: F) -> U
 where F: FnOnce(&mut [T]) -> U,
-I: FnMut() -> T
+I: FnMut() -> T
 {
     stackalloc_uninit(size, move |buf| {
-        buf.fill_with(move || MaybeUninit::new(init_with()));
-        // SAFETY: We have initialised the buffer above
-        let buf = unsafe { slice_assume_init_mut(buf) };
-        let ret = callback(buf);
-        if mem::needs_drop::<T>()
-        {
-            // SAFETY: We have initialised the buffer above
-            unsafe {
-                ptr::drop_in_place(buf as *mut _);
-            }
-        }
-        ret
-    })
-}
+        buf.fill_with(move || MaybeUninit::new(init_with()));
+        // SAFETY: We have initialised the buffer above
+        let buf = unsafe { slice_assume_init_mut(buf) };
+        let ret = callback(buf);
+        if mem::needs_drop::<T>()
+        {
+            // SAFETY: We have initialised the buffer above
+            unsafe {
+                ptr::drop_in_place(buf as *mut _);
+            }
+        }
+        ret
+    })
+}
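A minimal usage sketch of `stackalloc_with`, assuming the crate is in scope as `stackalloc`; each slot is initialised by one call to the closure:

let mut counter = 0u32;
let v: Vec<u32> = stackalloc::stackalloc_with(5, || { counter += 1; counter }, |buf| buf.to_vec());
assert_eq!(v, [1, 2, 3, 4, 5]); // slots are filled front to back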
/// Allocate a runtime length slice of `T` on the stack, fill it by cloning `init`, call `callback` with this buffer, and then drop and deallocate the buffer.
@@ -287,7 +287,7 @@ I: FnMut() -> T
 /// See `alloca()`.
 #[inline] pub fn stackalloc<T, U, F>(size: usize, init: T, callback: F) -> U
 where F: FnOnce(&mut [T]) -> U,
-T: Clone
+T: Clone
 {
     stackalloc_with(size, move || init.clone(), callback)
 }
@@ -300,10 +300,84 @@ T: Clone
 /// See `alloca()`.
 #[inline] pub fn stackalloc_with_default<T, U, F>(size: usize, callback: F) -> U
 where F: FnOnce(&mut [T]) -> U,
-T: Default
+T: Default
 {
     stackalloc_with(size, T::default, callback)
 }
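Minimal usage sketches of the two convenience wrappers above (`stackalloc` clones `init` into each slot; `stackalloc_with_default` uses `T::default()`):

let filled = stackalloc::stackalloc(3, String::from("hi"), |buf| {
    buf.iter().all(|s| s.as_str() == "hi") // each slot is a clone of `init`
});
assert!(filled);

let sum: i32 = stackalloc::stackalloc_with_default(4, |buf: &mut [i32]| {
    buf[0] = 7; // the slots start out as i32::default(), i.e. 0
    buf.iter().sum()
});
assert_eq!(sum, 7);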
+/// Collect an iterator into a stack-allocated buffer of up to `size` elements, call `callback` with this buffer, and then drop and deallocate the buffer.
+///
+/// See `stackalloc()`.
+///
+/// # Size
+/// We will only take up to `size` elements from the iterator; the rest of the iterator is dropped.
+/// If the iterator yields fewer elements than `size`, then the slice passed to `callback` will be smaller than `size` and will only contain the elements actually yielded.
+#[inline] pub fn stackalloc_with_iter<I, T, U, F>(size: usize, iter: I, callback: F) -> U
+where F: FnOnce(&mut [T]) -> U,
+      I: IntoIterator<Item = T>,
+{
+    stackalloc_uninit(size, move |buf| {
+        let mut done = 0;
+        for (d, s) in buf.iter_mut().zip(iter.into_iter())
+        {
+            *d = MaybeUninit::new(s);
+            done += 1;
+        }
+        // SAFETY: We just initialised the first `done` elements of `buf` above.
+        let buf = unsafe {
+            slice_assume_init_mut(&mut buf[..done])
+        };
+        let ret = callback(buf);
+        if mem::needs_drop::<T>()
+        {
+            // SAFETY: We have initialised `buf` above
+            unsafe {
+                ptr::drop_in_place(buf as *mut _);
+            }
+        }
+        ret
+    })
+}
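A minimal usage sketch of `stackalloc_with_iter`; the buffer is capped at `size` elements even for an infinite iterator, and a shorter iterator yields a shorter slice:

let v: Vec<u64> = stackalloc::stackalloc_with_iter(4, 1u64.., |buf| buf.to_vec());
assert_eq!(v, [1, 2, 3, 4]); // only the first 4 items were taken

let n = stackalloc::stackalloc_with_iter(16, 0..3, |buf| buf.len());
assert_eq!(n, 3); // the slice only contains the elements actually yielded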
+/// Collect an exact-size iterator into a stack-allocated slice, call `callback` with this buffer, and then drop and deallocate the buffer.
+///
+/// See `stackalloc_with_iter()`.
+///
+/// # Size
+/// If the implementation of `ExactSizeIterator` on `I` is incorrect and reports a longer length than the iterator actually produces, then the slice passed to `callback` is shortened to the number of elements actually produced.
+#[inline] pub fn stackalloc_from_iter_exact<I, T, U, F>(iter: I, callback: F) -> U
+where F: FnOnce(&mut [T]) -> U,
+      I: IntoIterator<Item = T>,
+      I::IntoIter: ExactSizeIterator,
+{
+    let iter = iter.into_iter();
+    stackalloc_with_iter(iter.len(), iter, callback)
+}
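A minimal usage sketch of `stackalloc_from_iter_exact`; `vec::IntoIter` is an `ExactSizeIterator`, so the buffer is sized from its `len()`:

let words = vec!["stack", "alloc"];
let joined = stackalloc::stackalloc_from_iter_exact(words, |buf| buf.join("-"));
assert_eq!(joined, "stack-alloc");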
+/// Collect an iterator into a stack-allocated buffer, call `callback` with this buffer, and then drop and deallocate the buffer.
+///
+/// # Safety
+/// While the slice passed to `callback` is guaranteed to be safe to use regardless of whether the iterator fills (or tries to overfill) it, this function is still marked `unsafe` because it trusts that the iterator `I` reports an accurate length in its `size_hint()`.
+/// It is recommended to instead use `stackalloc_with_iter()`, specifying a strict upper bound on the buffer's size, or `stackalloc_from_iter_exact()` for `ExactSizeIterator`s, as this function may allocate far more, or far less (even zero), memory than is needed to hold all the iterator's elements; used carelessly, it can therefore easily misbehave and/or overflow the stack.
+///
+/// If the standard library's `std::iter::TrustedLen` trait becomes stabilised, this function will be changed to require that as a bound on `I`, and it will no longer be `unsafe`.
+///
+/// # Size
+/// The size allocated for the buffer will be the upper bound of the iterator's `size_hint()` if one exists; if not, the lower bound is used instead.
+/// This can potentially result in only some of the iterator being present in the buffer, or in the buffer allocated being much larger than the iterator itself.
+/// If the iterator does not have a good `size_hint()` for this purpose, use `stackalloc_with_iter()`, or `stackalloc_from_iter_exact()` if the iterator has an exact size.
+#[inline] pub unsafe fn stackalloc_from_iter_trusted<I, T, U, F>(iter: I, callback: F) -> U
+where F: FnOnce(&mut [T]) -> U,
+      I: IntoIterator<Item = T>,
+{
+    let iter = iter.into_iter();
+    stackalloc_with_iter(match iter.size_hint() {
+        // Prefer the reported upper bound; otherwise fall back to the lower bound.
+        (_, Some(x)) |
+        (x, _) => x,
+    }, iter, callback)
+}
#[cfg(test)]
mod tests;
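A minimal usage sketch of `stackalloc_from_iter_trusted`; `Range` reports an exact upper bound in its `size_hint()`, so trusting it here is sound, hence the `unsafe` block:

let total: usize = unsafe {
    stackalloc::stackalloc_from_iter_trusted(0..10usize, |buf| buf.iter().sum())
};
assert_eq!(total, 45);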

@@ -1,5 +1,37 @@
 //! Contains tests and benchmarks
+#[test]
+fn unknown_size_iter()
+{
+    let iter = 10..;
+    let result: u64 = iter.clone().take(10).sum();
+    assert_eq!(result, super::stackalloc_with_iter(10, iter, |buf| buf.iter().copied().sum::<u64>()));
+}
+#[test]
+fn unknown_size_iter_len() {
+    assert_eq!(super::stackalloc_with_iter(1024, 0..100, |buf| buf.len()), 100);
+}
+#[test]
+fn exact_size_iter()
+{
+    let iter = vec![
+        1,
+        2,
+        13,
+        24,
+        100,
+    ];
+    let len = iter.len();
+    let result: u64 = iter.iter().copied().sum();
+    assert_eq!(super::stackalloc_from_iter_exact(iter, |buf| {
+        assert_eq!(buf.len(), len);
+        buf.iter().copied().sum::<u64>()
+    }), result);
+}
 #[test]
 #[should_panic]
 fn unwinding_over_boundary()
@@ -149,7 +181,7 @@ mod bench
             black_box(crate::alloca(SIZE, |b| {black_box(b);}));
         })
     }
     #[bench]
     fn stackalloc_of_zeroed_bytes_known(b: &mut Bencher)
     {
