Compare commits

...

13 Commits
avec ... master

Author SHA1 Message Date
Avril 51bc5113b5
Bump version 1.2.1: change core::intrinsics::r#try -> catch_unwind (https://github.com/notflan/stackalloc-rs/pull/2)
7 days ago
Avril 54009f74e8
Merge pull request #2 from elrafoon/catch_unwind
7 days ago
Avril 247895452c
Added feature `no_unwind_protection` for tests. build.rs: Added `rustc-check-cfg` for cfg flag `nightly`.
7 days ago
Stanislav Ravas a702bc88ff fix: change core::intrinsics::r#try -> catch_unwind
3 weeks ago
Avril 391a537107
Version bump (minor): New feature `no_std`. (https://github.com/notflan/stackalloc-rs/pull/1)
2 years ago
Avril ebb4f8d653
Merge pull request #1 from ParkerTenBroeck/master
2 years ago
Parker TenBroeck 84255549df Added catch_unwind for no_std
2 years ago
Parker TenBroeck 72d2860851 fixed test feature not being enabled for nightly no_std
2 years ago
Parker TenBroeck 58ee35daad fixed test dependency not being present for tests on nightly
2 years ago
Parker TenBroeck dc421e002b added optional no_std feature
2 years ago
Avril be0bc7b612
Added PLT for C trampolnine.
2 years ago
Avril 3a42d22081
fix linking error
3 years ago
Avril 675cd7cf37
added iterator functions
3 years ago

@ -1,6 +1,6 @@
[package]
name = "stackalloc"
version = "1.0.1"
version = "1.2.1"
homepage = "https://git.flanchan.moe/flanchan/stackalloc-rs"
repository = "https://github.com/notflan/stackalloc-rs"
keywords = ["alloca", "stack", "stack-allocation", "safe"]
@ -15,3 +15,10 @@ lazy_static = "1.4.0"
[build-dependencies]
cc = "1.0"
rustc_version = "0.2"
[features]
default = []
no_std = []
# for tests
no_unwind_protection = []

@ -11,6 +11,9 @@ However, it is still possible to cause a stack overflow by allocating too much m
# Requirements
The crate works on stable or nightly Rust, but a C99-compliant compiler is required to build.
# Features
* `no_std` - Enables `no_std` support on nightly toolchains.
# Examples
Allocating a byte buffer on the stack.
```rust

@ -13,11 +13,10 @@ fn build_tramp()
.flag("-Werror")
.flag("-pedantic")
.opt_level(3)
.flag_if_supported("-flto")
// Not sure if we want these two. We can check the codegen later.
// .pic(false)
.use_plt(false)
// .use_plt(false)
.file("alloca_trampoline_.c")
.compile("calloca_trampoline");
@ -26,6 +25,7 @@ fn build_tramp()
fn main() {
// Assert we haven't travelled back in time
assert!(version().unwrap().major >= 1);
println!("cargo::rustc-check-cfg=cfg(nightly)");
// Set cfg flags depending on release channel
match version_meta().unwrap().channel {

@ -68,8 +68,8 @@ impl<'a, T> Drop for AVec<'a, T>
/*
for x in slice::from_raw_parts_mut(self.inner.stack.buf_ptr, self.fill_ptr())
{
std::ptr::drop_in_place(x.as_mut_ptr());
}*/
std::ptr::drop_in_place(x.as_mut_ptr());
}*/
}
}
}
@ -90,7 +90,7 @@ impl<'a, T> AVec<'a, T>
/// Have the elements been moved to the heap?
pub fn is_allocated(&self) -> bool
{
self.fill_ptr() > self.stack_sz
self.fill_ptr() >= self.stack_sz
}
/// Create a new `AVec` with this backing buffer.
@ -109,4 +109,52 @@ impl<'a, T> AVec<'a, T>
_stack: PhantomData
}
}
/// Migrate every element currently held in the stack buffer into a freshly
/// allocated heap `Vec`, then switch `self.inner` over to its `heap` variant.
///
/// NOTE(review): assumes `inner` currently holds the `stack` variant; looks
/// like this is only called from `push()` right after the stack buffer became
/// full — confirm no other call sites exist.
fn move_to_heap(&mut self)
{
// SAFETY(review): reads the first `fill_ptr()` (initialised) elements out of
// the stack buffer by value; the old buffer is abandoned rather than dropped,
// so ownership of each element moves into `buf` exactly once.
let buf: Vec<T> = unsafe {
slice::from_raw_parts(self.inner.stack.buf_ptr as *const MaybeUninit<T>, self.fill_ptr()).iter().map(|x| x.as_ptr().read()).collect()
};
// Overwrite the union with the heap variant; `MaybeUninit` slots need no drop.
self.inner = Internal {
heap: ManuallyDrop::new(HeapBuffer {
// NOTE(review): stores `stack_sz` rather than the live element count;
// presumably equal here because this runs only when the buffer is full —
// TODO confirm.
_fill_ptr: self.stack_sz,
buf,
}),
};
}
/// Insert an element into this `AVec`.
///
/// While the stack buffer has room the element is written in place; the push
/// that fills the final stack slot triggers `move_to_heap()`, after which all
/// further pushes go straight to the heap `Vec`.
pub fn push(&mut self, item: T)
{
if self.is_allocated()
{
// SAFETY(review): `is_allocated()` implies `inner` holds the `heap` variant.
unsafe {
(*self.inner.heap).buf.push(item)
}
} else {
// SAFETY(review): not yet spilled, so `inner` holds the `stack` variant and
// `fill_ptr < stack_sz`, making the write below in-bounds. Assigning a
// `MaybeUninit<T>` drops nothing (it has no `Drop` impl).
unsafe {
let ptr = self.inner.stack.fill_ptr;
*self.inner.stack.buf_ptr.add(ptr) = MaybeUninit::new(item);
self.inner.stack.fill_ptr += 1;
// The stack buffer just became full (`fill_ptr >= stack_sz`):
if self.is_allocated() {
// Move all items to heap
self.move_to_heap();
}
}
}
}
/// The number of elements in this `AVec`.
///
/// Reads either the heap `Vec`'s length or the stack fill pointer, depending
/// on whether the elements have spilled to the heap.
pub fn len(&self) -> usize
{
match self.is_allocated() {
// SAFETY: once spilled, `inner` is guaranteed to hold the `heap` variant.
true => unsafe { self.inner.heap.buf.len() },
false => self.fill_ptr(),
}
}
}

@ -1,4 +1,4 @@
use std::ffi::c_void;
use core::ffi::c_void;
pub type CallbackRaw = unsafe extern "C" fn (ptr: *mut c_void, data: *mut c_void)->();

@ -89,9 +89,16 @@
#![allow(dead_code)]
#[cfg(nightly)] extern crate test;
use std::{
#![cfg_attr(all(feature = "no_std", not(test)), no_std)]
#![cfg_attr(all(feature = "no_std", not(feature="no_unwind_protection")), feature(core_intrinsics))]
// NOTE: This feature `no_unwind_protection` doesn't actually exist at the moment; since a binary crate built with #![no_std] will not be using a stable compiler toolchain. It was just for testing.
#[cfg(all(nightly, test))] extern crate test;
#[allow(unused)]
use core::{
mem::{
self,
MaybeUninit,
@ -106,7 +113,12 @@ use std::{
ptr,
};
//TODO: pub mod avec; pub use avec::AVec;
#[cfg(not(feature = "no_std"))]
pub mod avec;
#[cfg(not(feature = "no_std"))]
pub use avec::AVec;
mod ffi;
/// Allocate a runtime length uninitialised byte buffer on the stack, call `callback` with this buffer, and then deallocate the buffer.
@ -157,7 +169,15 @@ where F: FnOnce(&mut [MaybeUninit<u8>]) -> T
unsafe {
let slice = slice::from_raw_parts_mut(allocad_ptr as *mut MaybeUninit<u8>, size);
let callback = ManuallyDrop::take(&mut callback);
rval = MaybeUninit::new(panic::catch_unwind(AssertUnwindSafe(move || callback(slice))));
#[cfg(feature = "no_std")]
{
rval = MaybeUninit::new(catch_unwind(move||{callback(slice)}));
}
#[cfg(not(feature = "no_std"))]
{
rval = MaybeUninit::new(std::panic::catch_unwind(AssertUnwindSafe(move || callback(slice))));
}
}
};
@ -174,14 +194,68 @@ where F: FnOnce(&mut [MaybeUninit<u8>]) -> T
}
let rval = unsafe {
ffi::alloca_trampoline(size, create_trampoline(&callback), &mut callback as *mut _ as *mut c_void);
rval.assume_init()
ffi::alloca_trampoline(size, create_trampoline(&callback), &mut callback as *mut _ as *mut c_void);
rval.assume_init()
};
#[cfg(not(feature = "no_std"))]
match rval
{
Ok(v) => v,
Err(pan) => panic::resume_unwind(pan),
Ok(v) => v,
Err(pan) => std::panic::resume_unwind(pan),
}
#[cfg(feature = "no_std")]
return match rval{
Ok(v) => v,
Err(()) => core::panic!(),
}
}
#[cfg(all(feature = "no_std", feature = "no_unwind_protection"))]
/// Stub `catch_unwind`: unwind protection is disabled for this build (it
/// would require core intrinsics), so the closure is simply run and reported
/// as a success unconditionally.
unsafe fn catch_unwind<R, F: FnOnce() -> R>(f: F) -> Result<R, ()> {
let value = f();
Result::Ok(value)
}
/// Minimal `no_std` substitute for `std::panic::catch_unwind`, built on the
/// nightly-only `core::intrinsics::catch_unwind`.
///
/// Returns `Ok(r)` if `f` ran to completion and `Err(())` if it unwound; the
/// panic payload itself is discarded.
#[cfg(all(feature = "no_std", not(feature = "no_unwind_protection")))]
unsafe fn catch_unwind<R, F: FnOnce() -> R>(f: F) -> Result<R, ()>{
// Shared in/out slot: holds the closure before the call, the result after a
// successful call, or unit after a caught unwind.
union Data<F, R> {
f: ManuallyDrop<F>,
r: ManuallyDrop<R>,
p: (),
}
#[inline]
// Normal path: take the closure out of the slot, run it, store its result.
fn do_call<F: FnOnce() -> R, R>(data: *mut u8) {
unsafe {
let data = data as *mut Data<F, R>;
let data = &mut (*data);
let f = ManuallyDrop::take(&mut data.f);
data.r = ManuallyDrop::new(f());
}
}
#[inline]
// Landing pad: runs only if `do_call` unwound; the payload is ignored and the
// slot is reset to the unit variant.
fn do_catch<F: FnOnce() -> R, R>(data: *mut u8, _payload: *mut u8) {
unsafe {
let data = data as *mut Data<F, R>;
let data = &mut (*data);
data.p = ()
}
}
let mut data = Data { f: ManuallyDrop::new(f) };
let data_ptr = &mut data as *mut _ as *mut u8;
// The intrinsic returns 0 on normal completion, non-zero if an unwind was
// caught by `do_catch`.
if core::intrinsics::catch_unwind(do_call::<F, R>, data_ptr, do_catch::<F, R>) == 0{
// SAFETY(review): `do_call` completed, so `data.r` was written above.
Result::Ok(ManuallyDrop::into_inner(data.r))
}else{
Result::Err(())
}
}
@ -192,7 +266,7 @@ pub mod helpers {
use super::*;
/// Round `ptr` up to the alignment of `T` and reinterpret it as `*mut T`.
///
/// FIX(review): both `use std::mem::align_of;` and `use core::mem::align_of;`
/// were present (a duplicate import is a compile error); keep the `core` form,
/// matching the crate's `no_std` migration.
///
/// NOTE: when `ptr` is already aligned this still advances it by a full
/// `align_of::<T>()`; callers over-allocate by that amount to compensate.
#[inline(always)] pub(crate) fn align_buffer_to<T>(ptr: *mut u8) -> *mut T
{
use core::mem::align_of;
((ptr as usize) + align_of::<T>() - (ptr as usize) % align_of::<T>()) as *mut T
}
@ -228,12 +302,12 @@ use helpers::*;
where F: FnOnce(&mut [u8]) -> T
{
alloca(size, move |buf| {
// SAFETY: We zero-initialise the backing slice
callback(unsafe {
ptr::write_bytes(buf.as_mut_ptr(), 0, buf.len()); // buf.fill(MaybeUninit::zeroed());
slice_assume_init_mut(buf)
})
// SAFETY: We zero-initialise the backing slice
callback(unsafe {
ptr::write_bytes(buf.as_mut_ptr(), 0, buf.len()); // buf.fill(MaybeUninit::zeroed());
slice_assume_init_mut(buf)
})
})
}
@ -245,14 +319,14 @@ where F: FnOnce(&mut [u8]) -> T
#[inline] pub fn stackalloc_uninit<T, U, F>(size: usize, callback: F) -> U
where F: FnOnce(&mut [MaybeUninit<T>]) -> U
{
let size_bytes = (std::mem::size_of::<T>() * size) + std::mem::align_of::<T>();
let size_bytes = (core::mem::size_of::<T>() * size) + core::mem::align_of::<T>();
alloca(size_bytes, move |buf| {
let abuf = align_buffer_to::<MaybeUninit<T>>(buf.as_mut_ptr() as *mut u8);
debug_assert!(buf.as_ptr_range().contains(&(abuf as *const _ as *const MaybeUninit<u8>)));
unsafe {
callback(slice::from_raw_parts_mut(abuf, size))
}
})
let abuf = align_buffer_to::<MaybeUninit<T>>(buf.as_mut_ptr() as *mut u8);
debug_assert!(buf.as_ptr_range().contains(&(abuf as *const _ as *const MaybeUninit<u8>)));
unsafe {
callback(slice::from_raw_parts_mut(abuf, size))
}
})
}
/// Allocate a runtime length slice of `T` on the stack, fill it by calling `init_with`, call `callback` with this buffer, and then drop and deallocate the buffer.
@ -262,22 +336,22 @@ where F: FnOnce(&mut [MaybeUninit<T>]) -> U
/// See `alloca()`.
#[inline] pub fn stackalloc_with<T, U, F, I>(size: usize, mut init_with: I, callback: F) -> U
where F: FnOnce(&mut [T]) -> U,
I: FnMut() -> T
I: FnMut() -> T
{
stackalloc_uninit(size, move |buf| {
buf.fill_with(move || MaybeUninit::new(init_with()));
buf.fill_with(move || MaybeUninit::new(init_with()));
// SAFETY: We have initialised the buffer above
let buf = unsafe { slice_assume_init_mut(buf) };
let ret = callback(buf);
if mem::needs_drop::<T>()
{
// SAFETY: We have initialised the buffer above
let buf = unsafe { slice_assume_init_mut(buf) };
let ret = callback(buf);
if mem::needs_drop::<T>()
{
// SAFETY: We have initialised the buffer above
unsafe {
ptr::drop_in_place(buf as *mut _);
}
unsafe {
ptr::drop_in_place(buf as *mut _);
}
ret
})
}
ret
})
}
/// Allocate a runtime length slice of `T` on the stack, fill it by cloning `init`, call `callback` with this buffer, and then drop and deallocate the buffer.
@ -287,7 +361,7 @@ I: FnMut() -> T
/// See `alloca()`.
#[inline] pub fn stackalloc<T, U, F>(size: usize, init: T, callback: F) -> U
where F: FnOnce(&mut [T]) -> U,
T: Clone
T: Clone
{
stackalloc_with(size, move || init.clone(), callback)
}
@ -300,10 +374,84 @@ T: Clone
/// See `alloca()`.
#[inline] pub fn stackalloc_with_default<T, U, F>(size: usize, callback: F) -> U
where F: FnOnce(&mut [T]) -> U,
T: Default
T: Default
{
stackalloc_with(size, T::default, callback)
}
/// Collect an iterator into a stack allocated buffer up to `size` elements, call `callback` with this buffer, and then drop and deallocate the buffer.
///
/// See `stackalloc()`.
///
/// # Size
/// We will only take up to `size` elements from the iterator, the rest of the iterator is dropped.
/// If the iterator yield less elements than `size`, then the slice passed to callback will be smaller than `size` and only contain the elements actually yielded.
#[inline] pub fn stackalloc_with_iter<I, T, U, F>(size: usize, iter: I, callback: F) -> U
where F: FnOnce(&mut [T]) -> U,
I: IntoIterator<Item = T>,
{
stackalloc_uninit(size, move |storage| {
// Fill the buffer from the iterator, counting initialised slots. Zipping
// with `storage` first caps us at `size` elements without pulling one
// extra item from the iterator.
let mut initialised = 0;
for (slot, value) in storage.iter_mut().zip(iter)
{
*slot = MaybeUninit::new(value);
initialised += 1;
}
// SAFETY: exactly the first `initialised` elements were written above.
let filled = unsafe {
slice_assume_init_mut(&mut storage[..initialised])
};
let output = callback(filled);
if mem::needs_drop::<T>()
{
// SAFETY: every element of `filled` was initialised above.
unsafe {
ptr::drop_in_place(filled as *mut _);
}
}
output
})
}
/// Collect an exact size iterator into a stack allocated slice, call `callback` with this buffer, and then drop and deallocate the buffer.
///
/// See `stackalloc_with_iter()`.
///
/// # Size
/// If the implementation of `ExactSizeIterator` on `I` is incorrect and reports a longer length than the iterator actually produces, then the slice passed to `callback` is shortened to the number of elements actually produced.
#[inline] pub fn stackalloc_from_iter_exact<I, T, U, F>(iter: I, callback: F) -> U
where F: FnOnce(&mut [T]) -> U,
I: IntoIterator<Item = T>,
I::IntoIter: ExactSizeIterator,
{
// `ExactSizeIterator::len()` tells us exactly how many slots to reserve.
let it = iter.into_iter();
let n = it.len();
stackalloc_with_iter(n, it, callback)
}
/// Collect an iterator into a stack allocated buffer, call `callback` with this buffer, and then drop and deallocate the buffer.
///
/// # Safety
/// While the slice passed to `callback` is guaranteed to be safe to use, regardless of if the iterator fills (or tries to overfill) it, this function is still marked as `unsafe` because it trusts the iterator `I` reports an accurate length with its `size_hint()`.
/// It is recommended to instead use `stackalloc_with_iter()` specifying a strict upper bound on the buffer's size, or `stackalloc_from_iter_exact()` for `ExactSizeIterator`s, as this function may allocate far more, or far less (even 0) memory needed to hold all the iterator's elements; therefore this function will very easily not work properly and/or cause stack overflow if used carelessly.
///
/// If the standard library's `std::iter::TrustedLen` trait becomes stablised, this function will be changed to require that as a bound on `I` and this function will no longer be `unsafe`.
///
/// # Size
/// The size allocated for the buffer will be the upper bound of the iterator's `size_hint()` if one exists. If not, then the size allocated will be the lower bound of `size_hint()`.
/// This can potentially result in only some of the iterator being present in the buffer, or the buffer allocated being much larger than the iterator itself.
/// If this iterator does not have a good `size_hint()` for this purpose, use `stackalloc_with_iter()`, or `stackalloc_from_iter_exact()` if the iterator has an exact size.
#[inline] pub unsafe fn stackalloc_from_iter_trusted<I, T, U, F>(iter: I, callback: F) -> U
where F: FnOnce(&mut [T]) -> U,
I: IntoIterator<Item = T>,
{
// Trust the iterator's `size_hint()`: prefer the upper bound when one
// exists, otherwise fall back to the lower bound.
let iter = iter.into_iter();
let (lower, upper) = iter.size_hint();
stackalloc_with_iter(upper.unwrap_or(lower), iter, callback)
}
#[cfg(test)]
mod tests;

@ -1,5 +1,38 @@
//! Contains tests and benchmarks
#[test]
fn unknown_size_iter()
{
// An unbounded iterator must be truncated to the requested buffer size.
let source = 10..;
let expected: u64 = source.clone().take(10).sum();
let got = super::stackalloc_with_iter(10, source, |buf| buf.iter().copied().sum::<u64>());
assert_eq!(expected, got);
}
/// Checks that when the iterator yields fewer elements than the requested
/// buffer size, the callback sees a slice shortened to the actual count.
// FIX(review): this function was missing `#[test]`, so it was dead code and
// never executed by the test harness.
#[test]
fn unknown_size_iter_len() {
assert_eq!(super::stackalloc_with_iter(1024, 0..100, |buf| buf.len()), 100);
}
#[test]
fn exact_size_iter()
{
// An `ExactSizeIterator` source should produce a slice of exactly its length.
let items = vec![1u64, 2, 13, 24, 100];
let expected_len = items.len();
let expected_sum: u64 = items.iter().sum();
let computed = super::stackalloc_from_iter_exact(items, |buf| {
assert_eq!(buf.len(), expected_len);
buf.iter().copied().sum::<u64>()
});
assert_eq!(computed, expected_sum);
}
#[cfg(all(nightly, not(feature="no_std")))] // XXX: process will abort on no_std. This is expected, but won't "pass" this test.
#[test]
#[should_panic]
fn unwinding_over_boundary()
@ -149,7 +182,7 @@ mod bench
black_box(crate::alloca(SIZE, |b| {black_box(b);}));
})
}
#[bench]
fn stackalloc_of_zeroed_bytes_known(b: &mut Bencher)
{

Loading…
Cancel
Save