//! Extensions use std::{ marker::PhantomData, fmt, ops, }; use smallvec::SmallVec; /// Wrapper to derive debug for types that don't implement it. #[repr(transparent)] #[derive(Clone, PartialEq, Eq, Ord,PartialOrd, Hash)] pub struct OpaqueDebug(T); impl OpaqueDebug { /// Create a new wrapper #[inline] pub const fn new(value: T) -> Self { Self(value) } /// Consume into the value #[inline] pub fn into_inner(self) -> T { self.0 } } impl AsRef for OpaqueDebug { #[inline] fn as_ref(&self) -> &T { &self.0 } } impl AsMut for OpaqueDebug { #[inline] fn as_mut(&mut self) -> &mut T { &mut self.0 } } impl ops::Deref for OpaqueDebug { type Target = T; #[inline] fn deref(&self) -> &Self::Target { &self.0 } } impl ops::DerefMut for OpaqueDebug { #[inline] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl fmt::Debug for OpaqueDebug { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "") } } /// A trait for types that can insert objects at their end. pub trait BackInserter { /// Insert an object at the end of this container fn push_back(&mut self, value: T); } impl BackInserter for Vec { #[inline] fn push_back(&mut self, value: T) { self.push(value) } } impl BackInserter for SmallVec where T: smallvec::Array { #[inline] fn push_back(&mut self, value: T) { self.push(value) } } /// Absracts a closure for `BackInserter`. pub struct BackInsertPass(F, PhantomData) where F: FnMut(T); impl BackInsertPass { /// Create a new instance with this closure #[inline] pub fn new(func: F) -> Self { Self(func, PhantomData) } } impl BackInserter for BackInsertPass { #[inline] fn push_back(&mut self, value: T) { self.0(value) } } /// A `BackInserter` that will only add a max capacity of items before it starts dropping input to its `push_back` function. 
pub struct CappedBackInserter<'a, T>(&'a mut T, usize, usize);

impl<'a, T> CappedBackInserter<'a, T>
{
    /// Create a new instance with this max capacity
    #[inline]
    pub fn new(from: &'a mut T, cap: usize) -> Self
    {
        Self(from, 0, cap)
    }

    /// The number of elements pushed so far
    #[inline]
    pub fn len(&self) -> usize
    {
        self.1
    }

    /// `true` if nothing has been pushed yet
    #[inline]
    pub fn is_empty(&self) -> bool
    {
        self.1 == 0
    }

    /// The max number of elements allowed to be pushed
    #[inline]
    pub fn cap(&self) -> usize
    {
        self.2
    }
}

// Generic over the inserted item type `V`, so the cap wrapper works for any
// inner `BackInserter` (the mangled original's exact bound was unrecoverable;
// this form is strictly more general and keeps `push_back(value)` well-typed).
impl<'a, T, V> BackInserter<V> for CappedBackInserter<'a, T>
where T: BackInserter<V>
{
    #[inline]
    fn push_back(&mut self, value: V)
    {
        // Silently drop anything past the cap.
        if self.1 < self.2 {
            self.0.push_back(value);
            self.1 += 1;
        }
    }
}

/// `Option` as a `BackInserter` keeps only the most recently pushed value.
impl<T> BackInserter<T> for Option<T>
{
    fn push_back(&mut self, value: T)
    {
        *self = Some(value);
    }
}

/// Extra insertion operations for `Vec`.
pub trait VecExt<T>
{
    /// Insert many elements with exact size iterator
    fn insert_exact<Ex, I: IntoIterator<Item = T, IntoIter = Ex>>(&mut self, location: usize, slice: I)
    where Ex: ExactSizeIterator<Item = T>;

    /// Insert many elements
    fn insert_many<I: IntoIterator<Item = T>>(&mut self, location: usize, slice: I);
}

impl<T> VecExt<T> for Vec<T>
{
    #[cfg(not(feature = "experimental_inserter"))]
    #[inline(always)]
    fn insert_exact<Ex, I: IntoIterator<Item = T, IntoIter = Ex>>(&mut self, location: usize, slice: I)
    where Ex: ExactSizeIterator<Item = T>
    {
        // Default build: just defer to the safe splice-based path.
        self.insert_many(location, slice)
    }

    #[cfg(feature = "experimental_inserter")]
    fn insert_exact<Ex, I: IntoIterator<Item = T, IntoIter = Ex>>(&mut self, location: usize, slice: I)
    where Ex: ExactSizeIterator<Item = T>,
    {
        // Cold, non-inlined panic helpers keep the hot path small.
        #[inline(never)]
        #[cold]
        fn panic_len(l1: usize, l2: usize) -> !
        {
            panic!("Location must be in range 0..{}, got {}", l1, l2)
        }

        #[inline(never)]
        #[cold]
        fn inv_sz() -> !
{ panic!("ExactSizeIterator returned invalid size"); } if location >= self.len() { panic_len(self.len(), location); } let mut slice = slice.into_iter(); let slen = slice.len(); match slen { 0 => return, 1 => { self.insert(location, slice.next().unwrap()); return }, _ => (), }; self.reserve(slice.len()); unsafe { let this = self.as_mut_ptr().add(location); let len = self.len(); let rest = std::mem::size_of::() * (location..len).len(); libc::memmove(this.add(slen) as *mut libc::c_void, this as *mut libc::c_void, rest); let mut sent=0; match std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| { let mut this = this; for item in slice { if sent >= slen { inv_sz(); } this.write(item); this = this.add(1); sent+=1; } if sent != slen { inv_sz(); } })) { Err(e) => { // memory at (location+sent)..slen is now invalid, move the old one back before allowing unwind to contine libc::memmove(this.add(sent) as *mut libc::c_void, this.add(slen) as *mut libc::c_void, rest); self.set_len(len + sent); std::panic::resume_unwind(e) }, _ => (), } self.set_len(len + sent); } } #[inline] fn insert_many>(&mut self, location: usize, slice: I) { let slice = slice.into_iter(); match slice.size_hint() { (0, Some(0)) | (0, None) => (), (_, Some(bound)) | (bound, _) => self.reserve(bound), }; self.splice(location..location, slice); //let splice = self.split_off(location); //self.extend(slice.chain(splice.into_iter())); /* // shift everything across, replacing with the new values let splice: Vec<_> = self.splice(location.., slice).collect(); // ^ -- this allocation bugs me, but we violate aliasing rules if we don't somehow collect it before adding it back in so... // add tail back self.extend(splice);*/ } } #[cfg(test)] mod tests { use super::*; #[test] fn vec_insert_exact() { let mut vec = vec![0,1,2,8,9,10]; vec.insert_exact(3, [3,4,5,6, 7].iter().copied()); assert_eq!(&vec[..], &[0,1,2,3,4,5,6,7,8,9,10] ); } #[test] fn vec_insert_exact_nt() { macro_rules! 
string {
            // Shorthand for `String::from` in the fixtures below.
            ($str:literal) => (String::from($str));
        }

        let mut vec = vec![
            string!("Hello"),
            string!("world"),
            string!("foo"),
            string!("uhh"),
        ];
        let vec2 = vec![
            string!("Hello"),
            string!("world"),
            string!("hi"),
            string!("hello"),
            string!("foo"),
            string!("uhh"),
        ];
        vec.insert_exact(2, vec![string!("hi"), string!("hello")]);
        assert_eq!(&vec[..], &vec2[..]);
    }

    /// Benchmarks comparing the splice-based and unsafe inserters (nightly only).
    #[cfg(feature = "nightly")]
    mod benchmarks
    {
        use super::super::*;
        use test::{
            Bencher,
            black_box,
        };

        #[cfg(not(feature = "experimental_inserter"))]
        #[bench]
        fn move_exact(b: &mut Bencher)
        {
            let mut vec = vec![0, 10, 11, 12];
            let span = [0, 1, 2, 3];
            b.iter(|| {
                black_box(vec.insert_exact(vec.len() / 2, span.iter().copied()));
            });
        }

        #[bench]
        fn move_via_splice(b: &mut Bencher)
        {
            let mut vec = vec![0, 10, 11, 12];
            let span = [0, 1, 2, 3];
            b.iter(|| {
                black_box(vec.insert_many(vec.len() / 2, span.iter().copied()));
            });
        }

        #[cfg(feature = "experimental_inserter")]
        #[bench]
        fn move_via_unsafe(b: &mut Bencher)
        {
            let mut vec = vec![0, 10, 11, 12];
            let span = [0, 1, 2, 3];
            b.iter(|| {
                black_box(vec.insert_exact(vec.len() / 2, span.iter().copied()));
            });
        }
    }
}

/// Declare a UUID-backed ID newtype, with an optional doc string.
#[macro_export]
macro_rules! id_type {
    ($name:ident $(; $doc:literal)?) => {
        // Fix: `#[doc(comment = $doc)]` is not a valid attribute form; the
        // correct spelling for an attached doc string is `#[doc = $doc]`.
        $(#[doc = $doc])?
        #[derive(Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Hash, ::serde::Serialize, ::serde::Deserialize)]
        pub struct $name(uuid::Uuid);

        impl $name {
            /// Create a random new unique ID
            #[inline]
            fn id_new() -> Self
            {
                Self(::uuid::Uuid::new_v4())
            }

            /// Create from a UUID
            #[inline]
            fn id_from(from: ::uuid::Uuid) -> Self
            {
                Self(from)
            }
        }
    };
}

mod global_counter
{
    use std::sync::atomic::{
        Ordering,
        AtomicU64,
    };

    /// A monotonically increasing atomic counter.
    #[derive(Debug)]
    pub struct GlobalCounter(AtomicU64);

    impl GlobalCounter
    {
        /// Get and increment the counter
        pub fn get(&self) -> u64
        {
            self.0.fetch_add(1, Ordering::SeqCst)
        }

        /// Check if this `u64` is valid to have come from this counter.
        ///
        /// `get` returns the pre-increment value, so issued values are
        /// `0..current`; a valid value is strictly below the current count.
        /// (The previous `<=` comparison also accepted the next, not-yet-issued
        /// value — off-by-one.)
        pub fn valid(&self, val: u64) -> bool
        {
            val < self.0.load(Ordering::Acquire)
        }

        /// Create a new global counter.
pub const fn new() -> Self
        {
            Self(AtomicU64::new(0))
        }
    }
}
pub use global_counter::GlobalCounter;

/// Number of random bytes in `GLOBAL_SALT`.
const GLOBAL_SALT_SIZE: usize = 16;

lazy_static! {
    /// Process-wide random salt, populated once on first access and leaked to
    /// obtain a `'static` lifetime.
    pub static ref GLOBAL_SALT: &'static [u8] = {
        let mut this = Box::new([0u8; GLOBAL_SALT_SIZE]);
        getrandom::getrandom(&mut this[..]).expect("Failed to populate global salt");
        &Box::leak(this)[..]
    };
}

/// A wrapper for hashing with a specific salt.
#[derive(Debug, Hash)]
pub struct Salted<'a, T: std::hash::Hash>(&'a T, &'a [u8]);

impl<'a, T> Salted<'a, T>
where T: std::hash::Hash
{
    /// Create a new wrapper over `val` with this `salt`.
    pub fn new(val: &'a T, salt: &'a [u8]) -> Self
    {
        // (`&salt` in the original was a redundant double reference.)
        Self(val, salt)
    }
}

/// A wrapper for hashing with the global salt.
#[derive(Debug, Hash)]
pub struct GloballySalted<'a, T: std::hash::Hash>(&'a T, &'static [u8]);

impl<'a, T> GloballySalted<'a, T>
where T: std::hash::Hash
{
    /// Create a new wrapper over `val` using `GLOBAL_SALT`.
    pub fn new(val: &'a T) -> Self
    {
        Self(val, &GLOBAL_SALT[..])
    }
}

mod sha256_hasher
{
    use std::mem::size_of;
    use std::hash::{
        Hasher,
        Hash,
    };
    use sha2::{
        Digest,
        Sha256,
    };
    use cryptohelpers::sha256::Sha256Hash;

    /// Adapts a streaming SHA-256 digest to the `std::hash::Hasher` interface.
    struct Sha256Hasher(Sha256);

    impl Sha256Hasher
    {
        pub fn new() -> Self
        {
            Self(Sha256::new())
        }
    }

    impl Hasher for Sha256Hasher
    {
        fn write(&mut self, bytes: &[u8])
        {
            self.0.update(bytes);
        }

        fn finish(&self) -> u64
        {
            // `finish` takes `&self`, so finalize a clone and fold the first
            // 8 digest bytes into a little-endian u64.
            let ar = self.0.clone().finalize();
            let mut rest = [0u8; size_of::<u64>()];
            crate::bytes::move_slice(&mut rest[..], &ar[..]);
            u64::from_le_bytes(rest)
        }
    }

    /// Compute the full SHA-256 digest of any `Hash` type.
    pub trait Sha256HashExt
    {
        fn compute_sha256_hash(&self) -> Sha256Hash;
    }

    impl<T> Sha256HashExt for T
    where T: Hash
    {
        fn compute_sha256_hash(&self) -> Sha256Hash
        {
            let mut hasher = Sha256Hasher::new();
            self.hash(&mut hasher);
            hasher.0.into()
        }
    }
}
pub use sha256_hasher::Sha256HashExt;

/// Value may hold one in place or allocate on the heap to hold many.
pub type MaybeVec<T> = smallvec::SmallVec<[T; 1]>;