// NOTE: standard headers required by the code below; the project-local headers (declaring `base_allocator`,
//       `FrozenAllocator`, `types::type_hash()`, `id::`, `util::` and the `$CLASS` macros) are assumed as well.
#include <cstddef>
#include <cstdint>
#include <initializer_list>
#include <map>
#include <memory>
#include <type_traits>
#include <utility>
#include <strings.h> // explicit_bzero()

template<typename T> using type_hash_t   = decltype(types::type_hash<T>());
template<typename T> using type_hash_ref = std::remove_reference_t<type_hash_t<T>> const&;
template<typename T> using type_hash_ptr = std::remove_reference_t<type_hash_t<T>> const*;

extern "C" {
    base_allocator::~base_allocator() {}
}

namespace alloc {
    // Base class for all deallocations that happen within an `anon_raw_secmem` (managed, static, polymorphic, unmanaged, etc.)
    // (TODO: See below; the base should be moved into the header so typed children can be templated...)
    FrozenAllocator::deleter::~deleter()
    {
        //apply_finalizer_group(); // XXX: Do we actually need finalizer groups now? (See note below about `m_values`.)
    }

    FrozenAllocator::deleter::deleter(std::shared_ptr<anon_raw_secmem>&& p)
        : m_manager_ref(std::move(p)) {}
        // , m_group_ptr(static_cast(this)) {}

    void FrozenAllocator::deleter::scramble_memory(std::initializer_list<std::pair<void*, void*>> ptrs) const noexcept
    {
        for(auto [ps, pe] : ptrs) {
            // ps: start of range, pe: end of range (or nullptr, if the range must be looked up in `m_manager_ref`.)
            if(UNLIKELY(!pe)) {
                if(const auto& alloc = m_manager_ref->lookup_alloc(ps)) pe = alloc.range().second;
                else continue;
            } else if(UNLIKELY(ps == pe)) continue;

            intptr_t psa = reinterpret_cast<intptr_t>(std::to_address(ps)),
                     pea = reinterpret_cast<intptr_t>(std::to_address(pe));
            ASSUME(pea > psa);
            explicit_bzero(ps, util::ptr_diff(psa, pea));
        }
    }

    [[gnu::nonnull(2)]]
    std::pair<void*, void*> FrozenAllocator::deleter::apply_delete(void* restrict p, bool clean) const noexcept
    {
        // (XXX/NOTE: removal from the `m_values` map *causes* this to be invoked, i.e. a value removed from the
        //  map calls `deleter::operator()(uniq_p)`, which then calls `apply_delete(uniq_p, true)`.)

        // Look up the allocation info for pointer `p`.
        const auto& alloc = m_manager_ref->lookup_alloc(p);
        if(UNLIKELY(!alloc)) return {p, p};

        // TODO: allow the allocation (the memory corresponding to `p` from `m_manager_ref`) to be mutable (`munlock()` it.)

        // Get the full range (including alignment padding.)
        auto al_range = alloc.range();

        // Then, if `clean == true` (the default), `explicit_bzero()` the memory (range for `p` obtained from `m_manager_ref`.)
        if(LIKELY(clean))
            scramble_memory({al_range});

        // Return the range (or the end pointer of `p`'s range, for use in / as an iterator.)
        return al_range;
    }
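    // ----------------------------------------------------------------------------------------------------------
    // Illustrative sketch (not built, and not part of the allocator): the wipe-on-free pattern that
    // `scramble_memory()` / `apply_delete()` implement above, reduced to a standalone example.
    // `explicit_bzero()` is used rather than `memset()` because the compiler may not elide it as a dead store.
    // The name `secure_free` and the malloc/free pairing are hypothetical, for illustration only; the real code
    // wipes ranges inside the `anon_raw_secmem` arena instead of calling `free()`.
#if 0
#include <cstddef>
#include <cstdlib>
#include <strings.h>

    // Zero an allocation's full (padding-included) range before handing the memory back to the system.
    static void secure_free(void* p, std::size_t len) noexcept
    {
        if(!p) return;
        if(len) explicit_bzero(p, len); // wipe first: the contents must not survive the free
        std::free(p);
    }
#endif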
    // This is the *true manager* of the allocation arena; when it is destroyed, the memory is cleared.
    struct FrozenAllocator::anon_raw_secmem final : virtual id::unique
    {
        // Deleter that works on `alloc_value`s directly. //TODO: This ^ (the templated-base TODO above refers to this class.)
        struct deleter final : public deleter_for<alloc_value>
        {
        private:
            void apply_delete_typed(alloc_value* ptr) const noexcept override
            {
                //TODO: Destroy the typed object inside `ptr`. (XXX: Where should destructor info be kept?
                //      In `alloc_value` or `alloc_info`? I think `alloc_value`.)

                // Now run `alloc_value`'s own dtor.
                deleter_for<alloc_value>::apply_delete_typed(ptr);
            }
        };

        virtual ~anon_raw_secmem()
        {
            //TODO: Clear and `munmap()` the used page(s).
            //XXX: Due to how this is managed via `shared_ptr<>`, it is UB for this to be called *before* every
            //     actual allocation of the memory has been deallocated (via `deleter`, by which they must *all* be managed.)
        }
    };

    struct FrozenAllocator::alloc_info
    {
        id::uuid alloc_id;    // ID of this allocation.
        id::unique_ref owner; // Reference to the unique ID of the `anon_raw_secmem` that manages this allocation.
        type_hash_ptr type;   // Static pointer to the `types::type_hash<T>()` result of the `T` that this allocation is.
                              // (Basic RTTI: `type_hash_t` should only exist in static storage; otherwise we use `type_hash_ref` or `type_hash_ptr`.)
        struct { size_t size, align; } meta;
        void* area_start;
        void* area; // (presumably: `area_start` is the raw, padding-included region; `area` is the aligned pointer handed out.)
    };

    struct FrozenAllocator::alloc_value
    {
        typedef bool  (*vt_ctor)(alloc_value* to, ...);
        typedef bool  (*vt_copy)(const alloc_value& from, alloc_value* to);
        typedef bool  (*vt_move)(alloc_value&& from, alloc_value* to);
        typedef bool  (*vt_assign_copy)(const alloc_value& from, alloc_value* to);
        typedef bool  (*vt_assign_move)(alloc_value&& from, alloc_value* to);
        typedef bool  (*vt_destroy)(alloc_value* obj);
        typedef void* (*vt_this)(const alloc_value& from);
        typedef bool  (*vt_cast_into)(const alloc_value& a, alloc_value* b);
        /// vt_type_info: if `info` is not null, return whether `a` is the type referred to by `info`; otherwise,
        /// if `type` is not null, set `*type` to the type of `a`. Returns false if neither is possible with the
        /// arguments given.
        typedef bool  (*vt_type_info)(const alloc_value& a, const alloc_info* info, type_hash_ptr *restrict type);

        //TODO: How to create? An overloaded placement `operator new`, inside `alloc_info` or `anon_raw_secmem`?
        //      Since the storage for these is allocated and managed *by* `anon_raw_secmem`, that would make the
        //      most sense, I think... `alloc_info` holds the pointer to the specific allocation, its ID, etc.:
        //      stuff for ordered allocation lookup. This is managed (entirely) by `anon_raw_secmem`, and
        //      `std::unique_ptr` ensures it is not deleted naturally, but only removed from `anon_raw_secmem`.

        //! Basic RTTI impl that holds type-erased, alignment-padded values and gives out aligned `void*` pointers to them.
        //! NOTE: This class does *not* apply any destructor when destroyed; `anon_raw_secmem::deleter` should be used for that.
        struct {
            struct {
                vt_ctor      _create;
                vt_copy      _copy;
                vt_move      _move;
                vt_destroy   _destroy;
                vt_this      _this;
                vt_type_info _typeinfo;
                // We don't need the others; they can be constructed by combining calls to these
                // (e.g. assign_copy(new, old) = `destroy(old), copy(new, old)`.)
            } vt;
            size_t size, align;
        } meta;
        unsigned char data[];
    };

    struct FrozenAllocator::_impl
    {
        std::shared_ptr<anon_raw_secmem> m_manager;
        // XXX: the map's key type here is an assumption (`alloc_id`); the mapped type follows from the
        //      `deleter::operator()(uniq_p)` note above.
        std::map<id::uuid, std::unique_ptr<alloc_value, anon_raw_secmem::deleter>> m_values;
    };

#define $CLASS FrozenAllocator
    $ctor_move(m) noexcept : inner_(std::move(m.inner_)) {}
    $assign_move(m)
    {
        if($LIKELY(this != &m)) inner_ = std::move(m.inner_);
        return *this;
    }
    $dtor() {}
#undef $CLASS /* FrozenAllocator */
} // namespace alloc
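
// --------------------------------------------------------------------------------------------------------------
// Illustrative sketch (not built): one way the manual vtable inside `alloc_value::meta.vt` could be populated for
// a concrete `T`, which is what the "How to create?" TODO above is about. The names `erased_vtable`, `vtable_for`
// and `destroy_erased` are hypothetical and exist only for illustration; the real table uses the `vt_*` typedefs
// and would be filled in by whatever placement-new path `anon_raw_secmem` ends up owning.
#if 0
#include <cstddef>
#include <new>
#include <utility>

struct erased_vtable {
    void (*copy)(const void* from, void* to); // copy-construct into raw storage
    void (*move)(void* from, void* to);       // move-construct into raw storage
    void (*destroy)(void* obj);               // run the destructor in place
    std::size_t size, align;
};

// One table per T, generated from captureless lambdas (which decay to plain function pointers).
template<typename T>
constexpr erased_vtable vtable_for = {
    [](const void* from, void* to) { ::new (to) T(*static_cast<const T*>(from)); },
    [](void* from, void* to)       { ::new (to) T(std::move(*static_cast<T*>(from))); },
    [](void* obj)                  { static_cast<T*>(obj)->~T(); },
    sizeof(T), alignof(T),
};

// A caller that only sees `void*` + `const erased_vtable*` can still destroy the object correctly,
// which is the job `deleter::apply_delete_typed()` needs to do for the value stored in `data[]`.
inline void destroy_erased(const erased_vtable* vt, void* obj) { vt->destroy(obj); }
#endif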