//! Custom allocation framework for frozen (RO) and/or secure allocations, shared allocations, aliased allocations, etc. (polymorphic.)
#ifndef _ALLOC_H
#define _ALLOC_H

#ifdef __cplusplus
#include <memory>
#include <initializer_list>
#include <type_traits>
#endif

#include "constraints.hh"
#include "types.h"
#include "macros.h"

LINK_START(C)

typedef struct base_allocator alloc_t;
//TODO: C API analogue for the allocator interface below.

LINK_END

#if $CXX
extern "C" {
    struct base_allocator {
        //TODO: Allocator interface...
        virtual ~base_allocator();
    };
}

namespace alloc {
    class FrozenAllocator : public alloc_t {
        struct _impl;
    protected:
        struct anon_raw_secmem;

        // Untyped deleter
        struct deleter {
            friend class anon_raw_secmem;

            constexpr deleter(const deleter&) noexcept = default;
            constexpr deleter& operator=(const deleter&) noexcept = default;
            constexpr deleter(deleter&&) noexcept = default;
            constexpr deleter& operator=(deleter&&) noexcept = default;

            constexpr void operator()(void* restrict p) const noexcept { return apply_delete(p); }

            virtual ~deleter();
        protected:
            explicit deleter(std::shared_ptr<anon_raw_secmem>&& p);

            template<typename T>
            constexpr static void* erase_type(T* ptr) noexcept { return dynamic_cast<void*>(ptr); }
            template<typename T>
            constexpr static void const* erase_type(T const* ptr) noexcept { return dynamic_cast<void const*>(ptr); }

            template<typename T>
            constexpr static void* erase_type_unsafe(T* ptr) noexcept { return static_cast<void*>(ptr); }
            template<typename T>
            constexpr static T* add_type_unsafe(void* ptr) noexcept { return static_cast<T*>(ptr); }

            template<typename T>
            constexpr static void const* erase_type_unsafe(T const* ptr) noexcept { return static_cast<void const*>(ptr); }
            template<typename T>
            constexpr static T const* add_type_unsafe(void const* ptr) noexcept { return static_cast<T const*>(ptr); }

            virtual void apply_delete(void* restrict, bool = true) const noexcept;

            inline deleter* finalizer_group_id() const noexcept { return m_group_ptr ?: const_cast<deleter*>(this); }

            // See dtor impl
            void apply_finalizer_group(std::initializer_list<void*> = {}) const noexcept;
            void apply_finalizer_now(std::initializer_list<void*> = {}) const noexcept;

            // To prevent `anon_raw_secmem` being destroyed while there are still allocated values, the base class of the
            // deleter for those values holds a refcount (`m_manager_ref`). E.g. `std::unique_ptr<T, deleter_for<T>>`, where
            // `deleter_for<T> final : public deleter { virtual ~deleter_for(); ... };`, or `std::shared_ptr<T>`, where a
            // `std::shared_ptr<alloc_value>` is re-pointed via the aliasing ctor (`old, old->value_ptr()`) to yield a
            // `std::shared_ptr<T>`.
            std::shared_ptr<anon_raw_secmem> m_manager_ref;
        private:
            // Used to sync `this`'s group across copies.
            deleter* m_group_ptr;
        };

        struct alloc_info;
        struct alloc_value;

        template<typename T>
        struct deleter_for : virtual deleter {
            inline deleter_for(std::shared_ptr<anon_raw_secmem>&& m) : deleter(std::move(m)) {}
            virtual ~deleter_for() = default; // This will use deleter's dtor to remove allocations.

            inline void operator()(T* ptr) const noexcept { return apply_delete(deleter::erase_type_unsafe(ptr)); }
        protected:
            inline virtual void apply_delete_typed(T* ptr) const noexcept { ptr->~T(); }
        private:
            inline void apply_delete(void* restrict up, bool add_to_group = true) const noexcept override final {
                if constexpr(std::is_trivially_destructible_v<T>) {
                    deleter::apply_delete(up, add_to_group); // If the dtor is trivial, ignore it and use default behaviour.
                } else {
                    deleter::apply_delete(up, false);        // Unlock the memory, but do *not* add it to this deleter's finalizer group.
                    apply_delete_typed(static_cast<T*>(up)); // Apply the destructor for `T`.
                    deleter::apply_finalizer_now({up});      // Get `anon_raw_secmem` to deallocate the memory *now*, instead of at destruction.
                }
            }
        };
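
        // A minimal sketch of the owning-handle shape `deleter_for<T>` is intended to pair with
        // (illustrative only; `raw` and `manager` are hypothetical locals, not part of this header):
        //
        //   std::unique_ptr<T, deleter_for<T>> h{
        //       deleter::add_type_unsafe<T>(raw),      // `raw` points at storage already holding a constructed T
        //       deleter_for<T>{ std::move(manager) }   // `manager` is a std::shared_ptr<anon_raw_secmem>
        //   };
        //   // Destroying `h` invokes apply_delete(): ~T() runs via apply_delete_typed(), then
        //   // anon_raw_secmem is asked to unlock and deallocate the block immediately
        //   // (apply_finalizer_now) rather than batching it into a finalizer group.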

    public:
        FrozenAllocator(FrozenAllocator&&) noexcept;
        FrozenAllocator& operator=(FrozenAllocator&&);
        FrozenAllocator(const FrozenAllocator&) = delete;
        FrozenAllocator& operator=(const FrozenAllocator&) = delete;

        virtual ~FrozenAllocator();
    private:
        /// Opaque implementation (pimpl).
        std::unique_ptr<_impl> inner_;

        /// Manages everything about the actual allocations.
        //std::shared_ptr<anon_raw_secmem> m_manager;
        /// A map of the values inside the allocator. Members are destroyed in reverse declaration order,
        /// so all the living values are destroyed *before* `m_manager`'s destruction deallocates them.
        //std::map<void*, std::unique_ptr<alloc_value>> m_values;
    };
}
#endif

#endif /* _ALLOC_H */
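
// A minimal sketch of the destruction-order argument behind the commented-out members above.
// It relies only on the general C++ rule that non-static data members are destroyed in reverse
// declaration order; `manager_t` and `value_t` are hypothetical stand-ins for `anon_raw_secmem`
// and `alloc_value`, not names declared in this header:
//
//   struct example {
//       std::shared_ptr<manager_t>                m_manager; // destroyed second
//       std::map<void*, std::unique_ptr<value_t>> m_values;  // destroyed first, while m_manager is still alive
//   };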