#pragma once

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <limits>
#include <new>
#include <type_traits>
#include <utility>
#include <vector>

/**
 * @brief A high-performance arena allocator for bulk allocations.
 *
 * ArenaAllocator provides extremely fast memory allocation (~1ns per
 * allocation) by allocating large blocks and serving allocations from them
 * sequentially. It's designed for scenarios where many small objects need to be
 * allocated and can all be deallocated together.
 *
 * ## Key Features:
 * - **Ultra-fast allocation**: ~1ns per allocation vs ~20-270ns for malloc
 * - **Lazy initialization**: No memory allocated until first use
 * - **Intrusive linked list**: Minimal memory overhead using backward-linked blocks
 * - **Geometric growth**: Block sizes double to minimize allocations
 * - **Memory-efficient reset**: Frees unused blocks to prevent memory leaks
 * - **Proper alignment**: Respects alignment requirements for all types
 *
 * ## Performance Characteristics:
 * - Allocation: O(1) amortized
 * - Memory tracking: O(1) using accumulated counters
 * - Reset: O(n) where n is number of blocks (but frees memory)
 * - Destruction: O(n) where n is number of blocks
 *
 * ## Usage Examples:
 * ```cpp
 * // Basic allocation
 * ArenaAllocator arena(1024);
 * void* ptr = arena.allocate_raw(100);
 *
 * // Construct trivially destructible objects in-place
 * int* num = arena.construct<int>(42);
 * MyPOD* obj = arena.construct<MyPOD>(arg1, arg2); // If MyPOD is trivial
 *
 * // Track memory usage
 * size_t total = arena.total_allocated();
 * size_t used = arena.used_bytes();
 *
 * // Reset to reuse first block (frees others)
 * arena.reset();
 * ```
 *
 * ## Memory Management:
 * - Individual objects cannot be freed (by design)
 * - All memory is freed when the allocator is destroyed
 * - reset() frees all blocks except the first one
 * - Move semantics transfer ownership of all blocks
 *
 * ## Thread Safety:
 * Not thread-safe. Use separate instances per thread or external
 * synchronization.
 */
class ArenaAllocator {
private:
  /**
   * @brief Internal block structure for the intrusive linked list.
   *
   * Each block contains:
   * - The actual data storage immediately following the Block header
   * - Backward pointer to previous block (intrusive linked list)
   * - Accumulated counters for O(1) tracking operations
   */
  struct Block {
    uint32_t size;     ///< Size of this block's data area
    uint32_t offset;   ///< The offset of the first unused byte in the data area
    size_t total_size; ///< Accumulated size of this block + all previous blocks
    size_t total_used; ///< Accumulated offsets of previous blocks
    Block *prev;       ///< Pointer to previous block (nullptr for first block)

    /**
     * @brief Get pointer to the data area of this block.
     * @return Pointer to the start of the data area (after Block header).
     */
    char *data() { return reinterpret_cast<char *>(this + 1); }

    /**
     * @brief Create a new block with the specified size.
     * @param size Size of the data area for this block
     * @param prev Pointer to the previous block (nullptr for first block)
     * @return Pointer to the newly created block
     * @throws std::bad_alloc if memory allocation fails
     */
    static Block *create(size_t size, Block *prev) {
      if (size > std::numeric_limits<uint32_t>::max()) {
        throw std::bad_alloc();
      }
      void *memory = std::aligned_alloc(
          alignof(Block), align_up(sizeof(Block) + size, alignof(Block)));
      if (!memory) {
        throw std::bad_alloc();
      }
      size_t total_size = size + (prev ? prev->total_size : 0);
      size_t total_used = prev ? prev->total_used + prev->offset : 0;
      Block *block = new (memory)
          Block{uint32_t(size), /*offset*/ 0, total_size, total_used, prev};
      return block;
    }
  };

public:
  /**
   * @brief Construct an ArenaAllocator with the specified initial block size.
   *
   * No memory is allocated until the first allocation request (lazy
   * initialization). The initial block size is used for the first block and as
   * the baseline for geometric growth.
   *
   * @param initial_size Size in bytes for the first block (default: 1024)
   */
  explicit ArenaAllocator(size_t initial_size = 1024)
      : initial_block_size_(initial_size), current_block_(nullptr) {}

  /**
   * @brief Destructor - frees all allocated blocks.
   *
   * Traverses the intrusive linked list backwards from current_block_,
   * freeing each block. This ensures no memory leaks.
   */
  ~ArenaAllocator();

  /// Copy construction is not allowed (would be expensive and error-prone)
  ArenaAllocator(const ArenaAllocator &) = delete;
  /// Copy assignment is not allowed (would be expensive and error-prone)
  ArenaAllocator &operator=(const ArenaAllocator &) = delete;

  /**
   * @brief Move constructor - transfers ownership of all blocks.
   * @param other The ArenaAllocator to move from (will be left empty)
   */
  ArenaAllocator(ArenaAllocator &&other) noexcept;

  /**
   * @brief Move assignment operator - transfers ownership of all blocks.
   *
   * Frees any existing blocks in this allocator before taking ownership
   * of blocks from the other allocator.
   *
   * @param other The ArenaAllocator to move from (will be left empty)
   * @return Reference to this allocator
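   *
   * ## Example:
   * A usage sketch (variable names are illustrative):
   * ```cpp
   * ArenaAllocator scratch(4096);
   * scratch.allocate_raw(100);
   * ArenaAllocator owner(64);
   * owner = std::move(scratch); // owner now holds the blocks; scratch is left empty
   * ```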
   */
  ArenaAllocator &operator=(ArenaAllocator &&other) noexcept;

  /**
   * @brief Allocate raw memory with the specified size and alignment.
   *
   * This is the core allocation method providing ~1ns allocation performance.
   * It performs lazy initialization on first use and automatically grows
   * the arena when needed using geometric growth (doubling block sizes).
   *
   * For type-safe allocation, prefer the allocate<T>() template method.
   *
   * @param size Number of bytes to allocate (0 returns nullptr)
   * @param alignment Required alignment (default: alignof(std::max_align_t))
   * @return Pointer to allocated memory, or nullptr if size is 0
   * @throws std::bad_alloc if memory allocation fails
   *
   * ## Performance:
   * - O(1) amortized allocation time
   * - Respects alignment requirements with minimal padding
   * - Automatically creates new blocks when the current block is exhausted
   *
   * ## Example:
   * ```cpp
   * void* ptr1 = arena.allocate_raw(100);    // Default alignment
   * void* ptr2 = arena.allocate_raw(64, 16); // 16-byte aligned
   * MyStruct* ptr3 = static_cast<MyStruct*>(
   *     arena.allocate_raw(sizeof(MyStruct), alignof(MyStruct)));
   * ```
   *
   * ## Performance Note:
   * This method is kept inline in the header for maximum performance.
   * The allocation path is extremely hot and inlining eliminates function
   * call overhead, allowing the ~1ns allocation performance.
   */
  void *allocate_raw(uint32_t size,
                     size_t alignment = alignof(std::max_align_t)) {
    if (size == 0) {
      return nullptr;
    }

    if (!current_block_) {
      size_t block_size = std::max(size, initial_block_size_);
      add_block(block_size);
    }

    char *block_start = current_block_->data();
    uintptr_t block_addr = reinterpret_cast<uintptr_t>(block_start);
    size_t aligned_offset =
        align_up(block_addr + current_block_->offset, alignment) - block_addr;

    if (aligned_offset + size > current_block_->size) {
      size_t next_block_size = calculate_next_block_size(size);
      add_block(next_block_size);
      block_start = current_block_->data();
      block_addr = reinterpret_cast<uintptr_t>(block_start);
      aligned_offset = align_up(block_addr, alignment) - block_addr;
    }

    void *ptr = block_start + aligned_offset;
    current_block_->offset = aligned_offset + size;

    return ptr;
  }

  /**
   * @brief Reallocate memory, extending in place if possible or copying to a
   * new location.
   *
   * This method provides realloc-like functionality for the arena allocator.
   * If the given pointer was the last allocation and there's sufficient space
   * in the current block to extend it, the allocation is grown in place.
   * Otherwise, a new allocation is made and the old data is copied.
   *
   * @param ptr Pointer to the existing allocation (must be from this allocator)
   * @param old_size Size of the existing allocation in bytes
   * @param new_size Desired new size in bytes
   * @param alignment Required alignment (default: `alignof(std::max_align_t)`)
   * @return Pointer to the reallocated memory (may be the same as ptr or
   * different)
   * @throws std::bad_alloc if memory allocation fails
   *
   * ## Behavior:
   * - If new_size == old_size, returns ptr unchanged
   * - If new_size == 0, returns nullptr (no deallocation occurs)
   * - If ptr is null, behaves like allocate_raw(new_size, alignment)
   * - If ptr was the last allocation and space exists, extends in place
   *
   * ## Example:
   * ```cpp
   * void* ptr = arena.allocate_raw(100, alignof(int));
   * // ... use ptr ...
   * ptr = arena.realloc_raw(ptr, 100, 200, alignof(int)); // May extend in place or copy
   * ```
   *
   * ## Safety Notes:
   * - The caller must provide the correct old_size - this is not tracked
   * - The old pointer becomes invalid if a copy occurs
   * - Like malloc/realloc, the contents beyond old_size are uninitialized
   * - When copying to a new location, uses the specified alignment
   */
  void *realloc_raw(void *ptr, uint32_t old_size, uint32_t new_size,
                    uint32_t alignment = alignof(std::max_align_t));

  /**
   * @brief Type-safe version of realloc_raw for arrays of type T.
   *
   * @param ptr Pointer to the existing allocation (must be from this allocator)
   * @param old_size Size of the existing allocation in number of T objects
   * @param new_size Desired new size in number of T objects
   * @return Pointer to the reallocated memory (may be the same as ptr or
   * different)
   * @throws std::bad_alloc if memory allocation fails or size overflow occurs
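   *
   * ## Example:
   * A usage sketch (variable names are illustrative):
   * ```cpp
   * int* buf = arena.allocate<int>(10);
   * buf = arena.realloc(buf, 10, 20); // may extend in place or copy
   * ```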
   */
  template <typename T>
  T *realloc(T *ptr, uint32_t old_size, uint32_t new_size) {
    if (size_t(new_size) * sizeof(T) > std::numeric_limits<uint32_t>::max()) {
      throw std::bad_alloc();
    }
    return static_cast<T *>(realloc_raw(ptr, old_size * sizeof(T),
                                        new_size * sizeof(T), alignof(T)));
  }

  /**
   * @brief Construct an object of type T in the arena using placement new.
   *
   * This is a convenience method that combines allocation with in-place
   * construction. It properly handles alignment requirements for type T.
   *
   * @tparam T The type of object to construct (must be trivially destructible)
   * @tparam Args Types of constructor arguments
   * @param args Arguments to forward to T's constructor
   * @return Pointer to the constructed object
   * @throws std::bad_alloc if memory allocation fails
   *
   * ## Type Requirements:
   * T must be trivially destructible (std::is_trivially_destructible_v<T>).
   * This prevents subtle bugs since destructors are never called for objects
   * constructed in the arena.
   *
   * ## Example:
   * ```cpp
   * int* num = arena.construct<int>(42);                   // ✓ Trivially destructible
   * MyPOD* pod = arena.construct<MyPOD>(arg1, arg2);       // ✓ If MyPOD is trivial
   * std::string* str = arena.construct<std::string>("hi"); // ✗ Compile error!
   * ```
   *
   * ## Note:
   * Objects constructed this way cannot be individually destroyed.
   * Their destructors will NOT be called automatically - hence the requirement
   * for trivially destructible types.
   */
  template <typename T, typename... Args> T *construct(Args &&...args) {
    static_assert(
        std::is_trivially_destructible_v<T>,
        "ArenaAllocator::construct requires trivially destructible types. "
        "Objects constructed in the arena will not have their destructors "
        "called.");
    void *ptr = allocate_raw(sizeof(T), alignof(T));
    return new (ptr) T(std::forward<Args>(args)...);
  }

  /**
   * @brief Allocate space for an array of `size` objects of type T with proper
   * alignment.
   *
   * This is a type-safe convenience method that combines sizing and alignment
   * calculations for allocating arrays of type T. It's preferred over calling
   * allocate_raw() directly as it prevents common errors with size calculations
   * and alignment requirements.
   *
   * @tparam T The type of objects to allocate space for (must be trivially
   * destructible)
   * @param size Number of T objects to allocate space for
   * @return Pointer to allocated memory suitable for constructing an array of T
   * objects
   * @throws std::bad_alloc if memory allocation fails
   *
   * ## Type Requirements:
   * T must be trivially destructible (std::is_trivially_destructible_v<T>).
   * This ensures consistency with the arena allocator's design where
   * destructors are never called.
   *
   * ## Example:
   * ```cpp
   * // Allocate space for 100 integers
   * int* numbers = arena.allocate<int>(100);
   *
   * // Allocate space for 50 POD structs
   * MyPOD* objects = arena.allocate<MyPOD>(50);
   *
   * // Initialize some elements (no automatic construction)
   * numbers[0] = 42;
   * new (&objects[0]) MyPOD(arg1, arg2);
   * ```
   *
   * ## Note:
   * This method only allocates memory - it does not construct objects.
   * Use placement new or other initialization methods as needed.
   */
  template <typename T> T *allocate(uint32_t size) {
    static_assert(
        std::is_trivially_destructible_v<T>,
        "ArenaAllocator::allocate requires trivially destructible types. "
        "Objects allocated in the arena will not have their destructors "
        "called.");
    if (size == 0) {
      return nullptr;
    }
    if (size_t(size) * sizeof(T) > std::numeric_limits<uint32_t>::max()) {
      throw std::bad_alloc();
    }
    void *ptr = allocate_raw(sizeof(T) * size, alignof(T));
    return static_cast<T *>(ptr);
  }

  /**
   * @brief Reset the allocator to reuse the first block, freeing all others.
   *
   * This method provides memory-efficient reset behavior by:
   * 1. Keeping the first block for reuse
   * 2. Freeing all subsequent blocks to prevent memory leaks
   * 3. Resetting the allocation position to the start of the first block
   *
   * If no blocks have been allocated yet, this is a no-op.
   *
   * ## Performance:
   * - O(n) where n is the number of blocks to free
   * - Prevents memory leaks by freeing unused blocks
   * - Faster than destroying and recreating the allocator
   *
   * ## Example:
   * ```cpp
   * arena.allocate_raw(1000); // Creates blocks
   * arena.reset();            // Frees extra blocks, keeps the first
   * arena.allocate_raw(100);  // Reuses the first block
   * ```
   */
  void reset();

  /**
   * @brief Get the total number of bytes allocated across all blocks.
   *
   * Uses O(1) accumulated counters for fast retrieval.
   *
   * @return Total allocated bytes, or 0 if no blocks exist
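   *
   * ## Example:
   * A usage sketch; the values follow from the documented block-creation
   * behavior (the first block is sized max(request, initial block size)):
   * ```cpp
   * ArenaAllocator arena(1024);
   * arena.allocate<char>(100);
   * // arena.total_allocated() == 1024  (capacity of the first block)
   * // arena.used_bytes()      == 100
   * ```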
   */
  size_t total_allocated() const {
    return current_block_ ? current_block_->total_size : 0;
  }

  /**
   * @brief Get the number of bytes currently used for allocations.
   *
   * This includes all fully used previous blocks plus the used portion
   * of the current block. Uses O(1) accumulated counters.
   *
   * @return Number of bytes in use
   */
  size_t used_bytes() const {
    if (!current_block_) {
      return 0;
    }
    return current_block_->total_used + current_block_->offset;
  }

  /**
   * @brief Get the number of bytes available in the current block.
   *
   * @return Available bytes in current block, or 0 if no blocks exist
   */
  size_t available_in_current_block() const {
    return current_block_ ? current_block_->size - current_block_->offset : 0;
  }

  /**
   * @brief Get the total number of blocks in the allocator.
   */
  size_t num_blocks() const {
    size_t result = 0;
    for (auto *p = current_block_; p != nullptr; p = p->prev) {
      ++result;
    }
    return result;
  }

  /**
   * @brief Debug function to find all intra-arena pointers.
   *
   * Scans all used memory in the arena for 64-bit aligned values that could be
   * pointers to locations within the arena itself. This is useful for
   * understanding memory references and potential data structures.
   *
   * @return Vector of PointerInfo structs containing source and target
   * addresses
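   *
   * ## Example:
   * A usage sketch that prints every candidate pointer found in the arena:
   * ```cpp
   * for (const auto &info : arena.find_intra_arena_pointers()) {
   *   std::cout << info.source_addr << " -> " << info.target_addr << "\n";
   * }
   * ```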
   */
  struct PointerInfo {
    const void *source_addr;    ///< Address where the pointer was found
    size_t source_block_number; ///< Block number containing the source
    size_t source_offset;       ///< Offset within the source block
    const void *target_addr;    ///< Address the pointer points to
    size_t target_block_number; ///< Block number containing the target
    size_t target_offset;       ///< Offset within the target block

    PointerInfo(const void *src, size_t src_block, size_t src_offset,
                const void *target, size_t target_block, size_t target_offset)
        : source_addr(src), source_block_number(src_block),
          source_offset(src_offset), target_addr(target),
          target_block_number(target_block), target_offset(target_offset) {}
  };

  std::vector<PointerInfo> find_intra_arena_pointers() const;

  /**
   * @brief Find which block and offset a given address belongs to.
   *
   * @param addr The address to locate within the arena
   * @return AddressLocation with block number and offset; `found` is false if
   * the address does not lie within the arena
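   *
   * ## Example:
   * A usage sketch (variable names are illustrative):
   * ```cpp
   * int* p = arena.allocate<int>(4);
   * auto loc = arena.find_address_location(p);
   * if (loc.found) {
   *   // p lives in block loc.block_number at byte offset loc.offset_in_block
   * }
   * ```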
   */
  struct AddressLocation {
    size_t block_number;
    size_t offset_in_block;
    bool found;

    AddressLocation() : block_number(0), offset_in_block(0), found(false) {}
    AddressLocation(size_t block, size_t offset)
        : block_number(block), offset_in_block(offset), found(true) {}
  };

  AddressLocation find_address_location(const void *addr) const;

  /**
   * @brief Debug function to visualize the arena's layout and contents.
   *
   * Prints a detailed breakdown of all blocks, memory usage, and allocation
   * patterns. This is useful for understanding memory fragmentation and
   * allocation behavior during development and debugging.
   *
   * Output includes:
   * - Overall arena statistics (total allocated, used, blocks)
   * - Per-block breakdown with sizes and usage
   * - Memory utilization percentages
   * - Block chain visualization
   * - Optional memory content visualization
   *
   * @param out Output stream to write debug information to (default: std::cout)
   * @param show_memory_map If true, shows a visual memory map of used/free space
   * @param show_content If true, shows actual memory contents in hex and ASCII
   * @param content_limit Maximum bytes of content to show per block (default: 256)
   *
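   * ## Example:
   * A typical pair of calls (output shown below):
   * ```cpp
   * arena.debug_dump();                       // Summary statistics only
   * arena.debug_dump(std::cout, true, true);  // Also show memory map and contents
   * ```
   *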
   * ## Example Output:
   * ```
   * === Arena Debug Dump ===
   * Total allocated: 3072 bytes across 2 blocks
   * Currently used: 1596 bytes (52.0% utilization)
   * Available in current: 1476 bytes
   *
   * Block Chain (newest to oldest):
   *   Block #2: 2048 bytes [used: 572/2048 = 27.9%] <- current
   *   Block #1: 1024 bytes [used: 1024/1024 = 100.0%]
   *
   * Memory Contents:
   * Block #2 (first 256 bytes):
   *   0x0000: 48656c6c 6f20576f 726c6400 54657374 |Hello World.Test|
   * ```
   */
  void debug_dump(std::ostream &out = std::cout, bool show_memory_map = false,
                  bool show_content = false, size_t content_limit = 256) const;

private:
  /**
   * @brief Add a new block with the specified size to the allocator.
   *
   * Creates a new block and makes it the current block. Updates all
   * accumulated counters automatically through Block::create().
   *
   * @param size Size of the data area for the new block
   */
  void add_block(size_t size);

  /**
   * @brief Calculate the size for the next block using geometric growth.
   *
   * Uses a doubling strategy to minimize the number of blocks while
   * ensuring large allocations are handled efficiently.
   *
   * @param required_size Minimum size needed for the allocation
   * @return Size for the next block (max of required_size and doubled current
   * size)
   */
  size_t calculate_next_block_size(size_t required_size) const;

  /**
   * @brief Align a value up to the specified alignment boundary.
   *
   * Uses bit manipulation for efficient alignment calculation.
   * Only works with power-of-2 alignments; any other alignment returns the
   * value unchanged.
   *
   * This method is kept inline in the header for maximum performance
   * as it's called in the hot allocation path and benefits from inlining.
   *
   * @param value The value to align
   * @param alignment The alignment boundary (must be power of 2)
   * @return The aligned value
   */
  static size_t align_up(size_t value, size_t alignment) {
    if (alignment == 0 || (alignment & (alignment - 1)) != 0) {
      return value;
    }
    return (value + alignment - 1) & ~(alignment - 1);
  }

  /**
   * @brief Dump memory contents in hex/ASCII format.
   *
   * Displays memory in the classic hex dump format with 16 bytes per line,
   * showing both hexadecimal values and ASCII representation.
   *
   * @param out Output stream to write to
   * @param data Pointer to the memory to dump
   * @param size Number of bytes to dump
   */
  static void dump_memory_contents(std::ostream &out, const char *data,
                                   size_t size);

  /// Size used for the first block and baseline for geometric growth
  uint32_t initial_block_size_;
  /// Pointer to the current (most recent) block, or nullptr if no blocks exist
  Block *current_block_;
};

/**
 * @brief STL-compatible allocator that uses ArenaAllocator for memory
 * management.
 *
 * @tparam T The type of objects to allocate
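 *
 * ## Example:
 * A usage sketch (illustrative) showing the allocator plugged into a standard
 * container. Element types must be trivially destructible because
 * ArenaAllocator::allocate<T>() enforces that requirement:
 * ```cpp
 * ArenaAllocator arena(4096);
 * std::vector<int, ArenaStlAllocator<int>> v{ArenaStlAllocator<int>(&arena)};
 * v.reserve(100); // reserve up front; container growth leaves old buffers in the arena
 * for (int i = 0; i < 100; ++i) v.push_back(i);
 * ```
 * Because deallocate() is a no-op, every reallocation made by a growing
 * container stays in the arena until it is reset or destroyed.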
 */
template <typename T> class ArenaStlAllocator {
public:
  using value_type = T;
  using pointer = T *;
  using const_pointer = const T *;
  using reference = T &;
  using const_reference = const T &;
  using size_type = std::size_t;
  using difference_type = std::ptrdiff_t;

  template <typename U> struct rebind {
    using other = ArenaStlAllocator<U>;
  };

  explicit ArenaStlAllocator(ArenaAllocator *arena) noexcept : arena_(arena) {}

  template <typename U>
  ArenaStlAllocator(const ArenaStlAllocator<U> &other) noexcept
      : arena_(other.arena_) {}

  T *allocate(size_type n) {
    if (n == 0)
      return nullptr;
    return arena_->allocate<T>(n);
  }

  void deallocate(T *, size_type) noexcept {
    // Arena allocator doesn't support individual deallocation
  }

  template <typename U>
  bool operator==(const ArenaStlAllocator<U> &other) const noexcept {
    return arena_ == other.arena_;
  }

  template <typename U>
  bool operator!=(const ArenaStlAllocator<U> &other) const noexcept {
    return arena_ != other.arena_;
  }

  ArenaAllocator *arena_;

  template <typename U> friend class ArenaStlAllocator;
};