Accurately track used bytes in Arena
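The gist of the change, as a minimal standalone sketch rather than the actual header: each Block now records its own offset (the first unused byte) together with a total_used accumulator holding the summed offsets of all previous blocks, so the arena-wide used-byte count is an O(1) sum taken from the current block alone. The struct fields mirror the diff below; the used_bytes helper name and the hand-built two-block chain are illustrative assumptions, since the accessor's name is not visible in the hunks.

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Standalone model of the new Block bookkeeping (not the real header).
struct Block {
  uint32_t size;     ///< Size of this block's data area
  uint32_t offset;   ///< First unused byte in the data area
  size_t total_size; ///< Size of this block + all previous blocks
  size_t total_used; ///< Accumulated offsets of previous blocks
  Block *prev;       ///< Previous block, nullptr for the first one
};

// Hypothetical accessor; the diff only shows the new return statement.
size_t used_bytes(const Block *current) {
  return current ? current->total_used + current->offset : 0;
}

int main() {
  Block first{1024, 1000, 1024, 0, nullptr};  // 1000 of 1024 bytes used
  Block second{2048, 16, 3072, 1000, &first}; // carries first's 1000 used bytes
  std::printf("used bytes: %zu\n", used_bytes(&second)); // prints 1016
}

In the diff, Block::create seeds total_used as prev->total_used + prev->offset, which is exactly how second's 1000 is derived in this sketch.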
@@ -6,6 +6,7 @@
 #include <cstdlib>
 #include <cstring>
 #include <iostream>
+#include <limits>
 #include <new>
 #include <type_traits>
 #include <utility>
@@ -73,11 +74,11 @@ private:
    * - Accumulated counters for O(1) tracking operations
    */
   struct Block {
-    size_t size; ///< Size of this block's data area
-    Block *prev; ///< Pointer to previous block (nullptr for first block)
+    uint32_t size; ///< Size of this block's data area
+    uint32_t offset; ///< The offset of the first unused byte in the data area
     size_t total_size; ///< Accumulated size of this block + all previous blocks
-    size_t block_count; ///< Number of blocks including this one + all previous
-                        ///< blocks
+    size_t total_used; ///< Accumulated offsets of previous blocks
+    Block *prev; ///< Pointer to previous block (nullptr for first block)

     /**
      * @brief Get pointer to the data area of this block.
@@ -93,14 +94,18 @@ private:
      * @throws std::bad_alloc if memory allocation fails
      */
     static Block *create(size_t size, Block *prev) {
+      if (size > std::numeric_limits<uint32_t>::max()) {
+        throw std::bad_alloc();
+      }
       void *memory = std::aligned_alloc(
           alignof(Block), align_up(sizeof(Block) + size, alignof(Block)));
       if (!memory) {
         throw std::bad_alloc();
       }
       size_t total_size = size + (prev ? prev->total_size : 0);
-      size_t block_count = 1 + (prev ? prev->block_count : 0);
-      Block *block = new (memory) Block{size, prev, total_size, block_count};
+      size_t total_used = prev ? prev->total_used + prev->offset : 0;
+      Block *block = new (memory)
+          Block{uint32_t(size), /*offset*/ 0, total_size, total_used, prev};
       return block;
     }
   };
@@ -116,8 +121,7 @@ public:
    * @param initial_size Size in bytes for the first block (default: 1024)
    */
   explicit ArenaAllocator(size_t initial_size = 1024)
-      : initial_block_size_(initial_size), current_block_(nullptr),
-        current_offset_(0) {}
+      : initial_block_size_(initial_size), current_block_(nullptr) {}

   /**
    * @brief Destructor - frees all allocated blocks.
@@ -181,7 +185,7 @@ public:
    * The allocation path is extremely hot and inlining eliminates function
    * call overhead, allowing the ~1ns allocation performance.
    */
-  void *allocate_raw(size_t size,
+  void *allocate_raw(uint32_t size,
                      size_t alignment = alignof(std::max_align_t)) {
     if (size == 0) {
       return nullptr;
@@ -195,7 +199,7 @@ public:
     char *block_start = current_block_->data();
     uintptr_t block_addr = reinterpret_cast<uintptr_t>(block_start);
     size_t aligned_offset =
-        align_up(block_addr + current_offset_, alignment) - block_addr;
+        align_up(block_addr + current_block_->offset, alignment) - block_addr;

     if (aligned_offset + size > current_block_->size) {
       size_t next_block_size = calculate_next_block_size(size);
@@ -206,7 +210,7 @@ public:
     }

     void *ptr = block_start + aligned_offset;
-    current_offset_ = aligned_offset + size;
+    current_block_->offset = aligned_offset + size;

     return ptr;
   }
@@ -247,8 +251,8 @@ public:
    * - Like malloc/realloc, the contents beyond old_size are uninitialized
    * - When copying to new location, uses the specified alignment
    */
-  void *realloc_raw(void *ptr, size_t old_size, size_t new_size,
-                    size_t alignment = alignof(std::max_align_t));
+  void *realloc_raw(void *ptr, uint32_t old_size, uint32_t new_size,
+                    uint32_t alignment = alignof(std::max_align_t));

   /**
    * @brief Reallocate memory, extending in place if possible or copying to a
@@ -285,7 +289,11 @@ public:
    * - Like malloc/realloc, the contents beyond old_size are uninitialized
    * - When copying to new location, uses the specified alignment
    */
-  template <typename T> T *realloc(T *ptr, size_t old_size, size_t new_size) {
+  template <typename T>
+  T *realloc(T *ptr, uint32_t old_size, uint32_t new_size) {
+    if (size_t(new_size) * sizeof(T) > std::numeric_limits<uint32_t>::max()) {
+      throw std::bad_alloc();
+    }
     return static_cast<T *>(realloc_raw(ptr, old_size * sizeof(T),
                                         new_size * sizeof(T), alignof(T)));
   }
@@ -367,7 +375,7 @@ public:
    * This method only allocates memory - it does not construct objects.
    * Use placement new or other initialization methods as needed.
    */
-  template <typename T> T *allocate(size_t size) {
+  template <typename T> T *allocate(uint32_t size) {
     static_assert(
         std::is_trivially_destructible_v<T>,
         "ArenaAllocator::allocate requires trivially destructible types. "
@@ -376,6 +384,9 @@ public:
     if (size == 0) {
       return nullptr;
     }
+    if (size_t(size) * sizeof(T) > std::numeric_limits<uint32_t>::max()) {
+      throw std::bad_alloc();
+    }
     void *ptr = allocate_raw(sizeof(T) * size, alignof(T));
     return static_cast<T *>(ptr);
   }
@@ -427,9 +438,7 @@ public:
     if (!current_block_) {
       return 0;
     }
-    size_t prev_total =
-        current_block_->prev ? current_block_->prev->total_size : 0;
-    return prev_total + current_offset_;
+    return current_block_->total_used + current_block_->offset;
   }

   /**
@@ -438,18 +447,18 @@
    * @return Available bytes in current block, or 0 if no blocks exist
    */
   size_t available_in_current_block() const {
-    return current_block_ ? current_block_->size - current_offset_ : 0;
+    return current_block_ ? current_block_->size - current_block_->offset : 0;
   }

   /**
    * @brief Get the total number of blocks in the allocator.
-   *
-   * Uses O(1) accumulated counters for fast retrieval.
-   *
-   * @return Number of blocks, or 0 if no blocks exist
    */
   size_t num_blocks() const {
-    return current_block_ ? current_block_->block_count : 0;
+    size_t result = 0;
+    for (auto *p = current_block_; p != nullptr; p = p->prev) {
+      ++result;
+    }
+    return result;
   }

   /**
@@ -595,11 +604,9 @@ private:
                     size_t size);

   /// Size used for the first block and baseline for geometric growth
-  size_t initial_block_size_;
+  uint32_t initial_block_size_;
   /// Pointer to the current (most recent) block, or nullptr if no blocks exist
   Block *current_block_;
-  /// Current offset within the current block's data area
-  size_t current_offset_;
 };

 /**
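A side note on the allocate_raw hunks above: the per-block offset is aligned in absolute-address space and then converted back into an offset, so the returned pointer satisfies the requested alignment regardless of where the block's data area happens to start. Below is a small self-contained sketch of that step; the align_up helper here is an assumed round-up-to-a-power-of-two implementation, since its real definition sits outside this diff.

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Assumed shape of align_up: round value up to the next multiple of a
// power-of-two alignment. The header's actual helper is not shown in the diff.
constexpr uintptr_t align_up(uintptr_t value, size_t alignment) {
  return (value + alignment - 1) & ~(uintptr_t(alignment) - 1);
}

int main() {
  uintptr_t block_addr = 0x1000; // pretend start of the current block's data
  uint32_t offset = 13;          // bytes already used in this block
  size_t alignment = 8;          // requested alignment

  // Same computation as the diff: align the absolute address, then turn it
  // back into an offset relative to the block start.
  size_t aligned_offset = align_up(block_addr + offset, alignment) - block_addr;

  std::printf("aligned_offset = %zu\n", aligned_offset); // prints 16
}

With a block base of 0x1000 and 13 bytes already used, an 8-byte-aligned request lands at offset 16; the diff then stores aligned_offset + size back into current_block_->offset instead of the removed current_offset_ member.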