Move most of arena_allocator.hpp's implementation out of the header
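
The change applies the usual declaration/definition split: method bodies that previously lived inline in arena_allocator.hpp become plain declarations, and their definitions move into the new src/arena_allocator.cpp, so the implementations are compiled once instead of in every translation unit. Only the hot allocation path (allocate() and the alignment helper) stays inline, per the performance notes added below. A minimal sketch of the pattern, using a hypothetical Widget class rather than the real ArenaAllocator:

// widget.hpp (before): body inline, recompiled by every includer
class Widget {
public:
  void reset() { count_ = 0; }
private:
  int count_ = 0;
};

// widget.hpp (after): declaration only
class Widget {
public:
  void reset();
private:
  int count_ = 0;
};

// widget.cpp (after): out-of-line definition, compiled once
#include "widget.hpp"

void Widget::reset() { count_ = 0; }
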
CMakeLists.txt
@@ -47,7 +47,8 @@ include_directories(src)

find_package(weaseljson REQUIRED)

-set(SOURCES src/main.cpp src/config.cpp src/commit_request.cpp)
+set(SOURCES src/main.cpp src/config.cpp src/commit_request.cpp
+    src/arena_allocator.cpp)

add_executable(weaseldb ${SOURCES})
target_link_libraries(weaseldb Threads::Threads toml11::toml11 weaseljson)
@@ -58,38 +59,41 @@ enable_testing()
add_library(test_data STATIC benchmarks/test_data.cpp)
target_include_directories(test_data PUBLIC benchmarks)

-add_executable(test_arena_allocator tests/test_arena_allocator.cpp)
+add_executable(test_arena_allocator tests/test_arena_allocator.cpp
+               src/arena_allocator.cpp)
target_link_libraries(test_arena_allocator doctest::doctest)
target_include_directories(test_arena_allocator PRIVATE src)

-add_executable(test_commit_request tests/test_commit_request.cpp
-               src/commit_request.cpp)
+add_executable(
+  test_commit_request tests/test_commit_request.cpp src/commit_request.cpp
+  src/arena_allocator.cpp)
target_link_libraries(test_commit_request doctest::doctest weaseljson test_data)
target_include_directories(test_commit_request PRIVATE src)

-add_executable(bench_arena_allocator benchmarks/bench_arena_allocator.cpp)
+add_executable(bench_arena_allocator benchmarks/bench_arena_allocator.cpp
+               src/arena_allocator.cpp)
target_link_libraries(bench_arena_allocator nanobench)
target_include_directories(bench_arena_allocator PRIVATE src)

-add_executable(bench_commit_request benchmarks/bench_commit_request.cpp
-               src/commit_request.cpp)
+add_executable(
+  bench_commit_request benchmarks/bench_commit_request.cpp
+  src/commit_request.cpp src/arena_allocator.cpp)
target_link_libraries(bench_commit_request nanobench weaseljson test_data)
target_include_directories(bench_commit_request PRIVATE src)

-add_executable(bench_parser_comparison benchmarks/bench_parser_comparison.cpp
-               src/commit_request.cpp)
+add_executable(
+  bench_parser_comparison benchmarks/bench_parser_comparison.cpp
+  src/commit_request.cpp src/arena_allocator.cpp)
target_link_libraries(bench_parser_comparison nanobench weaseljson test_data
                      nlohmann_json::nlohmann_json)
target_include_directories(bench_parser_comparison PRIVATE src)

# Debug tools
-add_executable(debug_arena tools/debug_arena.cpp src/commit_request.cpp)
+add_executable(debug_arena tools/debug_arena.cpp src/commit_request.cpp
+               src/arena_allocator.cpp)
target_link_libraries(debug_arena weaseljson)
target_include_directories(debug_arena PRIVATE src)

add_executable(test_multi_block test_multi_block.cpp)
target_include_directories(test_multi_block PRIVATE src)

add_test(NAME arena_allocator_tests COMMAND test_arena_allocator)
add_test(NAME commit_request_tests COMMAND test_commit_request)
add_test(NAME arena_allocator_benchmarks COMMAND bench_arena_allocator)

src/arena_allocator.cpp (new file, 383 lines)
@@ -0,0 +1,383 @@
#include "arena_allocator.hpp"

ArenaAllocator::~ArenaAllocator() {
  while (current_block_) {
    Block *prev = current_block_->prev;
    std::free(current_block_);
    current_block_ = prev;
  }
}

ArenaAllocator::ArenaAllocator(ArenaAllocator &&other) noexcept
    : initial_block_size_(other.initial_block_size_),
      current_block_(other.current_block_),
      current_offset_(other.current_offset_) {
  other.current_block_ = nullptr;
  other.current_offset_ = 0;
}

ArenaAllocator &ArenaAllocator::operator=(ArenaAllocator &&other) noexcept {
  if (this != &other) {
    while (current_block_) {
      Block *prev = current_block_->prev;
      std::free(current_block_);
      current_block_ = prev;
    }

    initial_block_size_ = other.initial_block_size_;
    current_block_ = other.current_block_;
    current_offset_ = other.current_offset_;

    other.current_block_ = nullptr;
    other.current_offset_ = 0;
  }
  return *this;
}

void ArenaAllocator::reset() {
  if (!current_block_) {
    return;
  }

  // Find the first block by traversing backwards
  Block *first_block = current_block_;
  while (first_block && first_block->prev) {
    first_block = first_block->prev;
  }

  // Free all blocks after the first one
  Block *current = current_block_;
  while (current && current != first_block) {
    Block *prev = current->prev;
    std::free(current);
    current = prev;
  }

  // Update first block's counters to reflect only itself
  if (first_block) {
    first_block->total_size = first_block->size;
    first_block->block_count = 1;
  }

  current_block_ = first_block;
  current_offset_ = 0;
}

std::vector<ArenaAllocator::PointerInfo>
ArenaAllocator::find_intra_arena_pointers() const {
  std::vector<PointerInfo> pointers;

  if (!current_block_) {
    return pointers;
  }

  // Build list of blocks from current to first
  std::vector<Block *> blocks;
  Block *block = current_block_;
  while (block) {
    blocks.push_back(block);
    block = block->prev;
  }

  // Helper function to check if a pointer value points within the used area
  // of any block
  auto is_intra_arena_pointer = [&blocks,
                                 this](uint64_t pointer_value) -> bool {
    for (size_t block_idx = 0; block_idx < blocks.size(); ++block_idx) {
      Block *b = blocks[block_idx];
      uintptr_t block_start = reinterpret_cast<uintptr_t>(b->data());

      // Calculate used bytes in this specific block
      size_t block_used;
      if (block_idx == 0) {
        // Current block - use current_offset_
        block_used = current_offset_;
      } else {
        // Previous blocks are fully used
        block_used = b->size;
      }

      uintptr_t block_used_end = block_start + block_used;

      // Check if pointer falls within the used area of this block
      if (pointer_value >= block_start && pointer_value < block_used_end) {
        return true;
      }
    }
    return false;
  };

  // Scan each block for pointers
  for (size_t block_idx = 0; block_idx < blocks.size(); ++block_idx) {
    Block *b = blocks[block_idx];
    const char *data = b->data();

    // Calculate used bytes in this specific block
    size_t block_used;
    if (block_idx == 0) {
      // Current block - use current_offset_
      block_used = current_offset_;
    } else {
      // Previous blocks are fully used
      block_used = b->size;
    }

    // Scan for 64-bit aligned pointers
    for (size_t offset = 0; offset + sizeof(uint64_t) <= block_used;
         offset += sizeof(uint64_t)) {
      uint64_t potential_pointer;
      std::memcpy(&potential_pointer, data + offset, sizeof(potential_pointer));

      // Check if this value points within the used area of any block
      if (is_intra_arena_pointer(potential_pointer)) {
        // Find target location within arena
        auto target_location = find_address_location(
            reinterpret_cast<const void *>(potential_pointer));

        pointers.emplace_back(
            data + offset,             // source address
            blocks.size() - block_idx, // source block number (1-based)
            offset,                    // source offset in block
            reinterpret_cast<const void *>(potential_pointer), // target address
            target_location.found ? target_location.block_number
                                  : 0, // target block number
            target_location.found ? target_location.offset_in_block
                                  : 0 // target offset
        );
      }
    }
  }

  return pointers;
}

ArenaAllocator::AddressLocation
ArenaAllocator::find_address_location(const void *addr) const {
  if (!current_block_ || !addr) {
    return AddressLocation();
  }

  uintptr_t target_addr = reinterpret_cast<uintptr_t>(addr);

  // Build list of blocks from current to first
  std::vector<Block *> blocks;
  Block *block = current_block_;
  while (block) {
    blocks.push_back(block);
    block = block->prev;
  }

  // Check each block to see if the address falls within its used area
  for (size_t block_idx = 0; block_idx < blocks.size(); ++block_idx) {
    Block *b = blocks[block_idx];
    uintptr_t block_start = reinterpret_cast<uintptr_t>(b->data());

    // Calculate used bytes in this specific block
    size_t block_used;
    if (block_idx == 0) {
      // Current block - use current_offset_
      block_used = current_offset_;
    } else {
      // Previous blocks are fully used
      block_used = b->size;
    }

    uintptr_t block_used_end = block_start + block_used;

    // Check if address falls within the used area of this block
    if (target_addr >= block_start && target_addr < block_used_end) {
      return AddressLocation(
          blocks.size() - block_idx, // block number (1-based)
          target_addr - block_start  // offset within block
      );
    }
  }

  return AddressLocation(); // Not found
}

void ArenaAllocator::debug_dump(std::ostream &out, bool show_memory_map,
                                bool show_content, size_t content_limit) const {
  out << "=== Arena Debug Dump ===" << std::endl;

  if (!current_block_) {
    out << "Arena is empty (no blocks allocated)" << std::endl;
    out << "Initial block size: " << initial_block_size_ << " bytes"
        << std::endl;
    return;
  }

  // Overall statistics
  size_t total_alloc = this->total_allocated();
  size_t used = used_bytes();
  double utilization = total_alloc > 0 ? (100.0 * used / total_alloc) : 0.0;

  out << "Total allocated: " << total_alloc << " bytes across " << num_blocks()
      << " blocks" << std::endl;
  out << "Currently used: " << used << " bytes (" << std::fixed
      << std::setprecision(1) << utilization << "% utilization)" << std::endl;
  out << "Available in current: " << available_in_current_block() << " bytes"
      << std::endl;
  out << std::endl;

  // Build list of blocks from current to first
  std::vector<Block *> blocks;
  Block *block = current_block_;
  while (block) {
    blocks.push_back(block);
    block = block->prev;
  }

  out << "Block Chain (newest to oldest):" << std::endl;

  // Display blocks in reverse order (current first)
  for (size_t i = 0; i < blocks.size(); ++i) {
    Block *b = blocks[i];

    // Calculate used bytes in this specific block
    size_t block_used;
    if (i == 0) {
      // Current block - use current_offset_
      block_used = current_offset_;
    } else {
      // Previous blocks are fully used
      block_used = b->size;
    }

    double block_util = b->size > 0 ? (100.0 * block_used / b->size) : 0.0;

    out << "Block #" << (blocks.size() - i) << ": " << b->size << " bytes "
        << "[used: " << block_used << "/" << b->size << " = " << std::fixed
        << std::setprecision(1) << block_util << "%]";

    if (i == 0) {
      out << " <- current";
    }
    out << std::endl;

    // Show memory map if requested
    if (show_memory_map && b->size > 0) {
      const size_t map_width = 60;
      size_t used_chars = (map_width * block_used) / b->size;
      used_chars = std::min(used_chars, map_width);

      out << " [";
      for (size_t j = 0; j < map_width; ++j) {
        if (j < used_chars) {
          out << "#";
        } else {
          out << ".";
        }
      }
      out << "] (# = used, . = free)" << std::endl;
    }
  }

  out << std::endl;
  out << "Block addresses and relationships:" << std::endl;
  for (size_t i = 0; i < blocks.size(); ++i) {
    Block *b = blocks[i];
    out << "Block #" << (blocks.size() - i) << " @ " << static_cast<void *>(b)
        << " -> data @ " << static_cast<void *>(b->data());
    if (b->prev) {
      out << " (prev: " << static_cast<void *>(b->prev) << ")";
    } else {
      out << " (first block)";
    }
    out << std::endl;
  }

  // Show memory contents if requested
  if (show_content) {
    out << std::endl;
    out << "Memory Contents:" << std::endl;

    for (size_t i = 0; i < blocks.size(); ++i) {
      Block *b = blocks[i];
      size_t block_num = blocks.size() - i;

      // Calculate used bytes in this specific block
      size_t block_used;
      if (i == 0) {
        // Current block - use current_offset_
        block_used = current_offset_;
      } else {
        // Previous blocks are fully used
        block_used = b->size;
      }

      if (block_used == 0) {
        out << "Block #" << block_num << ": No content (empty)" << std::endl;
        continue;
      }

      size_t bytes_to_show = std::min(block_used, content_limit);
      out << "Block #" << block_num << " (first " << bytes_to_show << " of "
          << block_used << " used bytes):" << std::endl;

      const char *data = b->data();
      dump_memory_contents(out, data, bytes_to_show);

      if (bytes_to_show < block_used) {
        out << " ... (" << (block_used - bytes_to_show) << " more bytes)"
            << std::endl;
      }
      out << std::endl;
    }
  }
}

void ArenaAllocator::add_block(size_t size) {
  Block *new_block = Block::create(size, current_block_);
  current_block_ = new_block;
  current_offset_ = 0;
}

size_t ArenaAllocator::calculate_next_block_size(size_t required_size) const {
  size_t current_size =
      current_block_ ? current_block_->size : initial_block_size_;
  size_t doubled_size = current_size * 2;

  return std::max(required_size, doubled_size);
}

void ArenaAllocator::dump_memory_contents(std::ostream &out, const char *data,
                                          size_t size) {
  const size_t bytes_per_line = 16;

  for (size_t offset = 0; offset < size; offset += bytes_per_line) {
    // Print offset
    out << " 0x" << std::setfill('0') << std::setw(4) << std::hex << offset
        << ": ";

    size_t bytes_in_line = std::min(bytes_per_line, size - offset);

    // Print hex bytes
    for (size_t i = 0; i < bytes_per_line; ++i) {
      if (i < bytes_in_line) {
        unsigned char byte = static_cast<unsigned char>(data[offset + i]);
        out << std::setfill('0') << std::setw(2) << std::hex
            << static_cast<int>(byte);
      } else {
        out << " "; // Padding for incomplete lines
      }

      // Add space every 4 bytes for readability
      if ((i + 1) % 4 == 0) {
        out << " ";
      }
    }

    // Print ASCII representation
    out << " |";
    for (size_t i = 0; i < bytes_in_line; ++i) {
      char c = data[offset + i];
      if (c >= 32 && c <= 126) { // Printable ASCII
        out << c;
      } else {
        out << '.'; // Non-printable characters
      }
    }
    out << "|" << std::dec << std::endl;
  }
}
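
For context, a short usage sketch of the interface implemented above. The ArenaAllocator constructor is not part of this diff, so the construction shown here (taking an initial block size) is an assumption; allocate(), reset(), debug_dump(), find_intra_arena_pointers(), and find_address_location() are used with the signatures defined in this file.

#include "arena_allocator.hpp"

#include <cstring>
#include <iostream>

int main() {
  // Assumed constructor: the real signature is not shown in this commit.
  ArenaAllocator arena(4096);

  // Bump-allocate two chunks; the second requests explicit alignment.
  void *a = arena.allocate(128);
  void *b = arena.allocate(sizeof(double), alignof(double));

  // Store b's address inside the arena so the pointer scanner can find it.
  std::memcpy(a, &b, sizeof(b));

  // Introspection helpers defined in this file.
  arena.debug_dump(std::cout, /*show_memory_map=*/true, /*show_content=*/true);

  auto pointers = arena.find_intra_arena_pointers();
  std::cout << pointers.size() << " intra-arena pointer(s) found\n";

  auto loc = arena.find_address_location(b);
  if (loc.found) {
    std::cout << "b is in block #" << loc.block_number << " at offset "
              << loc.offset_in_block << "\n";
  }

  // Release all but the first block and rewind the offset.
  arena.reset();
  return 0;
}
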
src/arena_allocator.hpp
@@ -125,13 +125,7 @@ public:
   * Traverses the intrusive linked list backwards from current_block_,
   * freeing each block. This ensures no memory leaks.
   */
-  ~ArenaAllocator() {
-    while (current_block_) {
-      Block *prev = current_block_->prev;
-      std::free(current_block_);
-      current_block_ = prev;
-    }
-  }
+  ~ArenaAllocator();

  /// Copy construction is not allowed (would be expensive and error-prone)
  ArenaAllocator(const ArenaAllocator &) = delete;
@@ -142,13 +136,7 @@ public:
   * @brief Move constructor - transfers ownership of all blocks.
   * @param other The ArenaAllocator to move from (will be left empty)
   */
-  ArenaAllocator(ArenaAllocator &&other) noexcept
-      : initial_block_size_(other.initial_block_size_),
-        current_block_(other.current_block_),
-        current_offset_(other.current_offset_) {
-    other.current_block_ = nullptr;
-    other.current_offset_ = 0;
-  }
+  ArenaAllocator(ArenaAllocator &&other) noexcept;

  /**
   * @brief Move assignment operator - transfers ownership of all blocks.
@@ -159,23 +147,7 @@ public:
   * @param other The ArenaAllocator to move from (will be left empty)
   * @return Reference to this allocator
   */
-  ArenaAllocator &operator=(ArenaAllocator &&other) noexcept {
-    if (this != &other) {
-      while (current_block_) {
-        Block *prev = current_block_->prev;
-        std::free(current_block_);
-        current_block_ = prev;
-      }
-
-      initial_block_size_ = other.initial_block_size_;
-      current_block_ = other.current_block_;
-      current_offset_ = other.current_offset_;
-
-      other.current_block_ = nullptr;
-      other.current_offset_ = 0;
-    }
-    return *this;
-  }
+  ArenaAllocator &operator=(ArenaAllocator &&other) noexcept;

  /**
   * @brief Allocate memory with the specified size and alignment.
@@ -201,6 +173,11 @@ public:
   * MyStruct* ptr3 = static_cast<MyStruct*>(
   *     arena.allocate(sizeof(MyStruct), alignof(MyStruct)));
   * ```
+   *
+   * ## Performance Note:
+   * This method is kept inline in the header for maximum performance.
+   * The allocation path is extremely hot and inlining eliminates function
+   * call overhead, allowing the ~1ns allocation performance.
   */
  void *allocate(size_t size, size_t alignment = alignof(std::max_align_t)) {
    if (size == 0) {
@@ -293,34 +270,7 @@ public:
   * arena.allocate(100); // Reuses first block
   * ```
   */
-  void reset() {
-    if (!current_block_) {
-      return;
-    }
-
-    // Find the first block by traversing backwards
-    Block *first_block = current_block_;
-    while (first_block && first_block->prev) {
-      first_block = first_block->prev;
-    }
-
-    // Free all blocks after the first one
-    Block *current = current_block_;
-    while (current && current != first_block) {
-      Block *prev = current->prev;
-      std::free(current);
-      current = prev;
-    }
-
-    // Update first block's counters to reflect only itself
-    if (first_block) {
-      first_block->total_size = first_block->size;
-      first_block->block_count = 1;
-    }
-
-    current_block_ = first_block;
-    current_offset_ = 0;
-  }
+  void reset();

  /**
   * @brief Get the total number of bytes allocated across all blocks.
@@ -395,94 +345,7 @@ public:
          target_block_number(target_block), target_offset(target_offset) {}
  };

-  std::vector<PointerInfo> find_intra_arena_pointers() const {
-    std::vector<PointerInfo> pointers;
-
-    if (!current_block_) {
-      return pointers;
-    }
-
-    // Build list of blocks from current to first
-    std::vector<Block *> blocks;
-    Block *block = current_block_;
-    while (block) {
-      blocks.push_back(block);
-      block = block->prev;
-    }
-
-    // Helper function to check if a pointer value points within the used area
-    // of any block
-    auto is_intra_arena_pointer = [&blocks,
-                                   this](uint64_t pointer_value) -> bool {
-      for (size_t block_idx = 0; block_idx < blocks.size(); ++block_idx) {
-        Block *b = blocks[block_idx];
-        uintptr_t block_start = reinterpret_cast<uintptr_t>(b->data());
-
-        // Calculate used bytes in this specific block
-        size_t block_used;
-        if (block_idx == 0) {
-          // Current block - use current_offset_
-          block_used = current_offset_;
-        } else {
-          // Previous blocks are fully used
-          block_used = b->size;
-        }
-
-        uintptr_t block_used_end = block_start + block_used;
-
-        // Check if pointer falls within the used area of this block
-        if (pointer_value >= block_start && pointer_value < block_used_end) {
-          return true;
-        }
-      }
-      return false;
-    };
-
-    // Scan each block for pointers
-    for (size_t block_idx = 0; block_idx < blocks.size(); ++block_idx) {
-      Block *b = blocks[block_idx];
-      const char *data = b->data();
-
-      // Calculate used bytes in this specific block
-      size_t block_used;
-      if (block_idx == 0) {
-        // Current block - use current_offset_
-        block_used = current_offset_;
-      } else {
-        // Previous blocks are fully used
-        block_used = b->size;
-      }
-
-      // Scan for 64-bit aligned pointers
-      for (size_t offset = 0; offset + sizeof(uint64_t) <= block_used;
-           offset += sizeof(uint64_t)) {
-        uint64_t potential_pointer;
-        std::memcpy(&potential_pointer, data + offset,
-                    sizeof(potential_pointer));
-
-        // Check if this value points within the used area of any block
-        if (is_intra_arena_pointer(potential_pointer)) {
-          // Find target location within arena
-          auto target_location = find_address_location(
-              reinterpret_cast<const void *>(potential_pointer));
-
-          pointers.emplace_back(
-              data + offset,             // source address
-              blocks.size() - block_idx, // source block number (1-based)
-              offset,                    // source offset in block
-              reinterpret_cast<const void *>(
-                  potential_pointer), // target address
-              target_location.found ? target_location.block_number
-                                    : 0, // target block number
-              target_location.found ? target_location.offset_in_block
-                                    : 0 // target offset
-          );
-        }
-      }
-    }
-
-    return pointers;
-  }
+  std::vector<PointerInfo> find_intra_arena_pointers() const;

  /**
   * @brief Find which block and offset a given address belongs to.
@@ -501,49 +364,7 @@ public:
        : block_number(block), offset_in_block(offset), found(true) {}
  };

-  AddressLocation find_address_location(const void *addr) const {
-    if (!current_block_ || !addr) {
-      return AddressLocation();
-    }
-
-    uintptr_t target_addr = reinterpret_cast<uintptr_t>(addr);
-
-    // Build list of blocks from current to first
-    std::vector<Block *> blocks;
-    Block *block = current_block_;
-    while (block) {
-      blocks.push_back(block);
-      block = block->prev;
-    }
-
-    // Check each block to see if the address falls within its used area
-    for (size_t block_idx = 0; block_idx < blocks.size(); ++block_idx) {
-      Block *b = blocks[block_idx];
-      uintptr_t block_start = reinterpret_cast<uintptr_t>(b->data());
-
-      // Calculate used bytes in this specific block
-      size_t block_used;
-      if (block_idx == 0) {
-        // Current block - use current_offset_
-        block_used = current_offset_;
-      } else {
-        // Previous blocks are fully used
-        block_used = b->size;
-      }
-
-      uintptr_t block_used_end = block_start + block_used;
-
-      // Check if address falls within the used area of this block
-      if (target_addr >= block_start && target_addr < block_used_end) {
-        return AddressLocation(
-            blocks.size() - block_idx, // block number (1-based)
-            target_addr - block_start  // offset within block
-        );
-      }
-    }
-
-    return AddressLocation(); // Not found
-  }
+  AddressLocation find_address_location(const void *addr) const;

  /**
   * @brief Debug function to visualize the arena's layout and contents.
@@ -583,135 +404,7 @@ public:
   * ```
   */
  void debug_dump(std::ostream &out = std::cout, bool show_memory_map = false,
-                  bool show_content = false, size_t content_limit = 256) const {
-    out << "=== Arena Debug Dump ===" << std::endl;
-
-    if (!current_block_) {
-      out << "Arena is empty (no blocks allocated)" << std::endl;
-      out << "Initial block size: " << initial_block_size_ << " bytes"
-          << std::endl;
-      return;
-    }
-
-    // Overall statistics
-    size_t total_alloc = this->total_allocated();
-    size_t used = used_bytes();
-    double utilization = total_alloc > 0 ? (100.0 * used / total_alloc) : 0.0;
-
-    out << "Total allocated: " << total_alloc << " bytes across "
-        << num_blocks() << " blocks" << std::endl;
-    out << "Currently used: " << used << " bytes (" << std::fixed
-        << std::setprecision(1) << utilization << "% utilization)" << std::endl;
-    out << "Available in current: " << available_in_current_block() << " bytes"
-        << std::endl;
-    out << std::endl;
-
-    // Build list of blocks from current to first
-    std::vector<Block *> blocks;
-    Block *block = current_block_;
-    while (block) {
-      blocks.push_back(block);
-      block = block->prev;
-    }
-
-    out << "Block Chain (newest to oldest):" << std::endl;
-
-    // Display blocks in reverse order (current first)
-    for (size_t i = 0; i < blocks.size(); ++i) {
-      Block *b = blocks[i];
-
-      // Calculate used bytes in this specific block
-      size_t block_used;
-      if (i == 0) {
-        // Current block - use current_offset_
-        block_used = current_offset_;
-      } else {
-        // Previous blocks are fully used
-        block_used = b->size;
-      }
-
-      double block_util = b->size > 0 ? (100.0 * block_used / b->size) : 0.0;
-
-      out << "Block #" << (blocks.size() - i) << ": " << b->size << " bytes "
-          << "[used: " << block_used << "/" << b->size << " = " << std::fixed
-          << std::setprecision(1) << block_util << "%]";
-
-      if (i == 0) {
-        out << " <- current";
-      }
-      out << std::endl;
-
-      // Show memory map if requested
-      if (show_memory_map && b->size > 0) {
-        const size_t map_width = 60;
-        size_t used_chars = (map_width * block_used) / b->size;
-        used_chars = std::min(used_chars, map_width);
-
-        out << " [";
-        for (size_t j = 0; j < map_width; ++j) {
-          if (j < used_chars) {
-            out << "#";
-          } else {
-            out << ".";
-          }
-        }
-        out << "] (# = used, . = free)" << std::endl;
-      }
-    }
-
-    out << std::endl;
-    out << "Block addresses and relationships:" << std::endl;
-    for (size_t i = 0; i < blocks.size(); ++i) {
-      Block *b = blocks[i];
-      out << "Block #" << (blocks.size() - i) << " @ " << static_cast<void *>(b)
-          << " -> data @ " << static_cast<void *>(b->data());
-      if (b->prev) {
-        out << " (prev: " << static_cast<void *>(b->prev) << ")";
-      } else {
-        out << " (first block)";
-      }
-      out << std::endl;
-    }
-
-    // Show memory contents if requested
-    if (show_content) {
-      out << std::endl;
-      out << "Memory Contents:" << std::endl;
-
-      for (size_t i = 0; i < blocks.size(); ++i) {
-        Block *b = blocks[i];
-        size_t block_num = blocks.size() - i;
-
-        // Calculate used bytes in this specific block
-        size_t block_used;
-        if (i == 0) {
-          // Current block - use current_offset_
-          block_used = current_offset_;
-        } else {
-          // Previous blocks are fully used
-          block_used = b->size;
-        }
-
-        if (block_used == 0) {
-          out << "Block #" << block_num << ": No content (empty)" << std::endl;
-          continue;
-        }
-
-        size_t bytes_to_show = std::min(block_used, content_limit);
-        out << "Block #" << block_num << " (first " << bytes_to_show << " of "
-            << block_used << " used bytes):" << std::endl;
-
-        const char *data = b->data();
-        dump_memory_contents(out, data, bytes_to_show);
-
-        if (bytes_to_show < block_used) {
-          out << " ... (" << (block_used - bytes_to_show) << " more bytes)"
-              << std::endl;
-        }
-        out << std::endl;
-      }
-    }
-  }
+                  bool show_content = false, size_t content_limit = 256) const;

private:
  /**
@@ -722,11 +415,7 @@ private:
   *
   * @param size Size of the data area for the new block
   */
-  void add_block(size_t size) {
-    Block *new_block = Block::create(size, current_block_);
-    current_block_ = new_block;
-    current_offset_ = 0;
-  }
+  void add_block(size_t size);

  /**
   * @brief Calculate the size for the next block using geometric growth.
@@ -738,13 +427,7 @@ private:
   * @return Size for the next block (max of required_size and doubled current
   *         size)
   */
-  size_t calculate_next_block_size(size_t required_size) const {
-    size_t current_size =
-        current_block_ ? current_block_->size : initial_block_size_;
-    size_t doubled_size = current_size * 2;
-
-    return std::max(required_size, doubled_size);
-  }
+  size_t calculate_next_block_size(size_t required_size) const;

  /**
   * @brief Align a value up to the specified alignment boundary.
@@ -752,6 +435,9 @@ private:
   * Uses bit manipulation for efficient alignment calculation.
   * Only works with power-of-2 alignments.
   *
+   * This method is kept inline in the header for maximum performance
+   * as it's called in the hot allocation path and benefits from inlining.
+   *
   * @param value The value to align
   * @param alignment The alignment boundary (must be power of 2)
   * @return The aligned value
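
The alignment helper this comment describes stays in the header, and its body is not part of this hunk; the name align_up and the implementation below are therefore assumptions. For power-of-2 alignments the usual bit trick looks like this:

#include <cstddef>

// Round value up to the next multiple of alignment, which must be a power
// of 2. Examples: align_up(13, 8) == 16, align_up(16, 8) == 16.
static constexpr size_t align_up(size_t value, size_t alignment) {
  return (value + alignment - 1) & ~(alignment - 1);
}
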
@@ -774,45 +460,7 @@ private:
   * @param size Number of bytes to dump
   */
  static void dump_memory_contents(std::ostream &out, const char *data,
-                                   size_t size) {
-    const size_t bytes_per_line = 16;
-
-    for (size_t offset = 0; offset < size; offset += bytes_per_line) {
-      // Print offset
-      out << " 0x" << std::setfill('0') << std::setw(4) << std::hex << offset
-          << ": ";
-
-      size_t bytes_in_line = std::min(bytes_per_line, size - offset);
-
-      // Print hex bytes
-      for (size_t i = 0; i < bytes_per_line; ++i) {
-        if (i < bytes_in_line) {
-          unsigned char byte = static_cast<unsigned char>(data[offset + i]);
-          out << std::setfill('0') << std::setw(2) << std::hex
-              << static_cast<int>(byte);
-        } else {
-          out << " "; // Padding for incomplete lines
-        }
-
-        // Add space every 4 bytes for readability
-        if ((i + 1) % 4 == 0) {
-          out << " ";
-        }
-      }
-
-      // Print ASCII representation
-      out << " |";
-      for (size_t i = 0; i < bytes_in_line; ++i) {
-        char c = data[offset + i];
-        if (c >= 32 && c <= 126) { // Printable ASCII
-          out << c;
-        } else {
-          out << '.'; // Non-printable characters
-        }
-      }
-      out << "|" << std::dec << std::endl;
-    }
-  }
+                                   size_t size);

  /// Size used for the first block and baseline for geometric growth
  size_t initial_block_size_;