#include "arena_allocator.hpp" #include #include #include ArenaAllocator::~ArenaAllocator() { while (current_block_) { Block *prev = current_block_->prev; std::free(current_block_); current_block_ = prev; } } ArenaAllocator::ArenaAllocator(ArenaAllocator &&other) noexcept : initial_block_size_(other.initial_block_size_), current_block_(other.current_block_) { other.current_block_ = nullptr; } ArenaAllocator &ArenaAllocator::operator=(ArenaAllocator &&other) noexcept { if (this != &other) { while (current_block_) { Block *prev = current_block_->prev; std::free(current_block_); current_block_ = prev; } initial_block_size_ = other.initial_block_size_; current_block_ = other.current_block_; other.current_block_ = nullptr; } return *this; } void ArenaAllocator::reset() { if (!current_block_) { return; } // Find the first block by traversing backwards Block *first_block = current_block_; while (first_block && first_block->prev) { first_block = first_block->prev; } // Free all blocks after the first one Block *current = current_block_; while (current && current != first_block) { Block *prev = current->prev; std::free(current); current = prev; } // Update first block's counters to reflect only itself if (first_block) { first_block->total_size = first_block->size; first_block->total_used = 0; } current_block_ = first_block; current_block_->offset = 0; } void *ArenaAllocator::realloc_raw(void *ptr, uint32_t old_size, uint32_t new_size, uint32_t alignment) { if (ptr == nullptr) { return allocate_raw(new_size, alignment); } if (new_size == old_size) { return ptr; } // Assert that we have a current block if ptr is not null assert(current_block_ && "realloc called with non-null ptr but no current block exists"); // Assert that offset is large enough (should always be true for // valid callers) assert(current_block_->offset >= old_size && "offset must be >= old_size for valid last allocation"); // Check if this was the last allocation by comparing with expected location char *expected_last_alloc_start = current_block_->data() + current_block_->offset - old_size; if (ptr == expected_last_alloc_start) { // This is indeed the last allocation if (new_size > old_size) { // Growing - check if we have space size_t additional_space_needed = new_size - old_size; if (current_block_->offset + additional_space_needed <= current_block_->size) { // We can extend in place current_block_->offset += additional_space_needed; return ptr; } } else { // Shrinking - just update the offset size_t space_to_free = old_size - new_size; current_block_->offset -= space_to_free; return new_size == 0 ? nullptr : ptr; } } // Can't extend in place if (new_size == 0) { // For non-last allocations, we can't reclaim memory but still return // nullptr return nullptr; } if (new_size <= old_size) { // Shrinking - no need to allocate, just return the same pointer return ptr; } // Growing but can't extend in place - need to allocate new space and copy void *new_ptr = allocate_raw(new_size, alignment); if (new_ptr && ptr) { // Copy all the old data since we're growing std::memcpy(new_ptr, ptr, old_size); } return new_ptr; } std::vector ArenaAllocator::find_intra_arena_pointers() const { std::vector pointers; if (!current_block_) { return pointers; } // Build list of blocks from current to first std::vector blocks; Block *block = current_block_; while (block) { blocks.push_back(block); block = block->prev; } // Helper function to check if a pointer value points within the used area // of any block auto is_intra_arena_pointer = [&blocks, this](uint64_t pointer_value) -> bool { for (size_t block_idx = 0; block_idx < blocks.size(); ++block_idx) { Block *b = blocks[block_idx]; uintptr_t block_start = reinterpret_cast(b->data()); // Calculate used bytes in this specific block size_t block_used = b->offset; uintptr_t block_used_end = block_start + block_used; // Check if pointer falls within the used area of this block if (pointer_value >= block_start && pointer_value < block_used_end) { return true; } } return false; }; // Scan each block for pointers for (size_t block_idx = 0; block_idx < blocks.size(); ++block_idx) { Block *b = blocks[block_idx]; const char *data = b->data(); // Calculate used bytes in this specific block size_t block_used; if (block_idx == 0) { // Current block - use offset block_used = current_block_->offset; } else { // Previous blocks are fully used block_used = b->size; } // Scan for 64-bit aligned pointers for (size_t offset = 0; offset + sizeof(uint64_t) <= block_used; offset += sizeof(uint64_t)) { uint64_t potential_pointer; std::memcpy(&potential_pointer, data + offset, sizeof(potential_pointer)); // Check if this value points within the used area of any block if (is_intra_arena_pointer(potential_pointer)) { // Find target location within arena auto target_location = find_address_location( reinterpret_cast(potential_pointer)); pointers.emplace_back( data + offset, // source address blocks.size() - block_idx, // source block number (1-based) offset, // source offset in block reinterpret_cast(potential_pointer), // target address target_location.found ? target_location.block_number : 0, // target block number target_location.found ? target_location.offset_in_block : 0 // target offset ); } } } return pointers; } ArenaAllocator::AddressLocation ArenaAllocator::find_address_location(const void *addr) const { if (!current_block_ || !addr) { return AddressLocation(); } uintptr_t target_addr = reinterpret_cast(addr); // Build list of blocks from current to first std::vector blocks; Block *block = current_block_; while (block) { blocks.push_back(block); block = block->prev; } // Check each block to see if the address falls within its used area for (size_t block_idx = 0; block_idx < blocks.size(); ++block_idx) { Block *b = blocks[block_idx]; uintptr_t block_start = reinterpret_cast(b->data()); // Calculate used bytes in this specific block size_t block_used; if (block_idx == 0) { // Current block - use offset block_used = current_block_->offset; } else { // Previous blocks are fully used block_used = b->size; } uintptr_t block_used_end = block_start + block_used; // Check if address falls within the used area of this block if (target_addr >= block_start && target_addr < block_used_end) { return AddressLocation( blocks.size() - block_idx, // block number (1-based) target_addr - block_start // offset within block ); } } return AddressLocation(); // Not found } void ArenaAllocator::debug_dump(std::ostream &out, bool show_memory_map, bool show_content, size_t content_limit) const { out << "=== Arena Debug Dump ===" << std::endl; if (!current_block_) { out << "Arena is empty (no blocks allocated)" << std::endl; out << "Initial block size: " << initial_block_size_ << " bytes" << std::endl; return; } // Build list of blocks from current to first std::vector blocks; Block *block = current_block_; while (block) { blocks.push_back(block); block = block->prev; } // Overall statistics size_t used = this->used_bytes(); size_t total_alloc = this->total_allocated(); double utilization = total_alloc > 0 ? (100.0 * used / total_alloc) : 0.0; out << "Total allocated: " << total_alloc << " bytes across " << num_blocks() << " blocks" << std::endl; out << "Currently used: " << used << " bytes (" << std::fixed << std::setprecision(1) << utilization << "% utilization)" << std::endl; out << "Available in current: " << available_in_current_block() << " bytes" << std::endl; out << std::endl; out << "Block Chain (newest to oldest):" << std::endl; // Display blocks in reverse order (current first) for (size_t i = 0; i < blocks.size(); ++i) { Block *b = blocks[i]; // Calculate used bytes in this specific block size_t block_used = b->offset; double block_util = b->size > 0 ? (100.0 * block_used / b->size) : 0.0; out << "Block #" << (blocks.size() - i) << ": " << b->size << " bytes " << "[used: " << block_used << "/" << b->size << " = " << std::fixed << std::setprecision(1) << block_util << "%]"; if (i == 0) { out << " <- current"; } out << std::endl; // Show memory map if requested if (show_memory_map && b->size > 0) { const size_t map_width = 60; size_t used_chars = (map_width * block_used) / b->size; used_chars = std::min(used_chars, map_width); out << " ["; for (size_t j = 0; j < map_width; ++j) { if (j < used_chars) { out << "#"; } else { out << "."; } } out << "] (# = used, . = free)" << std::endl; } } out << std::endl; out << "Block addresses and relationships:" << std::endl; for (size_t i = 0; i < blocks.size(); ++i) { Block *b = blocks[i]; out << "Block #" << (blocks.size() - i) << " @ " << static_cast(b) << " -> data @ " << static_cast(b->data()); if (b->prev) { out << " (prev: " << static_cast(b->prev) << ")"; } else { out << " (first block)"; } out << std::endl; } // Show memory contents if requested if (show_content) { out << std::endl; out << "Memory Contents:" << std::endl; for (size_t i = 0; i < blocks.size(); ++i) { Block *b = blocks[i]; size_t block_num = blocks.size() - i; // Calculate used bytes in this specific block size_t block_used = b->offset; if (block_used == 0) { out << "Block #" << block_num << ": No content (empty)" << std::endl; continue; } size_t bytes_to_show = std::min(block_used, content_limit); out << "Block #" << block_num << " (first " << bytes_to_show << " of " << block_used << " used bytes):" << std::endl; const char *data = b->data(); dump_memory_contents(out, data, bytes_to_show); if (bytes_to_show < block_used) { out << " ... (" << (block_used - bytes_to_show) << " more bytes)" << std::endl; } out << std::endl; } } } void ArenaAllocator::add_block(size_t size) { Block *new_block = Block::create(size, current_block_); current_block_ = new_block; } size_t ArenaAllocator::calculate_next_block_size(size_t required_size) const { size_t doubled_size = (current_block_ ? current_block_->size : 0) * 2; doubled_size = std::min(doubled_size, std::numeric_limits::max()); return std::max(required_size, doubled_size); } void ArenaAllocator::dump_memory_contents(std::ostream &out, const char *data, size_t size) { const size_t bytes_per_line = 16; for (size_t offset = 0; offset < size; offset += bytes_per_line) { // Print offset out << " 0x" << std::setfill('0') << std::setw(4) << std::hex << offset << ": "; size_t bytes_in_line = std::min(bytes_per_line, size - offset); // Print hex bytes for (size_t i = 0; i < bytes_per_line; ++i) { if (i < bytes_in_line) { unsigned char byte = static_cast(data[offset + i]); out << std::setfill('0') << std::setw(2) << std::hex << static_cast(byte); } else { out << " "; // Padding for incomplete lines } // Add space every 4 bytes for readability if ((i + 1) % 4 == 0) { out << " "; } } // Print ASCII representation out << " |"; for (size_t i = 0; i < bytes_in_line; ++i) { char c = data[offset + i]; if (c >= 32 && c <= 126) { // Printable ASCII out << c; } else { out << '.'; // Non-printable characters } } out << "|" << std::dec << std::endl; } }