From 8e33b477ebf14dacfce8af6ab94e6a3381dade2c Mon Sep 17 00:00:00 2001
From: Andrew Noyes
Date: Fri, 15 Aug 2025 13:31:45 -0400
Subject: [PATCH] Make allocate take a template type

So we use the right alignment
---
 benchmarks/bench_arena_allocator.cpp |   2 +-
 src/arena_allocator.cpp              |   4 +-
 src/arena_allocator.hpp              |  67 +++++++++++++++--
 src/commit_request.cpp               |   4 +-
 tests/test_arena_allocator.cpp       | 103 ++++++++++++++-------------
 5 files changed, 117 insertions(+), 63 deletions(-)

diff --git a/benchmarks/bench_arena_allocator.cpp b/benchmarks/bench_arena_allocator.cpp
index 526804e..6bdc5ef 100644
--- a/benchmarks/bench_arena_allocator.cpp
+++ b/benchmarks/bench_arena_allocator.cpp
@@ -16,7 +16,7 @@ int main() {
   // Arena allocator benchmark
   ArenaAllocator arena;
   bench.run("ArenaAllocator", [&] {
-    void *ptr = arena.allocate(alloc_size);
+    void *ptr = arena.allocate_raw(alloc_size);
     ankerl::nanobench::doNotOptimizeAway(ptr);
   });
 }
diff --git a/src/arena_allocator.cpp b/src/arena_allocator.cpp
index 32ef973..df4a0d4 100644
--- a/src/arena_allocator.cpp
+++ b/src/arena_allocator.cpp
@@ -67,7 +67,7 @@ void ArenaAllocator::reset() {
 void *ArenaAllocator::realloc(void *ptr, size_t old_size, size_t new_size,
                               size_t alignment) {
   if (ptr == nullptr) {
-    return allocate(new_size, alignment);
+    return allocate_raw(new_size, alignment);
   }
 
   if (new_size == old_size) {
@@ -119,7 +119,7 @@ void *ArenaAllocator::realloc(void *ptr, size_t old_size, size_t new_size,
   }
 
   // Growing but can't extend in place - need to allocate new space and copy
-  void *new_ptr = allocate(new_size, alignment);
+  void *new_ptr = allocate_raw(new_size, alignment);
   if (new_ptr && ptr) {
     // Copy all the old data since we're growing
     std::memcpy(new_ptr, ptr, old_size);
diff --git a/src/arena_allocator.hpp b/src/arena_allocator.hpp
index aace826..6467be6 100644
--- a/src/arena_allocator.hpp
+++ b/src/arena_allocator.hpp
@@ -150,12 +150,14 @@ public:
   ArenaAllocator &operator=(ArenaAllocator &&other) noexcept;
 
   /**
-   * @brief Allocate memory with the specified size and alignment.
+   * @brief Allocate raw memory with the specified size and alignment.
    *
    * This is the core allocation method providing ~1ns allocation performance.
    * It performs lazy initialization on first use and automatically grows
    * the arena when needed using geometric growth (doubling block sizes).
    *
+   * For type-safe allocation, prefer the allocate() template method.
+   *
   * @param size Number of bytes to allocate (0 returns nullptr)
    * @param alignment Required alignment (default: alignof(std::max_align_t))
    * @return Pointer to allocated memory, or nullptr if size is 0
    *
@@ -168,10 +170,10 @@ public:
    *
    * ## Example:
    * ```cpp
-   * void* ptr1 = arena.allocate(100);     // Default alignment
-   * void* ptr2 = arena.allocate(64, 16);  // 16-byte aligned
+   * void* ptr1 = arena.allocate_raw(100);     // Default alignment
+   * void* ptr2 = arena.allocate_raw(64, 16);  // 16-byte aligned
    * MyStruct* ptr3 = static_cast<MyStruct*>(
-   *     arena.allocate(sizeof(MyStruct), alignof(MyStruct)));
+   *     arena.allocate_raw(sizeof(MyStruct), alignof(MyStruct)));
    * ```
    *
    * ## Performance Note:
    * The allocation path is extremely hot and inlining eliminates function
    * call overhead, allowing the ~1ns allocation performance.
   */
-  void *allocate(size_t size, size_t alignment = alignof(std::max_align_t)) {
+  void *allocate_raw(size_t size,
+                     size_t alignment = alignof(std::max_align_t)) {
     if (size == 0) {
       return nullptr;
     }
@@ -288,10 +291,60 @@ public:
         "ArenaAllocator::construct requires trivially destructible types. "
         "Objects constructed in the arena will not have their destructors "
         "called.");
-    void *ptr = allocate(sizeof(T), alignof(T));
+    void *ptr = allocate_raw(sizeof(T), alignof(T));
     return new (ptr) T(std::forward<Args>(args)...);
   }
 
+  /**
+   * @brief Allocate space for an array of `size` objects of type T with
+   * proper alignment.
+   *
+   * This is a type-safe convenience method that combines the sizing and
+   * alignment calculations for allocating arrays of type T. It's preferred
+   * over calling allocate_raw() directly as it prevents common errors with
+   * size calculations and alignment requirements.
+   *
+   * @tparam T The type of objects to allocate space for (must be trivially
+   * destructible)
+   * @param size Number of T objects to allocate space for
+   * @return Pointer to allocated memory suitable for constructing an array
+   * of T objects
+   * @throws std::bad_alloc if memory allocation fails
+   *
+   * ## Type Requirements:
+   * T must be trivially destructible (std::is_trivially_destructible_v<T>).
+   * This ensures consistency with the arena allocator's design, where
+   * destructors are never called.
+   *
+   * ## Example:
+   * ```cpp
+   * // Allocate space for 100 integers
+   * int* numbers = arena.allocate<int>(100);
+   *
+   * // Allocate space for 50 POD structs
+   * MyPOD* objects = arena.allocate<MyPOD>(50);
+   *
+   * // Initialize some elements (no automatic construction)
+   * numbers[0] = 42;
+   * new (&objects[0]) MyPOD(arg1, arg2);
+   * ```
+   *
+   * ## Note:
+   * This method only allocates memory - it does not construct objects.
+   * Use placement new or other initialization methods as needed.
+   */
+  template <typename T> T *allocate(size_t size) {
+    static_assert(
+        std::is_trivially_destructible_v<T>,
+        "ArenaAllocator::allocate requires trivially destructible types. "
+        "Objects allocated in the arena will not have their destructors "
+        "called.");
+    if (size == 0) {
+      return nullptr;
+    }
+    void *ptr = allocate_raw(sizeof(T) * size, alignof(T));
+    return static_cast<T *>(ptr);
+  }
+
   /**
    * @brief Reset the allocator to reuse the first block, freeing all others.
   *
@@ -542,7 +595,7 @@ public:
   T *allocate(size_type n) {
     if (n == 0)
       return nullptr;
-    return static_cast<T *>(arena_->allocate(n * sizeof(T), alignof(T)));
+    return static_cast<T *>(arena_->allocate_raw(n * sizeof(T), alignof(T)));
   }
 
   void deallocate(T *ptr, size_type n) noexcept {
diff --git a/src/commit_request.cpp b/src/commit_request.cpp
index f3ffcc6..830fc1f 100644
--- a/src/commit_request.cpp
+++ b/src/commit_request.cpp
@@ -45,7 +45,7 @@ std::string_view CommitRequest::store_string(std::string_view str) {
     return {};
   }
 
-  char *arena_str = static_cast<char *>(arena_.allocate(str.size()));
+  char *arena_str = arena_.allocate<char>(str.size());
   std::memcpy(arena_str, str.data(), str.size());
 
   return std::string_view(arena_str, str.size());
@@ -78,7 +78,7 @@ std::string_view CommitRequest::decode_base64(std::string_view base64_str) {
     return {};
   }
 
-  char *output = static_cast<char *>(arena_.allocate(output_len));
+  char *output = arena_.allocate<char>(output_len);
   if (!output) {
     return {};
   }
diff --git a/tests/test_arena_allocator.cpp b/tests/test_arena_allocator.cpp
index cf25cdd..e6092f8 100644
--- a/tests/test_arena_allocator.cpp
+++ b/tests/test_arena_allocator.cpp
@@ -24,20 +24,20 @@ TEST_CASE("ArenaAllocator basic allocation") {
   ArenaAllocator arena;
 
   SUBCASE("allocate zero bytes returns nullptr") {
-    void *ptr = arena.allocate(0);
+    void *ptr = arena.allocate_raw(0);
     CHECK(ptr == nullptr);
     CHECK(arena.used_bytes() == 0);
   }
 
   SUBCASE("allocate single byte") {
-    void *ptr = arena.allocate(1);
+    void *ptr = arena.allocate_raw(1);
     CHECK(ptr != nullptr);
     CHECK(arena.used_bytes() >= 1);
   }
 
   SUBCASE("allocate multiple bytes") {
-    void *ptr1 = arena.allocate(100);
-    void *ptr2 = arena.allocate(200);
+    void *ptr1 = arena.allocate_raw(100);
+    void *ptr2 = arena.allocate_raw(200);
 
     CHECK(ptr1 != nullptr);
     CHECK(ptr2 != nullptr);
@@ -50,24 +50,24 @@ TEST_CASE("ArenaAllocator alignment") {
   ArenaAllocator arena;
 
   SUBCASE("default alignment") {
-    void *ptr = arena.allocate(1);
+    void *ptr = arena.allocate_raw(1);
     CHECK(reinterpret_cast<uintptr_t>(ptr) % alignof(std::max_align_t) == 0);
   }
 
   SUBCASE("custom alignment") {
-    void *ptr8 = arena.allocate(1, 8);
+    void *ptr8 = arena.allocate_raw(1, 8);
     CHECK(reinterpret_cast<uintptr_t>(ptr8) % 8 == 0);
 
-    void *ptr16 = arena.allocate(1, 16);
+    void *ptr16 = arena.allocate_raw(1, 16);
     CHECK(reinterpret_cast<uintptr_t>(ptr16) % 16 == 0);
 
-    void *ptr32 = arena.allocate(1, 32);
+    void *ptr32 = arena.allocate_raw(1, 32);
     CHECK(reinterpret_cast<uintptr_t>(ptr32) % 32 == 0);
   }
 
   SUBCASE("alignment with larger allocations") {
     ArenaAllocator fresh_arena;
-    void *ptr = fresh_arena.allocate(100, 64);
+    void *ptr = fresh_arena.allocate_raw(100, 64);
     CHECK(reinterpret_cast<uintptr_t>(ptr) % 64 == 0);
   }
 }
@@ -76,23 +76,23 @@ TEST_CASE("ArenaAllocator block management") {
   ArenaAllocator arena(128);
 
   SUBCASE("single block allocation") {
-    void *ptr = arena.allocate(64);
+    void *ptr = arena.allocate_raw(64);
     CHECK(ptr != nullptr);
     CHECK(arena.num_blocks() == 1);
     CHECK(arena.used_bytes() == 64);
   }
 
   SUBCASE("multiple blocks when size exceeded") {
-    void *ptr1 = arena.allocate(100);
+    void *ptr1 = arena.allocate_raw(100);
     CHECK(arena.num_blocks() == 1);
 
-    void *ptr2 = arena.allocate(50);
+    void *ptr2 = arena.allocate_raw(50);
     CHECK(arena.num_blocks() == 2);
     CHECK(ptr1 != ptr2);
   }
 
   SUBCASE("allocation larger than block size grows arena") {
-    void *ptr = arena.allocate(200);
+    void *ptr = arena.allocate_raw(200);
     CHECK(ptr != nullptr);
     CHECK(arena.num_blocks() == 1);
   }
@@ -145,8 +145,8 @@ TEST_CASE("ArenaAllocator construct template") {
 
 TEST_CASE("ArenaAllocator reset functionality") {
functionality") { ArenaAllocator arena; - arena.allocate(100); - arena.allocate(200); + arena.allocate_raw(100); + arena.allocate_raw(200); size_t used_before = arena.used_bytes(); CHECK(used_before > 0); @@ -154,7 +154,7 @@ TEST_CASE("ArenaAllocator reset functionality") { CHECK(arena.used_bytes() == 0); CHECK(arena.num_blocks() >= 1); - void *ptr = arena.allocate(50); + void *ptr = arena.allocate_raw(50); CHECK(ptr != nullptr); CHECK(arena.used_bytes() == 50); } @@ -163,14 +163,15 @@ TEST_CASE("ArenaAllocator reset memory leak test") { ArenaAllocator arena(32); // Smaller initial size // Force multiple blocks - arena.allocate(30); // First block (32 bytes) + arena.allocate_raw(30); // First block (32 bytes) CHECK(arena.num_blocks() == 1); - arena.allocate(30); // Should create second block (64 bytes due to doubling) + arena.allocate_raw( + 30); // Should create second block (64 bytes due to doubling) CHECK(arena.num_blocks() == 2); - arena.allocate(100); // Should create third block (128 bytes due to doubling, - // or larger for 100) + arena.allocate_raw(100); // Should create third block (128 bytes due to + // doubling, or larger for 100) CHECK(arena.num_blocks() == 3); size_t total_before = arena.total_allocated(); @@ -185,7 +186,7 @@ TEST_CASE("ArenaAllocator reset memory leak test") { CHECK(arena.used_bytes() == 0); // Should be able to use the first block again - void *ptr = arena.allocate(20); + void *ptr = arena.allocate_raw(20); CHECK(ptr != nullptr); CHECK(arena.used_bytes() == 20); } @@ -197,14 +198,14 @@ TEST_CASE("ArenaAllocator memory tracking") { CHECK(arena.used_bytes() == 0); CHECK(arena.available_in_current_block() == 0); - arena.allocate(100); + arena.allocate_raw(100); CHECK(arena.used_bytes() >= 100); CHECK(arena.available_in_current_block() <= 412); - arena.allocate(400); + arena.allocate_raw(400); CHECK(arena.num_blocks() == 1); - arena.allocate(50); + arena.allocate_raw(50); CHECK(arena.num_blocks() == 2); CHECK(arena.total_allocated() >= 1024); } @@ -215,7 +216,7 @@ TEST_CASE("ArenaAllocator stress test") { SUBCASE("many small allocations") { std::vector ptrs; for (int i = 0; i < 1000; ++i) { - void *ptr = arena.allocate(8); + void *ptr = arena.allocate_raw(8); CHECK(ptr != nullptr); ptrs.push_back(ptr); } @@ -227,8 +228,8 @@ TEST_CASE("ArenaAllocator stress test") { SUBCASE("alternating small and large allocations") { for (int i = 0; i < 50; ++i) { - void *small_ptr = arena.allocate(16); - void *large_ptr = arena.allocate(256); + void *small_ptr = arena.allocate_raw(16); + void *large_ptr = arena.allocate_raw(256); CHECK(small_ptr != nullptr); CHECK(large_ptr != nullptr); CHECK(small_ptr != large_ptr); @@ -238,7 +239,7 @@ TEST_CASE("ArenaAllocator stress test") { TEST_CASE("ArenaAllocator move semantics") { ArenaAllocator arena1(512); - arena1.allocate(100); + arena1.allocate_raw(100); size_t used_bytes = arena1.used_bytes(); size_t num_blocks = arena1.num_blocks(); @@ -246,25 +247,25 @@ TEST_CASE("ArenaAllocator move semantics") { CHECK(arena2.used_bytes() == used_bytes); CHECK(arena2.num_blocks() == num_blocks); - void *ptr = arena2.allocate(50); + void *ptr = arena2.allocate_raw(50); CHECK(ptr != nullptr); } TEST_CASE("ArenaAllocator edge cases") { SUBCASE("very small block size") { ArenaAllocator arena(16); - void *ptr = arena.allocate(8); + void *ptr = arena.allocate_raw(8); CHECK(ptr != nullptr); CHECK(arena.num_blocks() == 1); } SUBCASE("allocation exactly block size") { ArenaAllocator arena(64); - void *ptr = arena.allocate(64); + void *ptr = 
     CHECK(ptr != nullptr);
     CHECK(arena.num_blocks() == 1);
 
-    void *ptr2 = arena.allocate(1);
+    void *ptr2 = arena.allocate_raw(1);
     CHECK(ptr2 != nullptr);
     CHECK(arena.num_blocks() == 2);
   }
@@ -272,7 +273,7 @@ TEST_CASE("ArenaAllocator edge cases") {
   SUBCASE("multiple resets") {
     ArenaAllocator arena;
     for (int i = 0; i < 10; ++i) {
-      arena.allocate(100);
+      arena.allocate_raw(100);
       arena.reset();
       CHECK(arena.used_bytes() == 0);
     }
@@ -308,19 +309,19 @@ TEST_CASE("ArenaAllocator geometric growth policy") {
   ArenaAllocator arena(64);
 
   SUBCASE("normal geometric growth doubles size") {
-    arena.allocate(60); // Fill first block
+    arena.allocate_raw(60); // Fill first block
     size_t initial_total = arena.total_allocated();
 
-    arena.allocate(10); // Force new block
+    arena.allocate_raw(10); // Force new block
     CHECK(arena.num_blocks() == 2);
     CHECK(arena.total_allocated() == initial_total + 128); // 64 * 2 = 128
   }
 
   SUBCASE("large allocation creates appropriately sized block") {
-    arena.allocate(60); // Fill first block
+    arena.allocate_raw(60); // Fill first block
     size_t initial_total = arena.total_allocated();
 
-    arena.allocate(200); // Force large block
+    arena.allocate_raw(200); // Force large block
     CHECK(arena.num_blocks() == 2);
     CHECK(arena.total_allocated() >= initial_total + 200); // At least 200 bytes
   }
@@ -329,7 +330,7 @@ TEST_CASE("ArenaAllocator geometric growth policy") {
     size_t allocation_size = 32;
 
     for (int i = 0; i < 10; ++i) {
-      arena.allocate(allocation_size);
+      arena.allocate_raw(allocation_size);
     }
 
     // Should have grown logarithmically, not linearly
@@ -341,8 +342,8 @@ TEST_CASE("ArenaAllocator alignment edge cases") {
   ArenaAllocator arena;
 
   SUBCASE("unaligned then aligned allocation") {
-    void *ptr1 = arena.allocate(1, 1);
-    void *ptr2 = arena.allocate(8, 8);
+    void *ptr1 = arena.allocate_raw(1, 1);
+    void *ptr2 = arena.allocate_raw(8, 8);
 
     CHECK(ptr1 != nullptr);
     CHECK(ptr2 != nullptr);
@@ -351,7 +352,7 @@ TEST_CASE("ArenaAllocator alignment edge cases") {
 
   SUBCASE("large alignment requirements") {
     ArenaAllocator fresh_arena;
-    void *ptr = fresh_arena.allocate(1, 128);
+    void *ptr = fresh_arena.allocate_raw(1, 128);
     CHECK(ptr != nullptr);
     CHECK(reinterpret_cast<uintptr_t>(ptr) % 128 == 0);
   }
@@ -364,7 +365,7 @@ TEST_CASE("ArenaAllocator realloc functionality") {
     // realloc with new_size == 0 returns nullptr and reclaims memory if it's
     // the last allocation
     ArenaAllocator fresh_arena(256);
-    void *ptr = fresh_arena.allocate(100);
+    void *ptr = fresh_arena.allocate_raw(100);
     size_t used_before = fresh_arena.used_bytes();
     CHECK(used_before == 100);
 
@@ -374,8 +375,8 @@ TEST_CASE("ArenaAllocator realloc functionality") {
 
     // Test case where it's NOT the last allocation - memory cannot be reclaimed
     ArenaAllocator arena2(256);
-    void *ptr1 = arena2.allocate(50);
-    void *ptr2 = arena2.allocate(50);
+    void *ptr1 = arena2.allocate_raw(50);
+    void *ptr2 = arena2.allocate_raw(50);
     size_t used_before2 = arena2.used_bytes();
     CHECK(used_before2 >= 100); // At least 100 bytes due to potential alignment
@@ -397,7 +398,7 @@ TEST_CASE("ArenaAllocator realloc functionality") {
 
   SUBCASE("in-place extension - growing") {
     ArenaAllocator fresh_arena(1024);
-    void *ptr = fresh_arena.allocate(100);
+    void *ptr = fresh_arena.allocate_raw(100);
     CHECK(ptr != nullptr);
 
     // Fill the allocation with test data
@@ -418,7 +419,7 @@ TEST_CASE("ArenaAllocator realloc functionality") {
 
   SUBCASE("in-place shrinking") {
     ArenaAllocator fresh_arena(1024);
-    void *ptr = fresh_arena.allocate(200);
+    void *ptr = fresh_arena.allocate_raw(200);
     std::memset(ptr, 0xCD, 200);
 
     // Should shrink in place
@@ -437,11 +438,11 @@ TEST_CASE("ArenaAllocator realloc functionality") {
     ArenaAllocator fresh_arena(256); // Larger block to avoid edge cases
 
     // Allocate first chunk
-    void *ptr1 = fresh_arena.allocate(60);
+    void *ptr1 = fresh_arena.allocate_raw(60);
     std::memset(ptr1, 0x11, 60);
 
     // Allocate second chunk (this prevents in-place extension of ptr1)
-    void *ptr2 = fresh_arena.allocate(30);
+    void *ptr2 = fresh_arena.allocate_raw(30);
     std::memset(ptr2, 0x22, 30);
 
     // Try to reallocate ptr1 - should copy since ptr2 is now the last
@@ -472,7 +473,7 @@ TEST_CASE("ArenaAllocator realloc functionality") {
     ArenaAllocator fresh_arena(100);
 
     // Allocate almost all space
-    void *ptr = fresh_arena.allocate(90);
+    void *ptr = fresh_arena.allocate_raw(90);
     std::memset(ptr, 0x33, 90);
 
     // Try to extend beyond block size - should copy to new block
@@ -491,7 +492,7 @@ TEST_CASE("ArenaAllocator realloc functionality") {
     ArenaAllocator fresh_arena(1024);
 
     // Allocate with specific alignment
-    void *ptr = fresh_arena.allocate(50, 16);
+    void *ptr = fresh_arena.allocate_raw(50, 16);
     CHECK(reinterpret_cast<uintptr_t>(ptr) % 16 == 0);
     std::memset(ptr, 0x44, 50);
 
@@ -509,7 +510,7 @@ TEST_CASE("ArenaAllocator realloc functionality") {
 
   SUBCASE("realloc stress test") {
     ArenaAllocator fresh_arena(512);
-    void *ptr = fresh_arena.allocate(50);
+    void *ptr = fresh_arena.allocate_raw(50);
     size_t current_size = 50;
 
     // Fill with pattern
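For reviewers, a minimal sketch contrasting the old raw interface with the new typed overload this patch introduces. It is an illustration, not part of the diff: the include path and the `Point` struct are made up for the example, while `ArenaAllocator`, `allocate_raw()`, and `allocate<T>()` come from the patch itself.

```cpp
#include <new>

#include "arena_allocator.hpp" // Assumed include path; adjust to your build.

// Hypothetical trivially destructible POD, standing in for user types.
struct Point {
  int x;
  int y;
};

int main() {
  ArenaAllocator arena;

  // Before this patch: byte counts, explicit alignment, and a cast, each an
  // easy place to slip (e.g. passing a count where a byte size is expected).
  int *old_style = static_cast<int *>(
      arena.allocate_raw(100 * sizeof(int), alignof(int)));

  // After this patch: sizeof and alignof are derived from the template
  // argument, so the caller only supplies the element count.
  int *numbers = arena.allocate<int>(100);
  numbers[0] = 42;

  // allocate<T>() only reserves memory; construct elements explicitly.
  Point *pts = arena.allocate<Point>(50);
  new (&pts[0]) Point{1, 2};

  (void)old_style;
  return 0;
}
```

Because `allocate<T>()` static_asserts `std::is_trivially_destructible_v<T>`, types with nontrivial destructors are rejected at compile time, matching the arena's never-run-destructors design.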