diff --git a/VersionedMap.cpp b/VersionedMap.cpp
index 4c03cd0..25eae08 100644
--- a/VersionedMap.cpp
+++ b/VersionedMap.cpp
@@ -15,6 +15,8 @@
 #include
 #include
 
+#include
+
 static_assert(std::is_standard_layout_v);
 static_assert(std::is_standard_layout_v);
 static_assert(std::is_standard_layout_v);
@@ -245,6 +247,7 @@ struct MemManager {
     if (next == firstUnaddressable) {
       mprotectSafe(base + firstUnaddressable, kUpsizeBytes,
                    PROT_READ | PROT_WRITE);
+      VALGRIND_MAKE_MEM_UNDEFINED(base + firstUnaddressable, kUpsizeBytes);
       firstUnaddressable += kUpsizeNodes;
 #if SHOW_MEMORY
       mmapBytes = getBytes();
@@ -311,49 +314,14 @@ struct MemManager {
       }
     }
 
-    // Reclaim memory on the right side
     uint32_t max = reachable.max();
     if (max == 0) {
      max = kMinAddressable - 1;
     }
+    assert(max < next);
 
-    uint32_t newFirstUnaddressable = (max / kNodesPerPage + 1) * kNodesPerPage;
-    if (newFirstUnaddressable < firstUnaddressable) {
-      for (int i = newFirstUnaddressable; i < firstUnaddressable; ++i) {
-        if (base[i].entry != nullptr) {
-#if DEBUG_VERBOSE
-          if (debugVerboseEnabled) {
-            printf("Collecting %u while shrinking right\n", i);
-          }
-#endif
-          base[i].entry->delref();
-        }
-      }
-      mprotectSafe(base + newFirstUnaddressable,
-                   (firstUnaddressable - newFirstUnaddressable) * sizeof(Node),
-                   PROT_NONE);
-      firstUnaddressable = newFirstUnaddressable;
-#if SHOW_MEMORY
-      mmapBytes = getBytes();
-#endif
-    }
-    next = max + 1;
-    for (int i = next; i < firstUnaddressable; ++i) {
-      if (base[i].entry != nullptr) {
-#if DEBUG_VERBOSE
-        if (debugVerboseEnabled) {
-          printf(
-              "Collecting %u while collecting right page-unaligned surplus\n",
-              i);
-        }
-#endif
-        base[i].entry->delref();
-        base[i].entry = nullptr;
-      }
-    }
-
-    // Rebuild free list and delref entries
+    // Rebuild free list to prefer leftward nodes
     freeList = 0;
     reachable.iterateAbsentApproxBackwards(
         [&](uint32_t i) {
@@ -369,7 +337,33 @@ struct MemManager {
           base[i].nextFree = freeList;
           freeList = i;
         },
-        kMinAddressable, next);
+        kMinAddressable, max + 1);
+
+    // Entries to the right of max don't need to be in the freelist. They're
+    // allocated by pointer bumping.
+    for (int i = max + 1; i < next; ++i) {
+      if (base[i].entry != nullptr) {
+#if DEBUG_VERBOSE
+        if (debugVerboseEnabled) {
+          printf("Collecting %u while shrinking right\n", i);
+        }
+#endif
+        base[i].entry->delref();
+        base[i].entry = nullptr;
+      }
+    }
+
+    uint32_t newFirstUnaddressable = (max / kNodesPerPage + 1) * kNodesPerPage;
+    if (newFirstUnaddressable < firstUnaddressable) {
+      mprotectSafe(base + newFirstUnaddressable,
+                   (firstUnaddressable - newFirstUnaddressable) * sizeof(Node),
+                   PROT_NONE);
+      firstUnaddressable = newFirstUnaddressable;
+#if SHOW_MEMORY
+      mmapBytes = getBytes();
+#endif
+    }
+    next = max + 1;
   }
 
   int64_t getBytes() const {
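The change above relies on a split allocation scheme: slots at or below the highest reachable index are recycled through the intrusive free list, while slots to the right of max are handed back simply by resetting next, because allocation in that region is done by pointer bumping and never consults the free list. The following standalone sketch illustrates only that split; it is not the repository's MemManager (BumpAndFreeList, live, and collect are invented names for illustration), and it omits the mprotect page protection and Valgrind annotations the real code performs.

// Minimal sketch of bump-pointer allocation combined with an intrusive free
// list, as assumed from the diff's comments. Not the actual MemManager.
#include <cstdint>
#include <cstdio>
#include <vector>

struct Node {
  uint32_t nextFree = 0;  // index of next free node; 0 acts as end-of-list
  bool live = false;      // stand-in for `entry != nullptr`
};

struct BumpAndFreeList {
  std::vector<Node> base;
  uint32_t next;      // first never-used slot; bump allocation advances this
  uint32_t freeList;  // head of recycled slots; 0 is the sentinel

  explicit BumpAndFreeList(uint32_t capacity)
      : base(capacity), next(1), freeList(0) {}  // slot 0 reserved as sentinel

  uint32_t allocate() {  // capacity checks omitted for brevity
    uint32_t i;
    if (freeList != 0) {  // prefer recycled (leftward) slots
      i = freeList;
      freeList = base[i].nextFree;
    } else {
      i = next++;  // otherwise bump the pointer
    }
    base[i].live = true;
    return i;
  }

  // reachable[i] says whether slot i survived collection.
  void collect(const std::vector<bool>& reachable) {
    uint32_t max = 0;
    for (uint32_t i = 1; i < next; ++i) {
      if (reachable[i]) max = i;
    }
    // Rebuild the free list from unreachable slots at or below max, walking
    // backwards so lower indices end up nearer the head of the list.
    freeList = 0;
    for (uint32_t i = max; i >= 1; --i) {
      if (!reachable[i]) {
        base[i].live = false;
        base[i].nextFree = freeList;
        freeList = i;
      }
    }
    // Slots above max need no free-list entries: shrinking next makes them
    // available to bump allocation again.
    for (uint32_t i = max + 1; i < next; ++i) base[i].live = false;
    next = max + 1;
  }
};

int main() {
  BumpAndFreeList mm(16);
  for (int k = 0; k < 6; ++k) mm.allocate();  // occupies slots 1..6
  std::vector<bool> reachable(16, false);
  reachable[2] = reachable[4] = true;  // pretend only slots 2 and 4 survive
  mm.collect(reachable);
  printf("next=%u freeList=%u\n", mm.next, mm.freeList);  // next=5, free list 1 -> 3
  return 0;
}

Under these assumptions, collecting with slots 2 and 4 reachable leaves slots 1 and 3 on the free list (lowest index at the head, matching the backwards rebuild in the diff) and pulls next back to 5, so the region above the highest reachable slot is reclaimed without touching the free list at all.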