Fix bug where gc was scanning uninit memory
@@ -15,6 +15,8 @@
 #include <sys/types.h>
 #include <unistd.h>
 
+#include <memcheck.h>
+
 static_assert(std::is_standard_layout_v<weaselab::VersionedMap::MutationType>);
 static_assert(std::is_standard_layout_v<weaselab::VersionedMap::Key>);
 static_assert(std::is_standard_layout_v<weaselab::VersionedMap::Mutation>);
@@ -245,6 +247,7 @@ struct MemManager {
     if (next == firstUnaddressable) {
       mprotectSafe(base + firstUnaddressable, kUpsizeBytes,
                    PROT_READ | PROT_WRITE);
+      VALGRIND_MAKE_MEM_UNDEFINED(base + firstUnaddressable, kUpsizeBytes);
       firstUnaddressable += kUpsizeNodes;
 #if SHOW_MEMORY
       mmapBytes = getBytes();
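The added VALGRIND_MAKE_MEM_UNDEFINED call (declared in memcheck.h, which the first hunk includes) tells Memcheck that the freshly re-enabled pages hold undefined data, so a scan of them before they are rewritten is reported instead of passing silently. Below is a minimal standalone sketch of that mprotect-plus-client-request pattern; the <valgrind/memcheck.h> include path, sizes, and variable names are illustrative assumptions, not taken from this codebase.

// Standalone sketch (not this codebase): re-enable a reserved page and mark
// it undefined for Memcheck so reads-before-write are reported.
#include <sys/mman.h>
#include <valgrind/memcheck.h>  // the patch includes <memcheck.h> via its own include path
#include <cstdio>

int main() {
  const size_t kRegionBytes = 1 << 20;  // reserved address space (illustrative)
  const size_t kUpsizeBytes = 4096;     // grow by one page (illustrative)

  // Reserve address space without making it addressable.
  void* mem = mmap(nullptr, kRegionBytes, PROT_NONE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  if (mem == MAP_FAILED) {
    std::perror("mmap");
    return 1;
  }
  char* base = static_cast<char*>(mem);

  // Make the first page usable...
  if (mprotect(base, kUpsizeBytes, PROT_READ | PROT_WRITE) != 0) {
    std::perror("mprotect");
    return 1;
  }
  // ...and tell Memcheck its contents are undefined. Without this, a later
  // scan of the page before it is initialized would not be flagged.
  VALGRIND_MAKE_MEM_UNDEFINED(base, kUpsizeBytes);

  base[0] = 42;                  // write before read: no report
  std::printf("%d\n", base[0]);  // using base[1] here instead would be reported under valgrind
  munmap(base, kRegionBytes);
  return 0;
}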
@@ -311,49 +314,14 @@ struct MemManager {
       }
     }
 
-    // Reclaim memory on the right side
     uint32_t max = reachable.max();
     if (max == 0) {
       max = kMinAddressable - 1;
     }
 
     assert(max < next);
-    uint32_t newFirstUnaddressable = (max / kNodesPerPage + 1) * kNodesPerPage;
-    if (newFirstUnaddressable < firstUnaddressable) {
-      for (int i = newFirstUnaddressable; i < firstUnaddressable; ++i) {
-        if (base[i].entry != nullptr) {
-#if DEBUG_VERBOSE
-          if (debugVerboseEnabled) {
-            printf("Collecting %u while shrinking right\n", i);
-          }
-#endif
-          base[i].entry->delref();
-        }
-      }
-      mprotectSafe(base + newFirstUnaddressable,
-                   (firstUnaddressable - newFirstUnaddressable) * sizeof(Node),
-                   PROT_NONE);
-      firstUnaddressable = newFirstUnaddressable;
-#if SHOW_MEMORY
-      mmapBytes = getBytes();
-#endif
-    }
-    next = max + 1;
 
-    for (int i = next; i < firstUnaddressable; ++i) {
-      if (base[i].entry != nullptr) {
-#if DEBUG_VERBOSE
-        if (debugVerboseEnabled) {
-          printf(
-              "Collecting %u while collecting right page-unaligned surplus\n",
-              i);
-        }
-#endif
-        base[i].entry->delref();
-        base[i].entry = nullptr;
-      }
-    }
-    // Rebuild free list and delref entries
+    // Rebuild free list to prefer leftward nodes
     freeList = 0;
     reachable.iterateAbsentApproxBackwards(
         [&](uint32_t i) {
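The new comment in this hunk says the free list is rebuilt "to prefer leftward nodes". A simplified, self-contained illustration of that effect follows, using a plain backwards loop and a std::vector<bool> in place of reachable.iterateAbsentApproxBackwards (whose exact API this diff does not show); the Node layout, sizes, indices, and the assumption that allocation pops from the head of the list are all illustrative.

// Standalone sketch: pushing unreachable slots from high index to low index
// onto a singly linked free list leaves the lowest (leftmost) indices at the
// head, so later allocations fill the left side first and the right side
// stays empty and eligible for the page shrink in the next hunk.
#include <cstdint>
#include <cstdio>
#include <vector>

struct Node {
  void* entry = nullptr;
  uint32_t nextFree = 0;
};

int main() {
  const uint32_t kMinAddressable = 1;
  std::vector<Node> base(16);
  // Suppose the GC found these indices reachable.
  std::vector<bool> reachable(16, false);
  reachable[2] = reachable[5] = reachable[9] = true;
  uint32_t max = 9;  // highest reachable index

  // Rebuild the free list from unreachable slots in [kMinAddressable, max],
  // walking backwards so lower indices end up at the head of the list.
  uint32_t freeList = 0;  // 0 acts as the list terminator here
  for (uint32_t i = max; i >= kMinAddressable; --i) {
    if (!reachable[i]) {
      base[i].nextFree = freeList;
      freeList = i;
    }
  }

  // Allocations now prefer leftward nodes: 1, 3, 4, 6, ...
  for (int n = 0; n < 4 && freeList != 0; ++n) {
    std::printf("allocate node %u\n", freeList);
    freeList = base[freeList].nextFree;
  }
  return 0;
}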
@@ -369,7 +337,33 @@ struct MemManager {
           base[i].nextFree = freeList;
           freeList = i;
         },
-        kMinAddressable, next);
+        kMinAddressable, max + 1);
+
+    // Entries to the right of max don't need to be in the freelist. They're
+    // allocated by pointer bumping.
+    for (int i = max + 1; i < next; ++i) {
+      if (base[i].entry != nullptr) {
+#if DEBUG_VERBOSE
+        if (debugVerboseEnabled) {
+          printf("Collecting %u while shrinking right\n", i);
+        }
+#endif
+        base[i].entry->delref();
+        base[i].entry = nullptr;
+      }
+    }
+
+    uint32_t newFirstUnaddressable = (max / kNodesPerPage + 1) * kNodesPerPage;
+    if (newFirstUnaddressable < firstUnaddressable) {
+      mprotectSafe(base + newFirstUnaddressable,
+                   (firstUnaddressable - newFirstUnaddressable) * sizeof(Node),
+                   PROT_NONE);
+      firstUnaddressable = newFirstUnaddressable;
+#if SHOW_MEMORY
+      mmapBytes = getBytes();
+#endif
+    }
+    next = max + 1;
   }
 
   int64_t getBytes() const {
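The block added in this last hunk rounds the highest reachable node up to a page boundary and makes everything past it unaddressable again. Below is a standalone sketch of that rounding-plus-reprotect step, with mprotect standing in for mprotectSafe and illustrative stand-ins for Node, kNodesPerPage, and the MemManager fields (none of these definitions are taken from the patch).

// Standalone sketch (not this codebase) of "round the high-water mark up to a
// page boundary, then PROT_NONE the tail".
#include <sys/mman.h>
#include <cassert>
#include <cstdint>
#include <cstdio>

struct Node {
  void* entry;
  uint32_t nextFree;
};

int main() {
  const uint32_t kPageBytes = 4096;
  const uint32_t kNodesPerPage = kPageBytes / sizeof(Node);
  const uint32_t kTotalNodes = 16 * kNodesPerPage;

  // Reserve the whole node array up front; nothing is addressable yet.
  void* mem = mmap(nullptr, kTotalNodes * sizeof(Node), PROT_NONE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  assert(mem != MAP_FAILED);
  Node* base = static_cast<Node*>(mem);

  // Pretend 8 pages are currently addressable and the highest reachable node
  // found by the GC is max.
  uint32_t firstUnaddressable = 8 * kNodesPerPage;
  mprotect(base, firstUnaddressable * sizeof(Node), PROT_READ | PROT_WRITE);
  uint32_t max = 3 * kNodesPerPage + 7;

  // Same rounding as the patch: first node index of the page after max.
  uint32_t newFirstUnaddressable = (max / kNodesPerPage + 1) * kNodesPerPage;
  if (newFirstUnaddressable < firstUnaddressable) {
    // Hand the unused tail back by making it unaddressable again.
    mprotect(base + newFirstUnaddressable,
             (firstUnaddressable - newFirstUnaddressable) * sizeof(Node),
             PROT_NONE);
    firstUnaddressable = newFirstUnaddressable;
  }
  std::printf("addressable nodes: %u\n", firstUnaddressable);

  munmap(base, kTotalNodes * sizeof(Node));
  return 0;
}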