diff --git a/VersionedMap.cpp b/VersionedMap.cpp
index 1ef3a5a..92add7b 100644
--- a/VersionedMap.cpp
+++ b/VersionedMap.cpp
@@ -15,6 +15,11 @@
 #define DEBUG_VERBOSE 0
 #endif
 
+#if DEBUG_VERBOSE
+// Use to toggle debug verbose dynamically
+bool debugVerboseEnabled = true;
+#endif
+
 void *mmapSafe(void *addr, size_t len, int prot, int flags, int fd,
                off_t offset) {
   void *result = mmap(addr, len, prot, flags, fd, offset);
@@ -271,7 +276,9 @@ struct MemManager {
     for (int i = newFirstUnaddressable; i < firstUnaddressable; ++i) {
       if (base[i].entry != nullptr) {
 #if DEBUG_VERBOSE
-        printf("Collecting %u\n", i);
+        if (debugVerboseEnabled) {
+          printf("Collecting %u while shrinking right\n", i);
+        }
 #endif
         base[i].entry->delref();
       }
@@ -289,7 +296,9 @@ struct MemManager {
         [&](uint32_t i) {
           if (base[i].entry != nullptr) {
 #if DEBUG_VERBOSE
-            printf("Collecting %u\n", i);
+            if (debugVerboseEnabled) {
+              printf("Collecting %u while building free list\n", i);
+            }
 #endif
             base[i].entry->delref();
             base[i].entry = nullptr;
@@ -430,12 +439,15 @@ struct VersionedMap::Impl {
     static_assert(kOrder == std::memory_order_acquire ||
                   kOrder == std::memory_order_relaxed);
     auto &n = mm.base[node];
+    uint32_t result;
     if (n.updated.load(kOrder) && n.updateVersion <= at &&
         which == n.replacedPointer) {
-      return n.pointer[2];
+      result = n.pointer[2];
     } else {
-      return n.pointer[which];
+      result = n.pointer[which];
     }
+    assert(result == 0 || result >= kMinAddressable);
+    return result;
   }
 
   template
@@ -449,7 +461,9 @@ struct VersionedMap::Impl {
   }
 
   // Returns the node that results from setting `which` to `child` on `node`
-  uint32_t update(uint32_t node, int64_t version, bool which, uint32_t child) {
+  uint32_t update(uint32_t node, bool which, uint32_t child, int64_t version) {
+    assert(node == 0 || node >= kMinAddressable);
+    assert(child == 0 || child >= kMinAddressable);
     if (this->child(node, which, version) == child) {
       return node;
     }
@@ -464,10 +478,13 @@ struct VersionedMap::Impl {
       c.pointer[!which] = n.pointer[!which];
       c.updated.store(false, std::memory_order_relaxed);
       c.updateVersion = version;
+      assert(copy == 0 || copy >= kMinAddressable);
       return copy;
     };
 
     if (n.updateVersion == version) {
+      // The reason these aren't data races is that concurrent readers are
+      // reading < `version`
       if (updated && n.replacedPointer != which) {
         // We can't update n.replacedPointer without introducing a data race
         // (unless we packed it into the atomic?) so we copy. pointer[2] becomes
@@ -479,6 +496,7 @@ struct VersionedMap::Impl {
       } else {
         n.pointer[which] = child;
       }
+      assert(node == 0 || node >= kMinAddressable);
       return node;
     }
 
@@ -490,6 +508,7 @@ struct VersionedMap::Impl {
       n.pointer[2] = child;
       n.replacedPointer = which;
       n.updated.store(true, std::memory_order_release); // Must be last
+      assert(node == 0 || node >= kMinAddressable);
       return node;
     }
   }
@@ -544,9 +563,9 @@ struct VersionedMap::Impl {
       finger.pop();
       auto parent = finger.back();
       const bool direction = directionStack[--directionStackSize];
-      parent = update(parent, latestVersion, direction, node);
+      parent = update(parent, direction, node, latestVersion);
       if (mm.base[node].entry->priority > mm.base[parent].entry->priority) {
-        rotate(parent, latestVersion, direction);
+        rotate(parent, latestVersion, !direction);
       } else {
         if (parent == finger.back()) {
           break;
         }
@@ -610,7 +629,6 @@ struct VersionedMap::Impl {
 #include
 
 int main() {
-
   {
     weaselab::VersionedMap::Impl impl;
     impl.latestRoot = impl.roots.roots()[impl.roots.rootCount() - 1];
@@ -630,7 +648,8 @@ int main() {
   }
 
   ankerl::nanobench::Bench bench;
-  bench.minEpochIterations(5000);
+  bench.minEpochIterations(10000);
+  weaselab::MemManager mm;
 
   bench.run("allocate", [&]() {
     auto x = mm.allocate();
@@ -670,5 +689,29 @@ int main() {
       bench.doNotOptimizeAway(roots.rootForVersion(i - kNumVersions / 2));
     });
   }
+
+  {
+    weaselab::VersionedMap::Impl impl;
+    bench.run("insert fixed entry", [&]() {
+      ++impl.latestVersion;
+      impl.latestRoot = impl.roots.roots()[impl.roots.rootCount() - 1];
+      impl.insert(weaselab::VersionedMap::Mutation{
+          (const uint8_t *)"a", nullptr, 1, 0, weaselab::VersionedMap::Set});
+      impl.roots.add(impl.latestRoot, impl.latestVersion);
+    });
+  }
+
+  {
+    weaselab::VersionedMap::Impl impl;
+    bench.run("insert fresh entry", [&]() {
+      ++impl.latestVersion;
+      impl.latestRoot = impl.roots.roots()[impl.roots.rootCount() - 1];
+      auto k = __builtin_bswap64(impl.latestVersion);
+      impl.insert(weaselab::VersionedMap::Mutation{
+          (const uint8_t *)&k, nullptr, sizeof(k), 0,
+          weaselab::VersionedMap::Set});
+      impl.roots.add(impl.latestRoot, impl.latestVersion);
+    });
+  }
 }
 #endif