@@ -227,23 +227,28 @@ enum class Type : int8_t {
   Node256,
   Invalid,
 };
 
+constexpr static int kPartialKeyMaxLenEntryPresent = 24;
+
 struct Node {
+  Type type = Type::Invalid;
+
   /* begin section that's copied to the next node */
-  Node *parent = nullptr;
-  // The max write version over all keys that start with the search path up to
-  // this point
-  Entry entry;
-  int16_t numChildren = 0;
   bool entryPresent = false;
   uint8_t parentsIndex = 0;
-  constexpr static auto kPartialKeyMaxLen = 26;
-  uint8_t partialKey[kPartialKeyMaxLen];
   int8_t partialKeyLen = 0;
+  int32_t numChildren = 0;
+  Node *parent = nullptr;
+  uint8_t partialKey[kPartialKeyMaxLenEntryPresent];
+  // If not entryPresent, then the partial key might spill over into entry
+  Entry entry;
   /* end section that's copied to the next node */
-  Type type = Type::Invalid;
 };
 
+static_assert(offsetof(Node, entry) ==
+              offsetof(Node, partialKey) + kPartialKeyMaxLenEntryPresent);
+static_assert(std::is_pod_v<Entry>);
+
 struct Child {
   int64_t childMaxVersion;
   Node *child;
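Aside (not part of the diff): the new constant and the `offsetof` assertion exist so that a node whose entry slot is unused can let its partial key run into the bytes reserved for `entry`. Below is a minimal standalone sketch of that idea; the `Entry` definition and the exact field order here are assumptions for illustration, not the real ones from the file.

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <type_traits>

// Hypothetical stand-in for the real Entry (not shown in this excerpt); it
// only needs to be trivially copyable and a known size.
struct Entry {
  int64_t pointVersion;
  int64_t rangeVersion;
};

constexpr int kPartialKeyMaxLenEntryPresent = 24;

struct Node {
  bool entryPresent = false;
  int8_t partialKeyLen = 0;
  int16_t numChildren = 0;
  Node *parent = nullptr;
  uint8_t partialKey[kPartialKeyMaxLenEntryPresent];
  Entry entry; // must sit directly after partialKey for the spill-over trick
};

// Capacity available to the partial key: 24 in-line bytes, plus entry's bytes
// when this node does not store an entry of its own.
constexpr int partialKeyCapacity(bool entryPresent) {
  return entryPresent ? kPartialKeyMaxLenEntryPresent
                      : kPartialKeyMaxLenEntryPresent + int(sizeof(Entry));
}

// The same guarantees the diff adds: no padding between partialKey and entry,
// and entry's bytes may be overwritten while it is logically absent.
static_assert(offsetof(Node, entry) ==
              offsetof(Node, partialKey) + kPartialKeyMaxLenEntryPresent);
static_assert(std::is_trivially_copyable_v<Entry>);

int main() {
  Node n;
  uint8_t longKey[40] = {}; // longer than the 24 in-line bytes
  assert(!n.entryPresent);
  assert(int(sizeof(longKey)) <= partialKeyCapacity(n.entryPresent));
  // Deliberately runs past partialKey into entry's storage, which is exactly
  // what the adjacency assertion above makes safe to rely on.
  std::memcpy(reinterpret_cast<char *>(&n) + offsetof(Node, partialKey),
              longKey, sizeof(longKey));
  n.partialKeyLen = int8_t(sizeof(longKey));
}
```

The diff's `std::is_pod_v<Entry>` check demands slightly more than this sketch's `is_trivially_copyable` (a trivial default constructor as well), but trivial copyability is the property the byte-level reuse relies on.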
@@ -566,7 +571,9 @@ Node *&getOrCreateChild(Node *&self, uint8_t index,
   if (self->numChildren == 16) {
     auto *self16 = static_cast<Node16 *>(self);
     auto *newSelf = allocators->node48.allocate();
-    memcpy((void *)newSelf, self, offsetof(Node, type));
+    memcpy((char *)newSelf + sizeof(Node::type),
+           (char *)self + sizeof(Node::type),
+           sizeof(Node) - sizeof(Node::type));
     newSelf->nextFree = 16;
     int i = 0;
     for (auto x : self16->index) {
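Not part of the diff, but to make the memcpy change concrete: once `type` is the first member of `Node`, every wider node can inherit the common header with one copy that skips the leading tag, instead of the old `memcpy(newSelf, self, offsetof(Node, type))`, which only worked while `type` was the last member. A toy sketch under assumed definitions (the real `Node16`/`Node48` carry index and child arrays not shown here):

```cpp
#include <cstdint>
#include <cstring>

enum class Type : int8_t { Node16, Node48 }; // reduced stand-in

struct Node {
  Type type;             // first member, as in the new layout
  int32_t numChildren = 0;
  // ... the rest of the shared "copied to the next node" section ...
};

struct Node16 : Node { /* 16-way index/children arrays would live here */ };
struct Node48 : Node { /* 48-way index/children arrays would live here */ };

// Grow path: move the shared header from the small node to the big one while
// keeping the destination's own type tag untouched.
void copyHeaderExceptType(Node *dst, const Node *src) {
  std::memcpy(reinterpret_cast<char *>(dst) + sizeof(Node::type),
              reinterpret_cast<const char *>(src) + sizeof(Node::type),
              sizeof(Node) - sizeof(Node::type));
}

int main() {
  Node16 small;
  small.type = Type::Node16;
  small.numChildren = 16;
  Node48 big;
  big.type = Type::Node48;
  copyHeaderExceptType(&big, &small);
  return big.numChildren == 16 && big.type == Type::Node48 ? 0 : 1;
}
```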
@@ -605,7 +612,9 @@ Node *&getOrCreateChild(Node *&self, uint8_t index,
   if (self->numChildren == 48) {
     auto *self48 = static_cast<Node48 *>(self);
     auto *newSelf = allocators->node256.allocate();
-    memcpy((void *)newSelf, self, offsetof(Node, type));
+    memcpy((char *)newSelf + sizeof(Node::type),
+           (char *)self + sizeof(Node::type),
+           sizeof(Node) - sizeof(Node::type));
     newSelf->bitSet = self48->bitSet;
     newSelf->bitSet.forEachInRange(
         [&](int i) {
@@ -888,7 +897,7 @@ bytes:
 
 int longestCommonPrefixPartialKey(const uint8_t *ap, const uint8_t *bp,
                                   int cl) {
-  assert(cl <= Node::kPartialKeyMaxLen);
+  assert(cl <= kPartialKeyMaxLenEntryPresent + int(sizeof(Entry)));
   int i = 0;
   for (; i < cl; ++i) {
     if (*ap++ != *bp++) {
@@ -1515,10 +1524,10 @@ bool checkRangeRead(Node *n, std::span<const uint8_t> begin,
   return checkRangeLeftSide.ok & checkRangeRightSide.ok;
 }
 
-// Returns a pointer to the newly inserted node. caller is reponsible for
-// setting 'entry' fields and `maxVersion` on the result, which may have
-// !entryPresent. The search path of the result's parent will have
-// `maxVersion` at least `writeVersion` as a postcondition.
+// Returns a pointer to the newly inserted node. Caller must set
+// `entryPresent`, `entry` fields and `maxVersion` on the result. The search
+// path of the result's parent will have `maxVersion` at least `writeVersion` as
+// a postcondition.
 template <bool kBegin>
 [[nodiscard]] Node *insert(Node **self, std::span<const uint8_t> key,
                            int64_t writeVersion, NodeAllocators *allocators,
@@ -1537,7 +1546,9 @@ template <bool kBegin>
     *self = allocators->node1.allocate();
-    memcpy((void *)*self, old, offsetof(Node, type));
+    memcpy((char *)*self + sizeof(Node::type),
+           (char *)old + sizeof(Node::type),
+           sizeof(Node) - sizeof(Node::type));
     (*self)->partialKeyLen = partialKeyIndex;
     (*self)->entryPresent = false;
     (*self)->numChildren = 0;
@@ -1557,8 +1568,12 @@ template <bool kBegin>
   } else {
     // Consider adding a partial key
     if ((*self)->numChildren == 0 && !(*self)->entryPresent) {
-      (*self)->partialKeyLen =
-          std::min<int>(key.size(), (*self)->kPartialKeyMaxLen);
+      const bool willNotBePresent =
+          key.size() > kPartialKeyMaxLenEntryPresent + int(sizeof(Entry));
+      (*self)->partialKeyLen = std::min<int>(
+          key.size(), willNotBePresent
+                          ? kPartialKeyMaxLenEntryPresent + int(sizeof(Entry))
+                          : kPartialKeyMaxLenEntryPresent);
       memcpy((*self)->partialKey, key.data(), (*self)->partialKeyLen);
       key = key.subspan((*self)->partialKeyLen,
                         key.size() - (*self)->partialKeyLen);
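A worked version of the sizing rule above (again not from the diff; the 16-byte `Entry` is an assumed size): a key longer than the full spill capacity cannot terminate in this node, so `entryPresent` will stay false and the partial key may use `entry`'s bytes; otherwise it is capped at 24 so the entry slot stays available.

```cpp
#include <cstdint>

struct Entry { int64_t pointVersion, rangeVersion; }; // assumed 16 bytes

constexpr int kPartialKeyMaxLenEntryPresent = 24;
constexpr int kSpillCapacity =
    kPartialKeyMaxLenEntryPresent + int(sizeof(Entry)); // 40 here

// Mirrors the hunk above: choose how much of the remaining key becomes this
// node's partial key.
constexpr int choosePartialKeyLen(int keySize) {
  const bool willNotBePresent = keySize > kSpillCapacity;
  const int cap =
      willNotBePresent ? kSpillCapacity : kPartialKeyMaxLenEntryPresent;
  return keySize < cap ? keySize : cap;
}

static_assert(choosePartialKeyLen(10) == 10); // short key fits in-line
static_assert(choosePartialKeyLen(30) == 24); // entry may be needed: stop at 24
static_assert(choosePartialKeyLen(64) == 40); // entry can't be present: spill

int main() { return 0; }
```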
@@ -1713,6 +1728,7 @@ void addWriteRange(Node *&root, int64_t oldestVersion,
   if (insertedEnd) {
     // beginNode may have been invalidated
     beginNode = insert<true>(useAsRoot, begin, writeVersion, allocators, impl);
+    assert(beginNode->entryPresent);
   }
 
   for (beginNode = nextLogical(beginNode); beginNode != endNode;) {
@@ -2280,7 +2296,8 @@ int main(void) {
 #ifdef ENABLE_FUZZ
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
   TestDriver<ConflictSet::Impl> driver{data, size};
-  static_assert(driver.kMaxKeyLen > Node::kPartialKeyMaxLen);
+  static_assert(driver.kMaxKeyLen >
+                kPartialKeyMaxLenEntryPresent + sizeof(Entry));
 
   for (;;) {
     bool done = driver.next();