Measure per-byte throughput in benchmarks and remove some of them
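For context, a minimal, self-contained sketch of the nanobench pattern this change relies on (not part of this commit; the payload string and benchmark name below are made up): combining .unit("byte") with .batch(input.size()) makes nanobench normalize each reported time by the number of bytes processed per iteration.

#define ANKERL_NANOBENCH_IMPLEMENT
#include <nanobench.h>

#include <string>

int main() {
  // Stand-in payload; the real benchmarks use SIMPLE_JSON, MEDIUM_JSON, etc.
  std::string json = R"({"leader_id":"node-1","read_version":1,"operations":[]})";

  ankerl::nanobench::Bench()
      .title("per-byte example")
      .unit("byte")         // report results per byte instead of per parse
      .batch(json.size())   // each lambda invocation processes json.size() bytes
      .warmup(100)
      .run("scan payload", [&] {
        // Stand-in for parser.parse(request, data, size): touch every byte.
        size_t sum = 0;
        for (char c : json)
          sum += static_cast<unsigned char>(c);
        ankerl::nanobench::doNotOptimizeAway(sum);
      });
}

With .batch() set, nanobench divides the measured time per iteration by the batch size, so the output reads as ns/byte and bytes/s rather than ns/parse.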
@@ -13,9 +13,10 @@ int main() {
  // One-shot parsing benchmarks
  auto bench = ankerl::nanobench::Bench()
      .title("CommitRequest One-Shot Parsing")
      .unit("parse")
      .unit("byte")
      .warmup(100);

  bench.batch(SIMPLE_JSON.size());
  // Simple JSON parsing
  bench.run("Simple JSON (3 fields)", [&] {
    CommitRequest request;
@@ -27,6 +28,7 @@ int main() {
    ankerl::nanobench::doNotOptimizeAway(request.leader_id());
  });

  bench.batch(MEDIUM_JSON.size());
  // Medium complexity JSON parsing
  bench.run("Medium JSON (2 preconditions, 2 operations)", [&] {
    CommitRequest request;
@@ -38,6 +40,7 @@ int main() {
    ankerl::nanobench::doNotOptimizeAway(request.leader_id());
  });

  bench.batch(COMPLEX_JSON.size());
  // Complex JSON parsing
  bench.run("Complex JSON (3 preconditions, 5 operations)", [&] {
    CommitRequest request;
@@ -52,6 +55,7 @@ int main() {
  // Large batch operations
  for (int num_ops : {10, 50, 100, 500}) {
    std::string large_json = generate_large_json(num_ops);
    bench.batch(large_json.size());
    bench.run("Large JSON (" + std::to_string(num_ops) + " operations)", [&] {
      CommitRequest request;
      JsonCommitRequestParser parser;
@@ -66,7 +70,8 @@ int main() {
  // Streaming parsing benchmarks
  auto streaming_bench = ankerl::nanobench::Bench()
      .title("CommitRequest Streaming Parsing")
      .unit("parse")
      .unit("byte")
      .batch(MEDIUM_JSON.size())
      .warmup(50);

  // Streaming with different chunk sizes
@@ -102,67 +107,5 @@ int main() {
    });
  }

  // Reset and reuse benchmarks
  auto reuse_bench = ankerl::nanobench::Bench()
      .title("CommitRequest Reset and Reuse")
      .unit("operation")
      .warmup(50);

  reuse_bench.run("Parse -> Reset -> Parse cycle", [&] {
    static CommitRequest request; // Static to persist across invocations
    static JsonCommitRequestParser parser;

    std::string mutable_json1 = SIMPLE_JSON;
    bool result1 =
        parser.parse(request, mutable_json1.data(), mutable_json1.size());

    request.reset();

    std::string mutable_json2 = MEDIUM_JSON;
    bool result2 =
        parser.parse(request, mutable_json2.data(), mutable_json2.size());

    ankerl::nanobench::doNotOptimizeAway(result1);
    ankerl::nanobench::doNotOptimizeAway(result2);
    ankerl::nanobench::doNotOptimizeAway(request.leader_id());
  });

  // Base64 decoding performance
  auto base64_bench = ankerl::nanobench::Bench()
      .title("Base64 Decoding Performance")
      .unit("decode")
      .warmup(50);

  // JSON with lots of base64 encoded data
  std::string base64_heavy_json = R"({
    "leader_id": "base64-test-leader",
    "read_version": 12345,
    "operations": [)";

  for (int i = 0; i < 20; ++i) {
    if (i > 0)
      base64_heavy_json += ",";
    base64_heavy_json += R"(
      {
        "type": "write",
        "key": "VGhpc0lzQUxvbmdCYXNlNjRFbmNvZGVkS2V5V2l0aExvdHNPZkRhdGFGb3JUZXN0aW5nUHVycG9zZXM=",
        "value": "VGhpc0lzQW5FdmVuTG9uZ2VyQmFzZTY0RW5jb2RlZFZhbHVlV2l0aEV2ZW5Nb3JlRGF0YUZvclRlc3RpbmdUaGVCYXNlNjREZWNvZGluZ1BlcmZvcm1hbmNlT2ZUaGVQYXJzZXI="
      })";
  }
  base64_heavy_json += R"(
    ]
  })";

  base64_bench.run(
      "Heavy Base64 JSON (20 operations with long encoded data)", [&] {
        CommitRequest request;
        JsonCommitRequestParser parser;
        std::string mutable_json = base64_heavy_json;
        bool result =
            parser.parse(request, mutable_json.data(), mutable_json.size());
        ankerl::nanobench::doNotOptimizeAway(result);
        ankerl::nanobench::doNotOptimizeAway(request.leader_id());
      });

  return 0;
}
@@ -463,7 +463,8 @@ int main() {
  // Simple JSON comparison
  auto simple_bench = ankerl::nanobench::Bench()
      .title("Simple JSON Parsing Comparison")
      .unit("parse")
      .unit("byte")
      .batch(SIMPLE_JSON.size())
      .warmup(100)
      .minEpochIterations(1000);
@@ -542,7 +543,8 @@ int main() {
  // Medium complexity JSON comparison
  auto medium_bench = ankerl::nanobench::Bench()
      .title("Medium JSON Parsing Comparison")
      .unit("parse")
      .unit("byte")
      .batch(MEDIUM_JSON.size())
      .warmup(100)
      .minEpochIterations(500);
@@ -621,7 +623,8 @@ int main() {
  // Complex JSON comparison
  auto complex_bench = ankerl::nanobench::Bench()
      .title("Complex JSON Parsing Comparison")
      .unit("parse")
      .unit("byte")
      .batch(COMPLEX_JSON.size())
      .warmup(100)
      .minEpochIterations(200);
@@ -697,16 +700,16 @@ int main() {
    ankerl::nanobench::doNotOptimizeAway(handler.operations.size());
  });

  // Large batch operations comparison
  auto large_bench = ankerl::nanobench::Bench()
      .title("Large JSON Parsing Comparison")
      .unit("parse")
      .warmup(50)
      .minEpochIterations(100);

  for (int num_ops : {50, 100, 500}) {
    std::string large_json = generate_large_json(num_ops);
    std::string bench_name = std::to_string(num_ops) + " operations";
    // Large batch operations comparison
    auto large_bench = ankerl::nanobench::Bench()
        .title("Large JSON Parsing Comparison")
        .unit("byte")
        .batch(large_json.size())
        .warmup(50)
        .minEpochIterations(100);

    large_bench.run("WeaselDB Parser (" + bench_name + ")", [&] {
      CommitRequest request;
@@ -783,133 +786,6 @@ int main() {
    });
  }

  // Memory efficiency comparison
  auto memory_bench = ankerl::nanobench::Bench()
      .title("Memory Efficiency Comparison")
      .unit("allocation")
      .warmup(50)
      .minEpochIterations(200);

  memory_bench.run("WeaselDB Parser (arena allocation)", [&] {
    CommitRequest request;
    JsonCommitRequestParser parser;
    std::string mutable_json = COMPLEX_JSON;
    bool result =
        parser.parse(request, mutable_json.data(), mutable_json.size());
    ankerl::nanobench::doNotOptimizeAway(result);
    ankerl::nanobench::doNotOptimizeAway(request.total_allocated());
    ankerl::nanobench::doNotOptimizeAway(request.used_bytes());
  });

  memory_bench.run("nlohmann/json (standard allocation)", [&] {
    try {
      nlohmann::json j = nlohmann::json::parse(COMPLEX_JSON);
      bool result = validate_nlohmann_commit_request(j);
      ankerl::nanobench::doNotOptimizeAway(result);
      ankerl::nanobench::doNotOptimizeAway(j.size());
    } catch (const std::exception &) {
      ankerl::nanobench::doNotOptimizeAway(false);
    }
  });

  memory_bench.run("RapidJSON SAX (standard allocation)", [&] {
    CommitRequestSaxHandler handler;
    rapidjson::Reader reader;
    rapidjson::StringStream ss(COMPLEX_JSON.c_str());
    bool result = reader.Parse(ss, handler);
    result = result && handler.validate();
    ankerl::nanobench::doNotOptimizeAway(result);
    ankerl::nanobench::doNotOptimizeAway(handler.preconditions.size());
    ankerl::nanobench::doNotOptimizeAway(handler.operations.size());
  });

  memory_bench.run("RapidJSON SAX Arena (arena allocation)", [&] {
    CommitRequestArenaHandler handler;
    rapidjson::Reader reader;
    rapidjson::StringStream ss(COMPLEX_JSON.c_str());
    bool result = reader.Parse(ss, handler);
    result = result && handler.validate();
    ankerl::nanobench::doNotOptimizeAway(result);
    ankerl::nanobench::doNotOptimizeAway(handler.total_allocated());
    ankerl::nanobench::doNotOptimizeAway(handler.used_bytes());
  });

  // Reset and reuse comparison
  auto reuse_bench = ankerl::nanobench::Bench()
      .title("Reset and Reuse Comparison")
      .unit("cycle")
      .warmup(50)
      .minEpochIterations(100);

  reuse_bench.run("WeaselDB Parser (reset)", [&] {
    static CommitRequest request;
    static JsonCommitRequestParser parser;

    std::string mutable_json1 = SIMPLE_JSON;
    bool result1 =
        parser.parse(request, mutable_json1.data(), mutable_json1.size());

    request.reset();

    std::string mutable_json2 = MEDIUM_JSON;
    bool result2 =
        parser.parse(request, mutable_json2.data(), mutable_json2.size());

    ankerl::nanobench::doNotOptimizeAway(result1);
    ankerl::nanobench::doNotOptimizeAway(result2);
  });

  reuse_bench.run("nlohmann/json (new instance)", [&] {
    try {
      nlohmann::json j1 = nlohmann::json::parse(SIMPLE_JSON);
      bool result1 = validate_nlohmann_commit_request(j1);

      nlohmann::json j2 = nlohmann::json::parse(MEDIUM_JSON);
      bool result2 = validate_nlohmann_commit_request(j2);

      ankerl::nanobench::doNotOptimizeAway(result1);
      ankerl::nanobench::doNotOptimizeAway(result2);
    } catch (const std::exception &) {
      ankerl::nanobench::doNotOptimizeAway(false);
    }
  });

  reuse_bench.run("RapidJSON SAX (reset)", [&] {
    static CommitRequestSaxHandler handler;

    rapidjson::Reader reader;
    rapidjson::StringStream ss1(SIMPLE_JSON.c_str());
    bool result1 = reader.Parse(ss1, handler);
    result1 = result1 && handler.validate();

    handler.reset();

    rapidjson::StringStream ss2(MEDIUM_JSON.c_str());
    bool result2 = reader.Parse(ss2, handler);
    result2 = result2 && handler.validate();

    ankerl::nanobench::doNotOptimizeAway(result1);
    ankerl::nanobench::doNotOptimizeAway(result2);
  });

  reuse_bench.run("RapidJSON SAX Arena (reset)", [&] {
    static CommitRequestArenaHandler handler;

    rapidjson::Reader reader;
    rapidjson::StringStream ss1(SIMPLE_JSON.c_str());
    bool result1 = reader.Parse(ss1, handler);
    result1 = result1 && handler.validate();

    handler.reset();

    rapidjson::StringStream ss2(MEDIUM_JSON.c_str());
    bool result2 = reader.Parse(ss2, handler);
    result2 = result2 && handler.validate();

    ankerl::nanobench::doNotOptimizeAway(result1);
    ankerl::nanobench::doNotOptimizeAway(result2);
  });

  std::cout << "\nBenchmark completed. The WeaselDB parser is optimized for:\n";
  std::cout << "- Arena-based memory allocation for reduced fragmentation\n";
  std::cout << "- Streaming parsing for network protocols\n";