Consolidate duplicate test data

2025-08-15 06:38:38 -04:00
parent a793db40f0
commit 469e44c6f6
5 changed files with 150 additions and 232 deletions

CMakeLists.txt

@@ -67,16 +67,18 @@ add_executable(bench_arena_allocator benchmarks/bench_arena_allocator.cpp)
 target_link_libraries(bench_arena_allocator nanobench)
 target_include_directories(bench_arena_allocator PRIVATE src)
 
-add_executable(bench_commit_request benchmarks/bench_commit_request.cpp
-               src/commit_request.cpp)
+add_executable(
+  bench_commit_request benchmarks/bench_commit_request.cpp
+  benchmarks/test_data.cpp src/commit_request.cpp)
 target_link_libraries(bench_commit_request nanobench weaseljson)
-target_include_directories(bench_commit_request PRIVATE src)
+target_include_directories(bench_commit_request PRIVATE src benchmarks)
 
-add_executable(bench_parser_comparison benchmarks/bench_parser_comparison.cpp
-               src/commit_request.cpp)
+add_executable(
+  bench_parser_comparison benchmarks/bench_parser_comparison.cpp
+  benchmarks/test_data.cpp src/commit_request.cpp)
 target_link_libraries(bench_parser_comparison nanobench weaseljson
                       nlohmann_json::nlohmann_json)
-target_include_directories(bench_parser_comparison PRIVATE src)
+target_include_directories(bench_parser_comparison PRIVATE src benchmarks)
 
 add_test(NAME arena_allocator_tests COMMAND test_arena_allocator)
 add_test(NAME commit_request_tests COMMAND test_commit_request)

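Note: the widened `PRIVATE src benchmarks` include path is what lets both benchmark targets pull in the shared header by its bare name. A minimal consumer-side sketch (only the fixture names below come from this commit; the asserts are illustrative):

// Sketch: what a benchmark TU can rely on once "benchmarks" is on the
// include path. SIMPLE_JSON/MEDIUM_JSON/COMPLEX_JSON come from test_data.hpp.
#include "test_data.hpp"

#include <cassert>

int main() {
  using namespace weaseldb::test_data;
  assert(!SIMPLE_JSON.empty());
  assert(MEDIUM_JSON.size() > SIMPLE_JSON.size());
  assert(COMPLEX_JSON.size() > MEDIUM_JSON.size());
}
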
benchmarks/bench_commit_request.cpp

@@ -1,122 +1,12 @@
 #include "commit_request.hpp"
+#include "test_data.hpp"
 #include <nanobench.h>
 #include <string>
 
-// Sample JSON strings of varying complexity for benchmarking
-const std::string SIMPLE_JSON = R"({
-  "request_id": "simple-test",
-  "leader_id": "leader123",
-  "read_version": 12345
-})";
+using namespace weaseldb::test_data;
 
-const std::string MEDIUM_JSON = R"({
-  "request_id": "medium-test",
-  "leader_id": "leader456",
-  "read_version": 98765,
-  "preconditions": [
-    {
-      "type": "point_read",
-      "version": 98764,
-      "key": "dGVzdEtleQ=="
-    },
-    {
-      "type": "range_read",
-      "version": 98763,
-      "begin": "cmFuZ2VTdGFydA==",
-      "end": "cmFuZ2VFbmQ="
-    }
-  ],
-  "operations": [
-    {
-      "type": "write",
-      "key": "d3JpdGVLZXk=",
-      "value": "d3JpdGVWYWx1ZQ=="
-    },
-    {
-      "type": "delete",
-      "key": "ZGVsZXRlS2V5"
-    }
-  ]
-})";
-const std::string COMPLEX_JSON = R"({
-  "request_id": "complex-batch-operation-12345",
-  "leader_id": "leader789abcdef",
-  "read_version": 999999999,
-  "preconditions": [
-    {
-      "type": "point_read",
-      "version": 999999998,
-      "key": "cHJlY29uZGl0aW9uS2V5MQ=="
-    },
-    {
-      "type": "range_read",
-      "version": 999999997,
-      "begin": "cmFuZ2VQcmVjb25kaXRpb25CZWdpbg==",
-      "end": "cmFuZ2VQcmVjb25kaXRpb25FbmQ="
-    },
-    {
-      "type": "point_read",
-      "version": 999999996,
-      "key": "YW5vdGhlclByZWNvbmRpdGlvbktleQ=="
-    }
-  ],
-  "operations": [
-    {
-      "type": "write",
-      "key": "b3BlcmF0aW9uS2V5MQ==",
-      "value": "bGFyZ2VPcGVyYXRpb25WYWx1ZVdpdGhMb3RzT2ZEYXRhSGVyZQ=="
-    },
-    {
-      "type": "write",
-      "key": "b3BlcmF0aW9uS2V5Mg==",
-      "value": "YW5vdGhlckxhcmdlVmFsdWVXaXRoRXZlbk1vcmVEYXRh"
-    },
-    {
-      "type": "delete",
-      "key": "ZGVsZXRlT3BlcmF0aW9uS2V5"
-    },
-    {
-      "type": "range_delete",
-      "begin": "cmFuZ2VEZWxldGVTdGFydA==",
-      "end": "cmFuZ2VEZWxldGVFbmQ="
-    },
-    {
-      "type": "write",
-      "key": "ZmluYWxPcGVyYXRpb25LZXk=",
-      "value": "ZmluYWxPcGVyYXRpb25WYWx1ZVdpdGhMb25nZXJEYXRhRm9yVGVzdGluZw=="
-    }
-  ]
-})";
-// Generate a large JSON with many operations for stress testing
-std::string generate_large_json(int num_operations) {
-  std::string json = R"({
-  "request_id": "large-batch-)" +
-                     std::to_string(num_operations) + R"(",
-  "leader_id": "stress-test-leader",
-  "read_version": 1000000,
-  "operations": [)";
-  for (int i = 0; i < num_operations; ++i) {
-    if (i > 0)
-      json += ",";
-    json += R"(
-    {
-      "type": "write",
-      "key": ")" +
-            std::string("key") + std::to_string(i) + R"(",
-      "value": ")" +
-            std::string("value") + std::to_string(i) + R"("
-    })";
-  }
-  json += R"(
-  ]
-})";
-  return json;
-}
+// JSON test data is now provided by test_data.hpp
 
 int main() {
   // One-shot parsing benchmarks

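With the fixtures centralized, the benchmark body reduces to timing loops over the shared strings. A minimal sketch, assuming a free function parse_commit_request as a hypothetical stand-in for the real entry point in commit_request.hpp (Bench::run and doNotOptimizeAway are nanobench's actual API):

#include "test_data.hpp"
#include <nanobench.h>
#include <string>

// Hypothetical stand-in; the real parser signature lives in
// commit_request.hpp and is not shown in this diff.
bool parse_commit_request(const std::string &json);

int main() {
  using namespace weaseldb::test_data;
  ankerl::nanobench::Bench bench;
  bench.run("parse SIMPLE_JSON", [&] {
    ankerl::nanobench::doNotOptimizeAway(parse_commit_request(SIMPLE_JSON));
  });
  bench.run("parse COMPLEX_JSON", [&] {
    ankerl::nanobench::doNotOptimizeAway(parse_commit_request(COMPLEX_JSON));
  });
}
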
benchmarks/bench_parser_comparison.cpp

@@ -1,124 +1,14 @@
 #include "commit_request.hpp"
+#include "test_data.hpp"
 #include <iostream>
 #include <nanobench.h>
 #include <nlohmann/json.hpp>
 #include <string>
 
-// Sample JSON strings for benchmarking - same as commit_request benchmark
-const std::string SIMPLE_JSON = R"({
-  "request_id": "simple-test",
-  "leader_id": "leader123",
-  "read_version": 12345
-})";
+using namespace weaseldb::test_data;
 
-const std::string MEDIUM_JSON = R"({
-  "request_id": "medium-test",
-  "leader_id": "leader456",
-  "read_version": 98765,
-  "preconditions": [
-    {
-      "type": "point_read",
-      "version": 98764,
-      "key": "dGVzdEtleQ=="
-    },
-    {
-      "type": "range_read",
-      "version": 98763,
-      "begin": "cmFuZ2VTdGFydA==",
-      "end": "cmFuZ2VFbmQ="
-    }
-  ],
-  "operations": [
-    {
-      "type": "write",
-      "key": "d3JpdGVLZXk=",
-      "value": "d3JpdGVWYWx1ZQ=="
-    },
-    {
-      "type": "delete",
-      "key": "ZGVsZXRlS2V5"
-    }
-  ]
-})";
-const std::string COMPLEX_JSON = R"({
-  "request_id": "complex-batch-operation-12345",
-  "leader_id": "leader789abcdef",
-  "read_version": 999999999,
-  "preconditions": [
-    {
-      "type": "point_read",
-      "version": 999999998,
-      "key": "cHJlY29uZGl0aW9uS2V5MQ=="
-    },
-    {
-      "type": "range_read",
-      "version": 999999997,
-      "begin": "cmFuZ2VQcmVjb25kaXRpb25CZWdpbg==",
-      "end": "cmFuZ2VQcmVjb25kaXRpb25FbmQ="
-    },
-    {
-      "type": "point_read",
-      "version": 999999996,
-      "key": "YW5vdGhlclByZWNvbmRpdGlvbktleQ=="
-    }
-  ],
-  "operations": [
-    {
-      "type": "write",
-      "key": "b3BlcmF0aW9uS2V5MQ==",
-      "value": "bGFyZ2VPcGVyYXRpb25WYWx1ZVdpdGhMb3RzT2ZEYXRhSGVyZQ=="
-    },
-    {
-      "type": "write",
-      "key": "b3BlcmF0aW9uS2V5Mg==",
-      "value": "YW5vdGhlckxhcmdlVmFsdWVXaXRoRXZlbk1vcmVEYXRh"
-    },
-    {
-      "type": "delete",
-      "key": "ZGVsZXRlT3BlcmF0aW9uS2V5"
-    },
-    {
-      "type": "range_delete",
-      "begin": "cmFuZ2VEZWxldGVTdGFydA==",
-      "end": "cmFuZ2VEZWxldGVFbmQ="
-    },
-    {
-      "type": "write",
-      "key": "ZmluYWxPcGVyYXRpb25LZXk=",
-      "value": "ZmluYWxPcGVyYXRpb25WYWx1ZVdpdGhMb25nZXJEYXRhRm9yVGVzdGluZw=="
-    }
-  ]
-})";
-// Generate a large JSON with many operations for stress testing
-std::string generate_large_json(int num_operations) {
-  std::string json = R"({
-  "request_id": "large-batch-)" +
-                     std::to_string(num_operations) + R"(",
-  "leader_id": "stress-test-leader",
-  "read_version": 1000000,
-  "operations": [)";
-  for (int i = 0; i < num_operations; ++i) {
-    if (i > 0)
-      json += ",";
-    json += R"(
-    {
-      "type": "write",
-      "key": ")" +
-            std::string("key") + std::to_string(i) + R"(",
-      "value": ")" +
-            std::string("value") + std::to_string(i) + R"("
-    })";
-  }
-  json += R"(
-  ]
-})";
-  return json;
-}
+// JSON test data is now provided by test_data.hpp
 
 // Helper function to simulate validation work on nlohmann json object
 bool validate_nlohmann_commit_request(const nlohmann::json &j) {

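The comparison benchmark now drives both parsers from the same shared fixtures. Only the nlohmann half can be sketched faithfully here, since nlohmann::json::parse is public upstream API while the weaseljson interface is not visible in this diff:

#include "test_data.hpp"
#include <nanobench.h>
#include <nlohmann/json.hpp>

int main() {
  using namespace weaseldb::test_data;
  // Baseline: nlohmann's DOM parse over the shared medium fixture.
  ankerl::nanobench::Bench().run("nlohmann parse MEDIUM_JSON", [&] {
    nlohmann::json j = nlohmann::json::parse(MEDIUM_JSON);
    ankerl::nanobench::doNotOptimizeAway(j);
  });
}
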
benchmarks/test_data.cpp Normal file

@@ -0,0 +1,120 @@
#include "test_data.hpp"

namespace weaseldb::test_data {

// Sample JSON strings of varying complexity for benchmarking and testing
const std::string SIMPLE_JSON = R"({
  "request_id": "simple-test",
  "leader_id": "leader123",
  "read_version": 12345
})";

const std::string MEDIUM_JSON = R"({
  "request_id": "medium-test",
  "leader_id": "leader456",
  "read_version": 98765,
  "preconditions": [
    {
      "type": "point_read",
      "version": 98764,
      "key": "dGVzdEtleQ=="
    },
    {
      "type": "range_read",
      "version": 98763,
      "begin": "cmFuZ2VTdGFydA==",
      "end": "cmFuZ2VFbmQ="
    }
  ],
  "operations": [
    {
      "type": "write",
      "key": "d3JpdGVLZXk=",
      "value": "d3JpdGVWYWx1ZQ=="
    },
    {
      "type": "delete",
      "key": "ZGVsZXRlS2V5"
    }
  ]
})";

const std::string COMPLEX_JSON = R"({
  "request_id": "complex-batch-operation-12345",
  "leader_id": "leader789abcdef",
  "read_version": 999999999,
  "preconditions": [
    {
      "type": "point_read",
      "version": 999999998,
      "key": "cHJlY29uZGl0aW9uS2V5MQ=="
    },
    {
      "type": "range_read",
      "version": 999999997,
      "begin": "cmFuZ2VQcmVjb25kaXRpb25CZWdpbg==",
      "end": "cmFuZ2VQcmVjb25kaXRpb25FbmQ="
    },
    {
      "type": "point_read",
      "version": 999999996,
      "key": "YW5vdGhlclByZWNvbmRpdGlvbktleQ=="
    }
  ],
  "operations": [
    {
      "type": "write",
      "key": "b3BlcmF0aW9uS2V5MQ==",
      "value": "bGFyZ2VPcGVyYXRpb25WYWx1ZVdpdGhMb3RzT2ZEYXRhSGVyZQ=="
    },
    {
      "type": "write",
      "key": "b3BlcmF0aW9uS2V5Mg==",
      "value": "YW5vdGhlckxhcmdlVmFsdWVXaXRoRXZlbk1vcmVEYXRh"
    },
    {
      "type": "delete",
      "key": "ZGVsZXRlT3BlcmF0aW9uS2V5"
    },
    {
      "type": "range_delete",
      "begin": "cmFuZ2VEZWxldGVTdGFydA==",
      "end": "cmFuZ2VEZWxldGVFbmQ="
    },
    {
      "type": "write",
      "key": "ZmluYWxPcGVyYXRpb25LZXk=",
      "value": "ZmluYWxPcGVyYXRpb25WYWx1ZVdpdGhMb25nZXJEYXRhRm9yVGVzdGluZw=="
    }
  ]
})";

// Generate a large JSON with many operations for stress testing
std::string generate_large_json(int num_operations) {
  std::string json = R"({
  "request_id": "large-batch-)" +
                     std::to_string(num_operations) + R"(",
  "leader_id": "stress-test-leader",
  "read_version": 1000000,
  "operations": [)";
  for (int i = 0; i < num_operations; ++i) {
    if (i > 0)
      json += ",";
    json += R"(
    {
      "type": "write",
      "key": ")" +
            std::string("key") + std::to_string(i) + R"(",
      "value": ")" +
            std::string("value") + std::to_string(i) + R"("
    })";
  }
  json += R"(
  ]
})";
  return json;
}

} // namespace weaseldb::test_data

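generate_large_json builds its payload by plain string concatenation, so output size grows linearly with num_operations and each element is one {type, key, value} write object in a single valid JSON document. A quick smoke test of the generator (the byte-count print is illustrative, not from the repo):

#include "test_data.hpp"
#include <cstdio>
#include <string>

int main() {
  // 1,000 synthetic write operations; handy for stress benchmarks without
  // checking a large fixture file into the repository.
  std::string big = weaseldb::test_data::generate_large_json(1000);
  std::printf("generated %zu bytes\n", big.size());
}
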
benchmarks/test_data.hpp Normal file

@@ -0,0 +1,16 @@
#pragma once

#include <string>

namespace weaseldb::test_data {

// Sample JSON strings of varying complexity for benchmarking and testing
// Declarations only - definitions in test_data.cpp to avoid ODR violations
extern const std::string SIMPLE_JSON;
extern const std::string MEDIUM_JSON;
extern const std::string COMPLEX_JSON;

// Generate a large JSON with many operations for stress testing
std::string generate_large_json(int num_operations);

} // namespace weaseldb::test_data
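
The extern split is the point of this header: the strings are declared here and defined exactly once in test_data.cpp. A namespace-scope const defined directly in a header has internal linkage by default, so every including translation unit would otherwise get its own copy of each string (and its own constructor run), which is exactly the duplication this commit removes. In miniature:

// test_data.hpp: declaration only. `extern` gives the constant external
// linkage, so all TUs refer to one shared object.
//   extern const std::string SIMPLE_JSON;
//
// test_data.cpp: the single definition the linker sees.
//   const std::string SIMPLE_JSON = R"(...)";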