Compare commits
7 Commits
8b828be0a9 ... 7006012aeb

| Author | SHA1 | Date |
|---|---|---|
| | 7006012aeb | |
| | 3d573694c4 | |
| | 87bbb47787 | |
| | d502f66bb4 | |
| | 31e751fe75 | |
| | 953ec3ad43 | |
| | 8326c67b9c | |
@@ -9,6 +9,7 @@
#include <iostream>
#include <limits>
#include <new>
#include <span>
#include <type_traits>
#include <typeinfo>
#include <utility>
@@ -301,38 +302,87 @@ public:
new_size * sizeof(T), alignof(T)));
}

/**
* @brief Smart pointer for arena-allocated objects with non-trivial
* destructors.
*
* ArenaAllocator::Ptr calls the destructor but does not free memory (assumes
* arena allocation). This provides RAII semantics for objects that need
* cleanup without the overhead of individual memory deallocation.
*
* @tparam T The type of object being managed
*/
template <typename T> struct Ptr {
Ptr() noexcept : ptr_(nullptr) {}

explicit Ptr(T *ptr) noexcept : ptr_(ptr) {}

Ptr(const Ptr &) = delete;
Ptr &operator=(const Ptr &) = delete;

Ptr(Ptr &&other) noexcept : ptr_(other.ptr_) { other.ptr_ = nullptr; }

Ptr &operator=(Ptr &&other) noexcept {
if (this != &other) {
reset();
ptr_ = other.ptr_;
other.ptr_ = nullptr;
}
return *this;
}

~Ptr() { reset(); }

T *operator->() const noexcept { return ptr_; }
T &operator*() const noexcept { return *ptr_; }

T *get() const noexcept { return ptr_; }

explicit operator bool() const noexcept { return ptr_ != nullptr; }

T *release() noexcept {
T *result = ptr_;
ptr_ = nullptr;
return result;
}

void reset(T *new_ptr = nullptr) noexcept {
if (ptr_) {
ptr_->~T();
}
ptr_ = new_ptr;
}

private:
T *ptr_;
};
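A minimal usage sketch of the smart pointer added above. The `Logger` type is purely illustrative; `allocate_raw` and the `Ptr(T *)` constructor are taken from this diff, and the sketch assumes the surrounding `ArenaAllocator` header is available.

```cpp
#include <new>
#include <utility>

// Illustrative type with a non-trivial destructor.
struct Logger {
  ~Logger() { /* flush buffers, close handles, ... */ }
};

void ptr_sketch(ArenaAllocator &arena) {
  // Ptr runs the destructor on scope exit but never frees memory:
  // the bytes belong to the arena and are reclaimed only when the
  // arena itself is reset or destroyed.
  void *raw = arena.allocate_raw(sizeof(Logger), alignof(Logger));
  ArenaAllocator::Ptr<Logger> p(new (raw) Logger());

  ArenaAllocator::Ptr<Logger> q = std::move(p); // p becomes null
  Logger *observer = q.get();                   // non-owning view
  (void)observer;
  q.reset();                                    // ~Logger() runs here
}
```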

/**
* @brief Construct an object of type T in the arena using placement new.
*
* This is a convenience method that combines allocation with in-place
* construction. It properly handles alignment requirements for type T.
* This method returns different types based on whether T is trivially
* destructible:
* - For trivially destructible types: returns T* (raw pointer)
* - For non-trivially destructible types: returns ArenaAllocator::Ptr<T>
* (smart pointer that calls destructor)
*
* @tparam T The type of object to construct (must be trivially destructible)
* @tparam T The type of object to construct
* @tparam Args Types of constructor arguments
* @param args Arguments to forward to T's constructor
* @return Pointer to the constructed object
* @return T* for trivially destructible types, ArenaAllocator::Ptr<T>
* otherwise
* @note Prints error to stderr and calls std::abort() if memory allocation
* fails
*
* ## Type Requirements:
* T must be trivially destructible (std::is_trivially_destructible_v<T>).
* This prevents subtle bugs since destructors are never called for objects
* constructed in the arena.
*
*
* ## Note:
* Objects constructed this way cannot be individually destroyed.
* Their destructors will NOT be called automatically - hence the requirement
* for trivially destructible types.
*/
template <typename T, typename... Args> T *construct(Args &&...args) {
static_assert(
std::is_trivially_destructible_v<T>,
"ArenaAllocator::construct requires trivially destructible types. "
"Objects constructed in the arena will not have their destructors "
"called.");
template <typename T, typename... Args> auto construct(Args &&...args) {
void *ptr = allocate_raw(sizeof(T), alignof(T));
return new (ptr) T(std::forward<Args>(args)...);
T *obj = new (ptr) T(std::forward<Args>(args)...);

if constexpr (std::is_trivially_destructible_v<T>) {
return obj;
} else {
return Ptr<T>(obj);
}
}
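A quick illustration of the return-type dispatch described in the doc comment above, mirroring what the new unit tests check later in this change. The `Pod` and `Res` names are illustrative only.

```cpp
struct Pod { int x; };                    // trivially destructible
struct Res { ~Res() { /* cleanup */ } };  // non-trivially destructible

void construct_sketch(ArenaAllocator &arena) {
  // Trivially destructible: plain pointer, nothing ever needs to run.
  Pod *a = arena.construct<Pod>();
  a->x = 1;

  // Non-trivially destructible: ArenaAllocator::Ptr<Res>, whose destructor
  // runs ~Res() at scope exit while the bytes stay in the arena.
  auto b = arena.construct<Res>();
  (void)b;
}
```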

/**
@@ -639,6 +689,12 @@ template <typename T> struct ArenaVector {

void clear() { size_ = 0; }

// Implicit conversion to std::span
operator std::span<T>() { return std::span<T>(data_, size_); }
operator std::span<const T>() const {
return std::span<const T>(data_, size_);
}

// Iterator support for range-based for loops
T *begin() { return data_; }
const T *begin() const { return data_; }
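A small usage sketch for the span conversion and iteration support added above. The `sum()` helper is hypothetical; the sketch assumes the `ArenaAllocator` / `ArenaVector` header from this change is in scope.

```cpp
#include <cstdint>
#include <span>

// Hypothetical consumer that only needs a read-only view.
static uint64_t sum(std::span<const uint64_t> xs) {
  uint64_t total = 0;
  for (auto x : xs) total += x;
  return total;
}

void vector_sketch(ArenaAllocator &arena) {
  ArenaVector<uint64_t> counts(&arena);
  counts.push_back(1);
  counts.push_back(2);
  // Implicit conversion to std::span<const uint64_t> via the const operator.
  uint64_t s = sum(counts);
  (void)s;
}
```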

src/metric.cpp (580)
@@ -11,7 +11,9 @@
#include <cstdlib>
#include <cstring>
#include <functional>
#include <map>
#include <mutex>
#include <set>
#include <string>
#include <thread>
#include <type_traits>
@@ -168,6 +170,21 @@ struct LabelsKey {
}
return true;
}

bool operator<(const LabelsKey &other) const {
if (labels.size() != other.labels.size()) {
return labels.size() < other.labels.size();
}
for (size_t i = 0; i < labels.size(); ++i) {
if (labels[i].first != other.labels[i].first) {
return labels[i].first < other.labels[i].first;
}
if (labels[i].second != other.labels[i].second) {
return labels[i].second < other.labels[i].second;
}
}
return false; // They are equal
}
};
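For orientation, the comparator added above orders label sets first by size and then lexicographically by key/value pairs, which is what makes `std::map`/`std::set` keyed on `LabelsKey` iterate deterministically. A standalone illustration of the same rule, using plain `std::vector` rather than the real `LabelsKey`:

```cpp
#include <string>
#include <utility>
#include <vector>

using Labels = std::vector<std::pair<std::string, std::string>>;

// Same ordering rule as LabelsKey::operator<: size first, then pairwise.
bool labels_less(const Labels &a, const Labels &b) {
  if (a.size() != b.size()) return a.size() < b.size();
  for (size_t i = 0; i < a.size(); ++i) {
    if (a[i].first != b[i].first) return a[i].first < b[i].first;
    if (a[i].second != b[i].second) return a[i].second < b[i].second;
  }
  return false; // equal
}
// e.g. {{"method","GET"}} sorts before {{"status","200"}}:
// equal size, and "method" < "status".
```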

} // namespace metric
@@ -214,9 +231,8 @@ template <> struct Family<Counter>::State {
global_accumulated_values;

// Callback-based metrics (global, not per-thread)
std::unordered_map<
LabelsKey, MetricCallback<Counter>, std::hash<LabelsKey>,
std::equal_to<LabelsKey>,
std::map<
LabelsKey, MetricCallback<Counter>, std::less<LabelsKey>,
ArenaStlAllocator<std::pair<const LabelsKey, MetricCallback<Counter>>>>
callbacks;

@@ -238,9 +254,7 @@ template <> struct Family<Gauge>::State {
instances;

// Callback-based metrics
std::unordered_map<
LabelsKey, MetricCallback<Gauge>, std::hash<LabelsKey>,
std::equal_to<LabelsKey>,
std::map<LabelsKey, MetricCallback<Gauge>, std::less<LabelsKey>,
ArenaStlAllocator<std::pair<const LabelsKey, MetricCallback<Gauge>>>>
callbacks;

@@ -321,9 +335,8 @@ struct Metric {

// Function-local statics to avoid static initialization order fiasco
static auto &get_counter_families() {
using FamilyMap = std::unordered_map<
std::string_view, Family<Counter>::State *, std::hash<std::string_view>,
std::equal_to<std::string_view>,
using FamilyMap = std::map<
std::string_view, Family<Counter>::State *, std::less<std::string_view>,
ArenaStlAllocator<
std::pair<const std::string_view, Family<Counter>::State *>>>;
static FamilyMap *counterFamilies = new FamilyMap(
@@ -334,9 +347,8 @@ struct Metric {
}

static auto &get_gauge_families() {
using FamilyMap = std::unordered_map<
std::string_view, Family<Gauge>::State *, std::hash<std::string_view>,
std::equal_to<std::string_view>,
using FamilyMap = std::map<
std::string_view, Family<Gauge>::State *, std::less<std::string_view>,
ArenaStlAllocator<
std::pair<const std::string_view, Family<Gauge>::State *>>>;
static FamilyMap *gaugeFamilies = new FamilyMap(
@@ -347,11 +359,11 @@ struct Metric {
}

static auto &get_histogram_families() {
using FamilyMap = std::unordered_map<
std::string_view, Family<Histogram>::State *,
std::hash<std::string_view>, std::equal_to<std::string_view>,
ArenaStlAllocator<
std::pair<const std::string_view, Family<Histogram>::State *>>>;
using FamilyMap =
std::map<std::string_view, Family<Histogram>::State *,
std::less<std::string_view>,
ArenaStlAllocator<std::pair<const std::string_view,
Family<Histogram>::State *>>>;
static FamilyMap *histogramFamilies = new FamilyMap(
ArenaStlAllocator<
std::pair<const std::string_view, Family<Histogram>::State *>>(
@@ -573,6 +585,152 @@ struct Metric {
result.p = ptr;
return result;
}

// Pre-computed data structures with resolved pointers to eliminate hash
// lookups
struct CounterLabelData {
LabelsKey labels_key;
std::vector<Counter::State *> thread_states; // Pre-resolved pointers
Counter::State *global_state; // Pre-resolved global state pointer

CounterLabelData(const LabelsKey &key)
: labels_key(key), global_state(nullptr) {}
};

struct GaugeLabelData {
LabelsKey labels_key;
Gauge::State *instance_state; // Direct pointer to gauge instance

GaugeLabelData(const LabelsKey &key)
: labels_key(key), instance_state(nullptr) {}
};

struct HistogramLabelData {
LabelsKey labels_key;
std::vector<Histogram::State *> thread_states; // Pre-resolved pointers
Histogram::State *global_state; // Pre-resolved global state pointer
size_t bucket_count; // Cache bucket count from family

HistogramLabelData(const LabelsKey &key)
: labels_key(key), global_state(nullptr), bucket_count(0) {}
};

// Pre-computed data for each family type, built once and reused
struct LabelSets {
std::vector<std::vector<CounterLabelData>> counter_data;
std::vector<std::vector<GaugeLabelData>> gauge_data;
std::vector<std::vector<HistogramLabelData>> histogram_data;
};
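The idea behind these pre-resolved structures, in miniature (illustrative types only, not the real metric state): pay for the map lookups once while building the label data, so the per-render hot path only chases stored pointers.

```cpp
#include <map>
#include <string>
#include <vector>

struct State { double value = 0.0; };

// Resolve each key once, up front...
std::vector<State *> resolve(std::map<std::string, State> &m,
                             const std::vector<std::string> &keys) {
  std::vector<State *> out;
  for (const auto &k : keys) {
    auto it = m.find(k);
    if (it != m.end()) out.push_back(&it->second);
  }
  return out;
}

// ...so later passes sum through pointers with no lookups at all.
double total(const std::vector<State *> &states) {
  double t = 0.0;
  for (State *s : states) t += s->value;
  return t;
}
```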

// Build label sets once for reuse in both phases
static LabelSets build_label_sets(ArenaAllocator &arena) {
LabelSets label_sets;

// Build counter data with pre-resolved pointers
for (const auto &[name, family] : Metric::get_counter_families()) {
// Collect all unique labels first
std::set<LabelsKey, std::less<LabelsKey>, ArenaStlAllocator<LabelsKey>>
all_labels{ArenaStlAllocator<LabelsKey>(&arena)};

for (const auto &[thread_id, per_thread] : family->per_thread_state) {
for (const auto &[labels_key, instance] : per_thread.instances) {
all_labels.insert(labels_key);
}
}
for (const auto &[labels_key, global_state] :
family->global_accumulated_values) {
if (global_state) {
all_labels.insert(labels_key);
}
}

// Pre-resolve all pointers for each label set
std::vector<CounterLabelData> family_data;
for (const auto &labels_key : all_labels) {
CounterLabelData data(labels_key);

// Pre-resolve thread-local state pointers
for (const auto &[thread_id, per_thread] : family->per_thread_state) {
auto it = per_thread.instances.find(labels_key);
if (it != per_thread.instances.end()) {
data.thread_states.push_back(it->second);
}
}

// Pre-resolve global accumulated state pointer
auto global_it = family->global_accumulated_values.find(labels_key);
data.global_state =
(global_it != family->global_accumulated_values.end() &&
global_it->second)
? global_it->second
: nullptr;

family_data.push_back(std::move(data));
}
label_sets.counter_data.push_back(std::move(family_data));
}

// Build gauge data with pre-resolved pointers
for (const auto &[name, family] : Metric::get_gauge_families()) {
std::vector<GaugeLabelData> family_data;

// Gauges iterate directly over instances
for (const auto &[labels_key, instance] : family->instances) {
GaugeLabelData data(labels_key);
data.instance_state = instance;
family_data.push_back(std::move(data));
}

label_sets.gauge_data.push_back(std::move(family_data));
}

// Build histogram data with pre-resolved pointers
for (const auto &[name, family] : Metric::get_histogram_families()) {
// Collect all unique labels first
std::set<LabelsKey, std::less<LabelsKey>, ArenaStlAllocator<LabelsKey>>
all_labels{ArenaStlAllocator<LabelsKey>(&arena)};

for (const auto &[thread_id, per_thread] : family->per_thread_state) {
for (const auto &[labels_key, instance] : per_thread.instances) {
all_labels.insert(labels_key);
}
}
for (const auto &[labels_key, global_state] :
family->global_accumulated_values) {
if (global_state) {
all_labels.insert(labels_key);
}
}

// Pre-resolve all pointers for each label set
std::vector<HistogramLabelData> family_data;
for (const auto &labels_key : all_labels) {
HistogramLabelData data(labels_key);
data.bucket_count = family->buckets.size(); // Cache bucket count

// Pre-resolve thread-local state pointers
for (const auto &[thread_id, per_thread] : family->per_thread_state) {
auto it = per_thread.instances.find(labels_key);
if (it != per_thread.instances.end()) {
data.thread_states.push_back(it->second);
}
}

// Pre-resolve global accumulated state pointer
auto global_it = family->global_accumulated_values.find(labels_key);
data.global_state =
(global_it != family->global_accumulated_values.end() &&
global_it->second)
? global_it->second
: nullptr;

family_data.push_back(std::move(data));
}
label_sets.histogram_data.push_back(std::move(family_data));
}

return label_sets;
}
};

Counter::Counter() = default;
@@ -911,9 +1069,146 @@ bool is_valid_label_value(std::string_view value) {
return simdutf::validate_utf8(value.data(), value.size());
}

union MetricValue {
double as_double;
uint64_t as_uint64;
};

// Phase 1: Compute all metric values in deterministic order
static ArenaVector<MetricValue>
compute_metric_values(ArenaAllocator &arena,
const Metric::LabelSets &label_sets) {
ArenaVector<MetricValue> values(&arena);

// Compute counter values - ITERATION ORDER MUST MATCH FORMAT PHASE
size_t counter_family_idx = 0;
for (const auto &[name, family] : Metric::get_counter_families()) {
// Callback values
for (const auto &[labels_key, callback] : family->callbacks) {
auto value = callback();
values.push_back({.as_double = value});
}

// Use pre-computed data with resolved pointers - no hash lookups!
const auto &family_data = label_sets.counter_data[counter_family_idx++];
for (const auto &data : family_data) {
double total_value = 0.0;

// Sum thread-local values using pre-resolved pointers
for (auto *state_ptr : data.thread_states) {
// Atomic read to match atomic store in Counter::inc()
double value;
__atomic_load(&state_ptr->value, &value, __ATOMIC_RELAXED);
total_value += value;
}

// Add global accumulated value using pre-resolved pointer
if (data.global_state) {
total_value += data.global_state->value;
}

values.push_back({.as_double = total_value});
}
}

// Compute gauge values - ITERATION ORDER MUST MATCH FORMAT PHASE
size_t gauge_family_idx = 0;
for (const auto &[name, family] : Metric::get_gauge_families()) {
// Callback values
for (const auto &[labels_key, callback] : family->callbacks) {
auto value = callback();
values.push_back({.as_double = value});
}

// Use pre-computed data with resolved pointers - no hash lookups!
const auto &family_data = label_sets.gauge_data[gauge_family_idx++];
for (const auto &data : family_data) {
auto value = std::bit_cast<double>(
data.instance_state->value.load(std::memory_order_relaxed));
values.push_back({.as_double = value});
}
}

// Compute histogram values - ITERATION ORDER MUST MATCH FORMAT PHASE
size_t histogram_family_idx = 0;
for (const auto &family_pair : Metric::get_histogram_families()) {
// Use pre-computed data with resolved pointers - no hash lookups!
const auto &family_data = label_sets.histogram_data[histogram_family_idx++];

for (const auto &data : family_data) {
size_t bucket_count = data.bucket_count; // Use cached bucket count

ArenaVector<uint64_t> total_counts(&arena);
for (size_t i = 0; i < bucket_count; ++i) {
total_counts.push_back(0);
}
double total_sum = 0.0;
uint64_t total_observations = 0;

// Sum thread-local values using pre-resolved pointers
for (auto *instance : data.thread_states) {
// Extract data under lock - minimize critical section
uint64_t *counts_snapshot = arena.allocate<uint64_t>(bucket_count);
double sum_snapshot;
uint64_t observations_snapshot;

{
std::lock_guard<std::mutex> lock(instance->mutex);
for (size_t i = 0; i < instance->counts.size(); ++i) {
counts_snapshot[i] = instance->counts[i];
}
sum_snapshot = instance->sum;
observations_snapshot = instance->observations;
}

// Add to totals
for (size_t i = 0; i < bucket_count; ++i) {
total_counts[i] += counts_snapshot[i];
}
total_sum += sum_snapshot;
total_observations += observations_snapshot;
}

// Add global accumulated value using pre-resolved pointer
if (data.global_state) {
auto *global_state = data.global_state;
for (size_t i = 0; i < global_state->counts.size(); ++i) {
total_counts[i] += global_state->counts[i];
}
total_sum += global_state->sum;
total_observations += global_state->observations;
}

// Store histogram values
// Store explicit bucket counts
for (size_t i = 0; i < total_counts.size(); ++i) {
values.push_back({.as_uint64 = total_counts[i]});
}
// Store +Inf bucket (total observations)
values.push_back({.as_uint64 = total_observations});
// Store sum
values.push_back({.as_double = total_sum});
// Store count
values.push_back({.as_uint64 = total_observations});
}
}

return values;
}
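The two-phase contract above (compute values in one pass, then format by replaying exactly the same iteration order and consuming values with a cursor, like `next_value`) can be summarized with this simplified sketch. The names here are illustrative and not the real metric types.

```cpp
#include <cstdio>
#include <vector>

struct Value { double v; };

// Phase 1: append values in a fixed traversal order.
std::vector<Value> compute(const std::vector<double> &metrics) {
  std::vector<Value> out;
  for (double m : metrics) out.push_back({m}); // deterministic order
  return out;
}

// Phase 2: walk the same traversal and consume values with a cursor.
void format(const std::vector<double> &metrics, const std::vector<Value> &vals) {
  const Value *next = vals.data();              // cursor, like next_value
  for (size_t i = 0; i < metrics.size(); ++i) { // MUST match compute()'s order
    std::printf("metric_%zu %.17g\n", i, next++->v);
  }
}
```

If the two traversals ever diverge, the cursor reads the wrong slot, which is why the comments insist the iteration order must match between phases.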

// Phase 2: Format metrics using pre-computed values
std::span<std::string_view> render(ArenaAllocator &arena) {
// Hold lock throughout both phases to prevent registry changes
std::unique_lock<std::mutex> _{Metric::mutex};

// Build label sets once for both phases
Metric::LabelSets label_sets = Metric::build_label_sets(arena);

// Phase 1: Compute all metric values
ArenaVector<MetricValue> metric_values =
compute_metric_values(arena, label_sets);
const MetricValue *next_value = metric_values.data();

ArenaVector<std::string_view> output(&arena);

auto format_labels =
@@ -972,7 +1267,8 @@ std::span<std::string_view> render(ArenaAllocator &arena) {
return std::string_view(buf, p - buf);
};

// Render counters
// Format counters - ITERATION ORDER MUST MATCH COMPUTE PHASE
size_t counter_family_idx = 0;
for (const auto &[name, family] : Metric::get_counter_families()) {
output.push_back(format(arena, "# HELP %.*s %.*s\n",
static_cast<int>(name.length()), name.data(),
@@ -983,8 +1279,10 @@ std::span<std::string_view> render(ArenaAllocator &arena) {

ArenaVector<std::pair<std::string_view, std::string_view>> labels_sv(
&arena);

// Format callback values
for (const auto &[labels_key, callback] : family->callbacks) {
auto value = callback();
auto value = next_value++->as_double;
labels_sv.clear();
for (size_t i = 0; i < labels_key.labels.size(); ++i) {
labels_sv.push_back(labels_key.labels[i]);
@@ -996,36 +1294,15 @@ std::span<std::string_view> render(ArenaAllocator &arena) {
value));
}

// Aggregate all counter values (thread-local + global accumulated)
std::unordered_map<LabelsKey, double, std::hash<LabelsKey>,
std::equal_to<LabelsKey>,
ArenaStlAllocator<std::pair<const LabelsKey, double>>>
aggregated_values{
ArenaStlAllocator<std::pair<const LabelsKey, double>>(&arena)};
// Use pre-computed data (same as compute phase)
const auto &family_data = label_sets.counter_data[counter_family_idx++];

// First, add thread-local values
for (const auto &[thread_id, per_thread] : family->per_thread_state) {
for (const auto &[labels_key, instance] : per_thread.instances) {
// Atomic read to match atomic store in Counter::inc()
double value;
__atomic_load(&instance->value, &value, __ATOMIC_RELAXED);
aggregated_values[labels_key] += value;
}
}

// Then, add globally accumulated values from destroyed threads
for (const auto &[labels_key, global_state] :
family->global_accumulated_values) {
if (global_state) {
aggregated_values[labels_key] += global_state->value;
}
}

// Render aggregated counter values
for (const auto &[labels_key, total_value] : aggregated_values) {
// Format counter values using pre-computed values
for (const auto &data : family_data) {
auto total_value = next_value++->as_double;
labels_sv.clear();
for (size_t i = 0; i < labels_key.labels.size(); ++i) {
labels_sv.push_back(labels_key.labels[i]);
for (size_t i = 0; i < data.labels_key.labels.size(); ++i) {
labels_sv.push_back(data.labels_key.labels[i]);
}
auto labels = format_labels(labels_sv);
output.push_back(format(arena, "%.*s%.*s %.17g\n",
@@ -1035,7 +1312,8 @@ std::span<std::string_view> render(ArenaAllocator &arena) {
}
}

// Render gauges
// Format gauges - ITERATION ORDER MUST MATCH COMPUTE PHASE
size_t gauge_family_idx = 0;
for (const auto &[name, family] : Metric::get_gauge_families()) {
output.push_back(format(arena, "# HELP %.*s %.*s\n",
static_cast<int>(name.length()), name.data(),
@@ -1046,8 +1324,10 @@ std::span<std::string_view> render(ArenaAllocator &arena) {

ArenaVector<std::pair<std::string_view, std::string_view>> labels_sv(
&arena);

// Format callback values
for (const auto &[labels_key, callback] : family->callbacks) {
auto value = callback();
auto value = next_value++->as_double;
labels_sv.clear();
for (size_t i = 0; i < labels_key.labels.size(); ++i) {
labels_sv.push_back(labels_key.labels[i]);
@@ -1059,12 +1339,13 @@ std::span<std::string_view> render(ArenaAllocator &arena) {
value));
}

for (const auto &[labels_key, instance] : family->instances) {
auto value = std::bit_cast<double>(
instance->value.load(std::memory_order_relaxed));
// Use pre-computed data (same as compute phase)
const auto &family_data = label_sets.gauge_data[gauge_family_idx++];
for (const auto &data : family_data) {
auto value = next_value++->as_double;
labels_sv.clear();
for (size_t i = 0; i < labels_key.labels.size(); ++i) {
labels_sv.push_back(labels_key.labels[i]);
for (size_t i = 0; i < data.labels_key.labels.size(); ++i) {
labels_sv.push_back(data.labels_key.labels[i]);
}
auto labels = format_labels(labels_sv);
output.push_back(format(arena, "%.*s%.*s %.17g\n",
@@ -1074,7 +1355,8 @@ std::span<std::string_view> render(ArenaAllocator &arena) {
}
}

// Render histograms
// Format histograms - ITERATION ORDER MUST MATCH COMPUTE PHASE
size_t histogram_family_idx = 0;
for (const auto &[name, family] : Metric::get_histogram_families()) {
output.push_back(format(arena, "# HELP %.*s %.*s\n",
static_cast<int>(name.length()), name.data(),
@@ -1083,166 +1365,67 @@ std::span<std::string_view> render(ArenaAllocator &arena) {
output.push_back(format(arena, "# TYPE %.*s histogram\n",
static_cast<int>(name.length()), name.data()));

// Aggregate all histogram values (thread-local + global accumulated)
// Use a simpler structure to avoid tuple constructor issues
struct AggregatedHistogram {
ArenaVector<double> thresholds;
ArenaVector<uint64_t> counts;
double sum;
uint64_t observations;

AggregatedHistogram(ArenaAllocator &arena)
: thresholds(&arena), counts(&arena), sum(0.0), observations(0) {}
};
std::unordered_map<
LabelsKey, AggregatedHistogram *, std::hash<LabelsKey>,
std::equal_to<LabelsKey>,
ArenaStlAllocator<std::pair<const LabelsKey, AggregatedHistogram *>>>
aggregated_histograms{ArenaStlAllocator<
std::pair<const LabelsKey, AggregatedHistogram *>>(&arena)};
// Use pre-computed data (same as compute phase)
const auto &family_data = label_sets.histogram_data[histogram_family_idx++];

ArenaVector<std::pair<std::string_view, std::string_view>> bucket_labels_sv(
&arena);

// First, collect thread-local histogram data
for (const auto &[thread_id, per_thread] : family->per_thread_state) {
for (const auto &[labels_key, instance] : per_thread.instances) {
// Extract data under lock - minimize critical section
// Note: thresholds and counts sizes never change after histogram
// creation
ArenaVector<double> thresholds_snapshot(&arena);
ArenaVector<uint64_t> counts_snapshot(&arena);
double sum_snapshot;
uint64_t observations_snapshot;
// Format histogram data using pre-computed values
for (const auto &data : family_data) {
// Get bucket count from pre-computed data
size_t bucket_count = data.bucket_count;

// Copy data with minimal critical section
{
std::lock_guard<std::mutex> lock(instance->mutex);
// Copy thresholds
for (size_t i = 0; i < instance->thresholds.size(); ++i) {
thresholds_snapshot.push_back(instance->thresholds[i]);
}
// Copy counts
for (size_t i = 0; i < instance->counts.size(); ++i) {
counts_snapshot.push_back(instance->counts[i]);
}
sum_snapshot = instance->sum;
observations_snapshot = instance->observations;
}

// Initialize or aggregate into aggregated_histograms
auto it = aggregated_histograms.find(labels_key);
if (it == aggregated_histograms.end()) {
// Create new entry
auto *agg_hist = new (arena.allocate_raw(
sizeof(AggregatedHistogram), alignof(AggregatedHistogram)))
AggregatedHistogram(arena);
for (size_t i = 0; i < thresholds_snapshot.size(); ++i) {
agg_hist->thresholds.push_back(thresholds_snapshot[i]);
}
for (size_t i = 0; i < counts_snapshot.size(); ++i) {
agg_hist->counts.push_back(counts_snapshot[i]);
}
agg_hist->sum = sum_snapshot;
agg_hist->observations = observations_snapshot;
aggregated_histograms[labels_key] = agg_hist;
} else {
// Aggregate with existing entry
auto *agg_hist = it->second;
// Aggregate counts
for (size_t i = 0; i < counts_snapshot.size(); ++i) {
agg_hist->counts[i] += counts_snapshot[i];
}
agg_hist->sum += sum_snapshot;
agg_hist->observations += observations_snapshot;
}
}
}

// Then, add globally accumulated values from destroyed threads
for (const auto &[labels_key, global_state] :
family->global_accumulated_values) {
if (global_state) {
auto it = aggregated_histograms.find(labels_key);
if (it == aggregated_histograms.end()) {
// Create new entry from global state
auto *agg_hist = new (arena.allocate_raw(
sizeof(AggregatedHistogram), alignof(AggregatedHistogram)))
AggregatedHistogram(arena);
for (size_t i = 0; i < global_state->thresholds.size(); ++i) {
agg_hist->thresholds.push_back(global_state->thresholds[i]);
}
for (size_t i = 0; i < global_state->counts.size(); ++i) {
agg_hist->counts.push_back(global_state->counts[i]);
}
agg_hist->sum = global_state->sum;
agg_hist->observations = global_state->observations;
aggregated_histograms[labels_key] = agg_hist;
} else {
// Add global accumulated values to existing entry
auto *agg_hist = it->second;
for (size_t i = 0; i < global_state->counts.size(); ++i) {
agg_hist->counts[i] += global_state->counts[i];
}
agg_hist->sum += global_state->sum;
agg_hist->observations += global_state->observations;
}
}
}

// Render aggregated histogram data
for (const auto &[labels_key, agg_hist] : aggregated_histograms) {

// Render explicit bucket counts
for (size_t i = 0; i < agg_hist->thresholds.size(); ++i) {
// Format explicit bucket counts
for (size_t i = 0; i < bucket_count; ++i) {
auto count = next_value++->as_uint64;
bucket_labels_sv.clear();
for (size_t j = 0; j < labels_key.labels.size(); ++j) {
bucket_labels_sv.push_back(labels_key.labels[j]);
for (size_t j = 0; j < data.labels_key.labels.size(); ++j) {
bucket_labels_sv.push_back(data.labels_key.labels[j]);
}

bucket_labels_sv.push_back(
{"le", static_format(arena, agg_hist->thresholds[i])});
{"le", static_format(arena, family->buckets[i])});
auto labels = format_labels(bucket_labels_sv);
output.push_back(format(
arena, "%.*s_bucket%.*s %llu\n", static_cast<int>(name.length()),
name.data(), static_cast<int>(labels.length()), labels.data(),
static_cast<unsigned long long>(agg_hist->counts[i])));
static_cast<unsigned long long>(count)));
}

// Render +Inf bucket using total observations count
// Format +Inf bucket
auto observations = next_value++->as_uint64;
bucket_labels_sv.clear();
for (size_t j = 0; j < labels_key.labels.size(); ++j) {
bucket_labels_sv.push_back(labels_key.labels[j]);
for (size_t j = 0; j < data.labels_key.labels.size(); ++j) {
bucket_labels_sv.push_back(data.labels_key.labels[j]);
}
bucket_labels_sv.push_back({"le", "+Inf"});
auto inf_labels = format_labels(bucket_labels_sv);
output.push_back(format(
arena, "%.*s_bucket%.*s %llu\n", static_cast<int>(name.length()),
name.data(), static_cast<int>(inf_labels.length()), inf_labels.data(),
static_cast<unsigned long long>(agg_hist->observations)));
static_cast<unsigned long long>(observations)));

// Render sum
// Format sum
auto sum = next_value++->as_double;
bucket_labels_sv.clear();
for (size_t j = 0; j < labels_key.labels.size(); ++j) {
bucket_labels_sv.push_back(labels_key.labels[j]);
for (size_t j = 0; j < data.labels_key.labels.size(); ++j) {
bucket_labels_sv.push_back(data.labels_key.labels[j]);
}
auto labels = format_labels(bucket_labels_sv);
output.push_back(format(arena, "%.*s_sum%.*s %.17g\n",
output.push_back(format(
arena, "%.*s_sum%.*s %.17g\n", static_cast<int>(name.length()),
name.data(), static_cast<int>(labels.length()), labels.data(), sum));

// Format count
auto count = next_value++->as_uint64;
output.push_back(format(arena, "%.*s_count%.*s %llu\n",
static_cast<int>(name.length()), name.data(),
static_cast<int>(labels.length()), labels.data(),
agg_hist->sum));

// Render count
output.push_back(format(
arena, "%.*s_count%.*s %llu\n", static_cast<int>(name.length()),
name.data(), static_cast<int>(labels.length()), labels.data(),
static_cast<unsigned long long>(agg_hist->observations)));
static_cast<unsigned long long>(count)));
}
}

auto result = arena.allocate<std::string_view>(output.size());
std::copy(output.data(), output.data() + output.size(), result);
return std::span<std::string_view>(result, output.size());
return output;
}

// Template specialization implementations for register_callback
@@ -1295,4 +1478,31 @@ void Family<Gauge>::register_callback(
std::mutex Metric::mutex;
thread_local Metric::ThreadInit Metric::thread_init;

void reset_metrics_for_testing() {
std::lock_guard _{Metric::mutex};

// WARNING: This function assumes no metric objects are in use!
// Clear all family maps - this will leak the Family::State objects but
// that's acceptable for testing since they were allocated in the global arena

// Get references to the maps
auto &counter_families = Metric::get_counter_families();
auto &gauge_families = Metric::get_gauge_families();
auto &histogram_families = Metric::get_histogram_families();
auto &interned_labels = Metric::get_interned_labels();

// Clear all family registrations
counter_families.clear();
gauge_families.clear();
histogram_families.clear();
interned_labels.clear();

// Reset the global arena - this will invalidate all arena-allocated strings
// but since we're clearing everything, that's OK
Metric::get_global_arena().reset();

// Note: Thread-local arenas will be cleaned up by ThreadInit destructors
// when threads exit naturally
}
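Under the constraints spelled out in the comments above, a typical call pattern might look like the following sketch. It mirrors the doctest-style usage that appears in the tests later in this change; the test name and metric names are illustrative only.

```cpp
TEST_CASE("starts from a clean registry") {
  metric::reset_metrics_for_testing(); // no metric objects may be live here

  auto family = metric::create_counter("fresh_counter", "Example counter");
  auto counter = family.create({{"kind", "example"}});
  counter.inc(1.0);

  ArenaAllocator arena;
  auto output = metric::render(arena); // only this test's metrics appear
  CHECK(output.size() > 0);
}
```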

} // namespace metric

@@ -218,6 +218,11 @@ bool is_valid_metric_name(std::string_view name);
bool is_valid_label_key(std::string_view key);
bool is_valid_label_value(std::string_view value);

// Reset all metrics state - WARNING: Only safe for testing!
// This clears all registered families and metrics. Should only be called
// when no metric objects are in use and no concurrent render() calls.
void reset_metrics_for_testing();

// Note: Histograms do not support callbacks due to their multi-value nature
// (buckets + sum + count). Use static histogram metrics only.


@@ -598,3 +598,162 @@ TEST_CASE("format function fallback codepath") {
CHECK(result == "Valid format: 42");
}
}

// Test object with non-trivial destructor for ArenaAllocator::Ptr testing
class TestObject {
public:
static int destructor_count;
static int constructor_count;

int value;

TestObject(int v) : value(v) { constructor_count++; }

~TestObject() { destructor_count++; }

static void reset_counters() {
constructor_count = 0;
destructor_count = 0;
}
};

int TestObject::destructor_count = 0;
int TestObject::constructor_count = 0;

// Test struct with trivial destructor
struct TrivialObject {
int value;
TrivialObject(int v) : value(v) {}
};

TEST_CASE("ArenaAllocator::Ptr smart pointer functionality") {
TestObject::reset_counters();

SUBCASE("construct returns raw pointer for trivially destructible types") {
ArenaAllocator arena;

auto ptr = arena.construct<TrivialObject>(42);
static_assert(std::is_same_v<decltype(ptr), TrivialObject *>,
"construct() should return raw pointer for trivially "
"destructible types");
CHECK(ptr != nullptr);
CHECK(ptr->value == 42);
}

SUBCASE("construct returns ArenaAllocator::Ptr for non-trivially "
"destructible types") {
ArenaAllocator arena;

auto ptr = arena.construct<TestObject>(42);
static_assert(
std::is_same_v<decltype(ptr), ArenaAllocator::Ptr<TestObject>>,
"construct() should return ArenaAllocator::Ptr for non-trivially "
"destructible types");
CHECK(ptr);
CHECK(ptr->value == 42);
CHECK(TestObject::constructor_count == 1);
CHECK(TestObject::destructor_count == 0);
}

SUBCASE("ArenaAllocator::Ptr calls destructor on destruction") {
ArenaAllocator arena;

{
auto ptr = arena.construct<TestObject>(42);
CHECK(TestObject::constructor_count == 1);
CHECK(TestObject::destructor_count == 0);
} // ptr goes out of scope

CHECK(TestObject::destructor_count == 1);
}

SUBCASE("ArenaAllocator::Ptr move semantics") {
ArenaAllocator arena;

auto ptr1 = arena.construct<TestObject>(42);
CHECK(TestObject::constructor_count == 1);

auto ptr2 = std::move(ptr1);
CHECK(!ptr1); // ptr1 should be null after move
CHECK(ptr2);
CHECK(ptr2->value == 42);
CHECK(TestObject::destructor_count == 0); // No destruction yet

ptr2.reset();
CHECK(TestObject::destructor_count == 1); // Destructor called
}

SUBCASE("ArenaAllocator::Ptr access operators") {
ArenaAllocator arena;

auto ptr = arena.construct<TestObject>(123);

// Test operator->
CHECK(ptr->value == 123);

// Test operator*
CHECK((*ptr).value == 123);

// Test get()
TestObject *raw_ptr = ptr.get();
CHECK(raw_ptr != nullptr);
CHECK(raw_ptr->value == 123);

// Test bool conversion
CHECK(ptr);
CHECK(static_cast<bool>(ptr) == true);
}

SUBCASE("ArenaAllocator::Ptr reset functionality") {
ArenaAllocator arena;

auto ptr = arena.construct<TestObject>(42);
CHECK(TestObject::constructor_count == 1);
CHECK(TestObject::destructor_count == 0);

ptr.reset();
CHECK(!ptr);
CHECK(TestObject::destructor_count == 1);

// Reset with new object
TestObject *raw_obj = arena.construct<TestObject>(84).release();
ptr.reset(raw_obj);
CHECK(ptr);
CHECK(ptr->value == 84);
CHECK(TestObject::constructor_count == 2);
CHECK(TestObject::destructor_count == 1);
}

SUBCASE("ArenaAllocator::Ptr release functionality") {
ArenaAllocator arena;

auto ptr = arena.construct<TestObject>(42);
TestObject *raw_ptr = ptr.release();

CHECK(!ptr); // ptr should be null after release
CHECK(raw_ptr != nullptr);
CHECK(raw_ptr->value == 42);
CHECK(TestObject::destructor_count == 0); // No destructor called

// Manually call destructor (since we released ownership)
raw_ptr->~TestObject();
CHECK(TestObject::destructor_count == 1);
}

SUBCASE("ArenaAllocator::Ptr move assignment") {
ArenaAllocator arena;

auto ptr1 = arena.construct<TestObject>(42);
auto ptr2 = arena.construct<TestObject>(84);

CHECK(TestObject::constructor_count == 2);
CHECK(TestObject::destructor_count == 0);

ptr1 = std::move(ptr2); // Should destroy first object, move second

CHECK(!ptr2); // ptr2 should be null
CHECK(ptr1);
CHECK(ptr1->value == 84);
CHECK(TestObject::destructor_count == 1); // First object destroyed
}
}

@@ -7,7 +7,9 @@
#include <atomic>
#include <chrono>
#include <cmath>
#include <fstream>
#include <latch>
#include <sstream>
#include <thread>
#include <vector>

@@ -273,11 +275,9 @@ TEST_CASE("callback-based metrics") {
auto gauge_family = metric::create_gauge("callback_gauge", "Callback gauge");

SUBCASE("counter callback") {
std::atomic<double> counter_value{42.0};

counter_family.register_callback(
{{"type", "callback"}},
[&counter_value]() { return counter_value.load(); });
counter_family.register_callback({{"type", "callback"}},
[]() { return 42.0; });

// Callback should be called during render
ArenaAllocator arena;
@@ -286,11 +286,8 @@ TEST_CASE("callback-based metrics") {
}

SUBCASE("gauge callback") {
std::atomic<double> gauge_value{123.5};

gauge_family.register_callback({{"type", "callback"}}, [&gauge_value]() {
return gauge_value.load();
});
gauge_family.register_callback({{"type", "callback"}},
[]() { return 123.5; });

ArenaAllocator arena;
auto output = metric::render(arena);
@@ -657,3 +654,98 @@ TEST_CASE("memory management") {
CHECK(final_output.size() > 0);
}
}

TEST_CASE("render output deterministic order golden test") {
// Clean slate - reset all metrics before this test
metric::reset_metrics_for_testing();

ArenaAllocator arena;

// Create a comprehensive set of metrics with deliberate ordering
// to test deterministic output

// Create counters with different family names and labels
auto z_counter_family =
metric::create_counter("z_last_counter", "Last counter alphabetically");
auto z_counter =
z_counter_family.create({{"method", "POST"}, {"handler", "api"}});
z_counter.inc(42.0);

auto a_counter_family =
metric::create_counter("a_first_counter", "First counter alphabetically");
auto a_counter1 = a_counter_family.create({{"status", "200"}});
auto a_counter2 = a_counter_family.create(
{{"method", "GET"}}); // Should come before status lexicographically
a_counter1.inc(100.0);
a_counter2.inc(200.0);

// Create gauges with different orderings
auto m_gauge_family = metric::create_gauge("m_middle_gauge", "Middle gauge");
auto m_gauge = m_gauge_family.create({{"type", "memory"}});
m_gauge.set(1024.0);

auto b_gauge_family = metric::create_gauge("b_second_gauge", "Second gauge");
auto b_gauge = b_gauge_family.create({{"region", "us-west"}});
b_gauge.set(256.0);

// Create histograms
auto x_hist_family = metric::create_histogram("x_histogram", "Test histogram",
{0.1, 0.5, 1.0});
auto x_hist = x_hist_family.create({{"endpoint", "/api/v1"}});
x_hist.observe(0.25);
x_hist.observe(0.75);

// Add some callbacks to test callback ordering
a_counter_family.register_callback({{"callback", "test"}},
[]() { return 123.0; });
m_gauge_family.register_callback({{"callback", "dynamic"}},
[]() { return 456.0; });

// Render the metrics
auto output = metric::render(arena);

// Concatenate all output into a single string
std::ostringstream oss;
for (const auto &line : output) {
oss << line;
}
std::string actual_output = oss.str();

// Define expected golden output - this represents the exact expected
// deterministic order
std::string expected_golden =
"# HELP a_first_counter First counter alphabetically\n"
"# TYPE a_first_counter counter\n"
"a_first_counter{callback=\"test\"} 123\n"
"a_first_counter{method=\"GET\"} 200\n"
"a_first_counter{status=\"200\"} 100\n"
"# HELP z_last_counter Last counter alphabetically\n"
"# TYPE z_last_counter counter\n"
"z_last_counter{handler=\"api\",method=\"POST\"} 42\n"
"# HELP b_second_gauge Second gauge\n"
"# TYPE b_second_gauge gauge\n"
"b_second_gauge{region=\"us-west\"} 256\n"
"# HELP m_middle_gauge Middle gauge\n"
"# TYPE m_middle_gauge gauge\n"
"m_middle_gauge{callback=\"dynamic\"} 456\n"
"m_middle_gauge{type=\"memory\"} 1024\n"
"# HELP x_histogram Test histogram\n"
"# TYPE x_histogram histogram\n"
"x_histogram_bucket{endpoint=\"/api/v1\",le=\"0.1\"} 0\n"
"x_histogram_bucket{endpoint=\"/api/v1\",le=\"0.5\"} 1\n"
"x_histogram_bucket{endpoint=\"/api/v1\",le=\"1.0\"} 2\n"
"x_histogram_bucket{endpoint=\"/api/v1\",le=\"+Inf\"} 2\n"
"x_histogram_sum{endpoint=\"/api/v1\"} 1\n"
"x_histogram_count{endpoint=\"/api/v1\"} 2\n";

// Check if output matches golden file
if (actual_output != expected_golden) {
MESSAGE("Render output does not match expected golden output.");
MESSAGE("This indicates the deterministic ordering has changed.");
MESSAGE("Expected output:\n" << expected_golden);
MESSAGE("Actual output:\n" << actual_output);
CHECK(false); // Force test failure
} else {
CHECK(true); // Test passes
}
}