Start on Ref/WeakRef

2025-09-10 18:44:37 -04:00
parent 5d289ddd42
commit 7c4d928807

src/reference.hpp (new file, 187 additions)

@@ -0,0 +1,187 @@
#pragma once
#include <algorithm>
#include <atomic>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <new> // placement new
/**
* @brief Thread-safe reference counting abstraction with shared/weak pointer
* semantics
*
* TODO: Implement custom reference counting system with:
* - Thread-safe reference counting using atomic operations
* - Weak reference support to break circular dependencies
* - Move semantics for efficient transfers
* - Custom deleter support
* - Zero-overhead when not using weak references
*/
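/*
 * Intended usage sketch with a hypothetical Widget type. Not all of this
 * works in this commit: copying, destruction, and a way to obtain a WeakRef
 * are still TODO, and `weak()` below is a hypothetical accessor:
 *
 *   Ref<Widget> a = make_ref<Widget>();
 *   Ref<Widget> b = a;              // would bump the strong count
 *   WeakRef<Widget> w = a.weak();   // would bump the weak count
 *   if (Ref<Widget> r = w.lock(); r.get()) {
 *     // the object is guaranteed alive while r exists
 *   }
 */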
namespace detail {
struct ControlBlock {
// Least significant 32 bits are strong reference count
// Most significant 32 bits are weak reference count
std::atomic<uint64_t> ref_counts;
ControlBlock() : ref_counts(1) {} // Start with 1 strong reference
/**
* @brief Increment strong reference count
* @return Previous ref_counts value (both strong and weak counts)
*/
uint64_t increment_strong() noexcept {
uint64_t old_value;
uint64_t new_value;
do {
old_value = ref_counts.load(std::memory_order_relaxed);
uint32_t strong_count = static_cast<uint32_t>(old_value);
uint32_t weak_count = static_cast<uint32_t>(old_value >> 32);
new_value =
(static_cast<uint64_t>(weak_count) << 32) | (strong_count + 1);
} while (!ref_counts.compare_exchange_weak(old_value, new_value,
std::memory_order_relaxed));
return old_value;
}
/**
* @brief Decrement strong reference count
* @return Previous ref_counts value (both strong and weak counts)
*/
uint64_t decrement_strong() noexcept {
uint64_t old_value;
uint64_t new_value;
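    // acq_rel on the CAS below: the release half publishes this thread's
    // writes to the object before any destruction, and the acquire half lets
    // the thread that drops the count to zero observe those writes.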
do {
old_value = ref_counts.load(std::memory_order_relaxed);
uint32_t strong_count = static_cast<uint32_t>(old_value);
uint32_t weak_count = static_cast<uint32_t>(old_value >> 32);
new_value =
(static_cast<uint64_t>(weak_count) << 32) | (strong_count - 1);
} while (!ref_counts.compare_exchange_weak(old_value, new_value,
std::memory_order_acq_rel));
return old_value;
}
/**
* @brief Increment weak reference count
* @return Previous ref_counts value (both strong and weak counts)
*/
uint64_t increment_weak() noexcept {
uint64_t old_value;
uint64_t new_value;
do {
old_value = ref_counts.load(std::memory_order_relaxed);
uint32_t strong_count = static_cast<uint32_t>(old_value);
uint32_t weak_count = static_cast<uint32_t>(old_value >> 32);
new_value = (static_cast<uint64_t>(weak_count + 1) << 32) | strong_count;
} while (!ref_counts.compare_exchange_weak(old_value, new_value,
std::memory_order_relaxed));
return old_value;
}
/**
* @brief Decrement weak reference count
* @return Previous ref_counts value (both strong and weak counts)
*/
uint64_t decrement_weak() noexcept {
uint64_t old_value;
uint64_t new_value;
do {
old_value = ref_counts.load(std::memory_order_relaxed);
uint32_t strong_count = static_cast<uint32_t>(old_value);
uint32_t weak_count = static_cast<uint32_t>(old_value >> 32);
new_value = (static_cast<uint64_t>(weak_count - 1) << 32) | strong_count;
} while (!ref_counts.compare_exchange_weak(old_value, new_value,
std::memory_order_acq_rel));
return old_value;
}
};
} // namespace detail
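// Single-allocation layout assumed by Ref::get() and make_ref() below:
//
//   [ ControlBlock | padding up to alignof(T) | T object ]
//   ^ control_block                           ^ address returned by get()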
template <typename T> struct Ref {
T *get() {
if (!control_block)
return nullptr;
constexpr size_t cb_size = sizeof(detail::ControlBlock);
constexpr size_t alignment = alignof(T);
constexpr size_t padded_cb_size =
(cb_size + alignment - 1) & ~(alignment - 1);
return reinterpret_cast<T *>(reinterpret_cast<char *>(control_block) +
padded_cb_size);
}
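  // Copy/move, destruction, and a weak() accessor are still TODO (see the
  // header comment). One possible shape, sketched only: copying would call
  // control_block->increment_strong(); the destructor would call
  // decrement_strong(), run ~T() when the strong count reaches zero, and
  // free the allocation once the weak count is also zero.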
private:
explicit Ref(detail::ControlBlock *cb) : control_block(cb) {}
Ref() : control_block(nullptr) {}
detail::ControlBlock *control_block;
template <typename U, typename... Args>
friend Ref<U> make_ref(Args &&...args);
template <typename U> friend struct WeakRef;
};
template <typename T> struct WeakRef {
Ref<T> lock() {
if (!control_block) {
return Ref<T>();
}
uint64_t old_value;
uint64_t new_value;
do {
// Use acquire ordering to ensure that any subsequent use of the returned
// Ref (like dereferencing the object pointer) cannot be reordered before
// this safety check. This would ideally use memory_order_consume for
// dependency ordering, but the folk wisdom is "don't use that".
old_value = control_block->ref_counts.load(std::memory_order_acquire);
uint32_t strong_count = static_cast<uint32_t>(old_value);
// If strong count is 0, object is being destroyed
if (strong_count == 0) {
return Ref<T>();
}
uint32_t weak_count = static_cast<uint32_t>(old_value >> 32);
new_value =
(static_cast<uint64_t>(weak_count) << 32) | (strong_count + 1);
} while (!control_block->ref_counts.compare_exchange_weak(
old_value, new_value, std::memory_order_relaxed));
return Ref<T>(control_block);
}
private:
explicit WeakRef(detail::ControlBlock *cb) : control_block(cb) {}
WeakRef() : control_block(nullptr) {}
detail::ControlBlock *control_block;
template <typename U> friend struct Ref;
};
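// lock() usage sketch, for a hypothetical Node type (assumes a WeakRef<Node>
// was obtained somehow; this commit does not yet provide a public way to
// construct one from a Ref<Node>):
//
//   if (Ref<Node> r = weak.lock(); r.get() != nullptr) {
//     // the strong count was bumped, so *r.get() is safe to use while r
//     // remains in scope
//   }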
/**
* @brief Create a new Ref with object constructed in-place after control block
*/
template <typename T, typename... Args> Ref<T> make_ref(Args &&...args) {
constexpr size_t cb_size = sizeof(detail::ControlBlock);
constexpr size_t alignment = alignof(T);
constexpr size_t padded_cb_size =
(cb_size + alignment - 1) & ~(alignment - 1);
constexpr size_t alloc_align =
std::max(alignof(detail::ControlBlock), alignment);
// std::aligned_alloc requires the size to be a multiple of the alignment,
// so round the total allocation size up.
constexpr size_t alloc_size =
(padded_cb_size + sizeof(T) + alloc_align - 1) & ~(alloc_align - 1);
char *buf =
reinterpret_cast<char *>(std::aligned_alloc(alloc_align, alloc_size));
if (!buf) {
std::fprintf(stderr, "Out of memory\n");
std::abort();
}
auto *cb = new (buf) detail::ControlBlock();
new (buf + padded_cb_size) T{std::forward<Args>(args)...};
return Ref<T>(cb);
}
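// make_ref() usage sketch with a hypothetical aggregate type:
//
//   struct Point { int x, y; };
//   Ref<Point> p = make_ref<Point>(1, 2); // one allocation: block + Point
//   std::printf("%d %d\n", p.get()->x, p.get()->y);
//
// Note: nothing frees the allocation yet; Ref's destructor is still TODO.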