diff --git a/internals/CMakeLists.txt b/internals/CMakeLists.txt
index 5dfcf23..6c225ac 100644
--- a/internals/CMakeLists.txt
+++ b/internals/CMakeLists.txt
@@ -1,2 +1,4 @@
-add_conventional_library(testing PUBLIC trade_v1)
+add_conventional_library(testing)
+target_link_libraries(testing PUBLIC trade_v1)
+
 add_conventional_executable_tests(PRIVATE testing trade_v1 testing_v1 std_thread)
diff --git a/internals/include/testing/counted_ptr.hpp b/internals/include/testing/counted_ptr.hpp
new file mode 100644
index 0000000..0f7aea3
--- /dev/null
+++ b/internals/include/testing/counted_ptr.hpp
@@ -0,0 +1,139 @@
+#pragma once
+
+#include <atomic>
+#include <cstdint>
+
+#include "testing/config.hpp"
+
+namespace testing {
+
+template <class T> class counted_ptr;
+
+class Private {
+  template <class> friend class counted_ptr;
+
+  struct count_t {
+    count_t *m_next;
+    const void *m_object;
+    size_t m_count;
+  };
+
+  struct lock_t {
+    std::atomic<count_t *> m_first;
+  };
+
+  static constexpr uint32_t n_locks = 131071;
+  static lock_t s_locks[n_locks];
+
+  static lock_t *lock_of(void *ptr);
+
+  static bool try_acquire(lock_t *lock,
+                          const std::atomic<void *> *at,
+                          void *pointer,
+                          count_t **first);
+
+  static void release(lock_t *lock, count_t *first);
+
+  static void *destroy(std::atomic<void *> *to);
+  static void *destroy_and_set(std::atomic<void *> *to, void *after);
+
+  static void create_from_non_null(void *from);
+  static void *copy_from(const std::atomic<void *> *from);
+
+  static void *move_to(std::atomic<void *> *to, std::atomic<void *> *from);
+};
+
+template <class T> class counted_ptr : Private {
+  std::atomic<void *> m_pointer;
+
+public:
+  ~counted_ptr() { delete static_cast<T *>(Private::destroy(&m_pointer)); }
+
+  counted_ptr() : m_pointer(nullptr) {}
+  counted_ptr(std::nullptr_t) : m_pointer(nullptr) {}
+
+  explicit counted_ptr(T *pointer) : m_pointer(pointer) {
+    if (pointer)
+      Private::create_from_non_null(pointer);
+  }
+
+  counted_ptr(const counted_ptr &that)
+      : m_pointer(static_cast<T *>(Private::copy_from(&that.m_pointer))) {}
+
+  counted_ptr(counted_ptr &&that) : m_pointer(nullptr) {
+    Private::move_to(&m_pointer, &that.m_pointer);
+  }
+
+  counted_ptr &operator=(std::nullptr_t) {
+    delete static_cast<T *>(Private::destroy_and_set(&m_pointer, nullptr));
+    return *this;
+  }
+
+  counted_ptr &operator=(const counted_ptr &that) {
+    if (this != &that)
+      delete static_cast<T *>(
+          Private::destroy_and_set(&m_pointer, copy_from(&that.m_pointer)));
+    return *this;
+  }
+
+  counted_ptr &operator=(counted_ptr &&that) {
+    if (this != &that)
+      delete static_cast<T *>(Private::move_to(&m_pointer, &that.m_pointer));
+    return *this;
+  }
+
+  void reset(T *pointer) {
+    if (pointer)
+      create_from_non_null(pointer);
+    delete static_cast<T *>(destroy_and_set(&m_pointer, pointer));
+  }
+
+  T *get() const {
+    return static_cast<T *>(m_pointer.load(std::memory_order_relaxed));
+  }
+
+  T *operator->() const { return get(); }
+
+  bool operator!() const { return !get(); }
+
+  operator bool() const { return !!get(); }
+};
+
+} // namespace testing
+
+namespace std {
+
+template <class Value> class atomic<testing::counted_ptr<Value>> {
+  using T = testing::counted_ptr<Value>;
+  T m_ptr;
+
+public:
+  atomic() {}
+  atomic(std::nullptr_t) {}
+
+  atomic(const T &ptr) : m_ptr(ptr) {}
+
+  atomic(const atomic &) = delete;
+
+  static constexpr bool is_always_lock_free = false;
+
+  bool is_lock_free() const { return false; }
+
+  T load() const { return m_ptr; }
+  T load(memory_order) const { return m_ptr; }
+
+  operator T() const { return m_ptr; }
+
+  void store(nullptr_t) { m_ptr = nullptr; }
+  void store(const T &desired) { m_ptr = desired; }
+
+  void store(nullptr_t, memory_order) { m_ptr = nullptr; }
+  void store(const T &desired, memory_order) { m_ptr = desired; }
+
+  T operator=(nullptr_t) { return m_ptr = nullptr; }
+  T operator=(const T &desired) { return m_ptr = desired; }
+
+  atomic &operator=(const atomic &) = delete;
+};
+
+} // namespace std
diff --git a/internals/include/testing/queue_tm.hpp b/internals/include/testing/queue_tm.hpp
index 645a57c..a4d5b38 100644
--- a/internals/include/testing/queue_tm.hpp
+++ b/internals/include/testing/queue_tm.hpp
@@ -6,14 +6,16 @@
 #include <memory>
 #include <utility>
 
-#include "testing/config.hpp"
+#include "testing/counted_ptr.hpp"
 
 namespace testing {
 
 /// A transactional queue for testing purposes.
 template <class Value> class queue_tm {
+  template <class T> using ptr_t = counted_ptr<T>;
+
   struct node_t {
-    trade::atom<std::shared_ptr<node_t>> m_next;
+    trade::atom<ptr_t<node_t>> m_next;
     Value m_value;
 #ifndef NDEBUG
     ~node_t() { --s_live_nodes; }
@@ -27,8 +29,8 @@ template <class Value> class queue_tm {
     }
   };
 
-  trade::atom<std::shared_ptr<node_t>> m_first;
-  trade::atom<std::shared_ptr<node_t>> m_last;
+  trade::atom<ptr_t<node_t>> m_first;
+  trade::atom<ptr_t<node_t>> m_last;
 
 public:
   using value_t = Value;
@@ -80,8 +82,7 @@ template <class Value> void queue_tm<Value>::clear() {
 template <class Value>
 template <class ForwardableValue>
 void queue_tm<Value>::push_back(ForwardableValue &&value) {
-  std::shared_ptr<node_t> node(
-      new node_t(std::forward<ForwardableValue>(value)));
+  ptr_t<node_t> node(new node_t(std::forward<ForwardableValue>(value)));
   trade::atomically([&]() {
     if (auto prev = m_last.load())
       prev->m_next = m_last = node;
@@ -93,8 +94,7 @@ void queue_tm<Value>::push_back(ForwardableValue &&value) {
 template <class Value>
 template <class ForwardableValue>
 void queue_tm<Value>::push_front(ForwardableValue &&value) {
-  std::shared_ptr<node_t> node(
-      new node_t(std::forward<ForwardableValue>(value)));
+  ptr_t<node_t> node(new node_t(std::forward<ForwardableValue>(value)));
   trade::atomically([&]() {
     if (auto next = m_first.load()) {
       m_first = node;
diff --git a/internals/library/counted_ptr.cpp b/internals/library/counted_ptr.cpp
new file mode 100644
index 0000000..9868e4b
--- /dev/null
+++ b/internals/library/counted_ptr.cpp
@@ -0,0 +1,164 @@
+#include "testing/counted_ptr.hpp"
+
+#include "molecular_v1/backoff.hpp"
+
+testing::Private::lock_t testing::Private::s_locks[testing::Private::n_locks];
+
+testing::Private::lock_t *testing::Private::lock_of(void *ptr) {
+  return &s_locks[reinterpret_cast<uintptr_t>(ptr) % n_locks];
+}
+
+bool testing::Private::try_acquire(lock_t *lock,
+                                   const std::atomic<void *> *at,
+                                   void *pointer,
+                                   count_t **first_out) {
+  auto locked = reinterpret_cast<count_t *>(1);
+  auto first = lock->m_first.load(std::memory_order_relaxed);
+  if (first != locked && lock->m_first.compare_exchange_weak(first, locked)) {
+    if (pointer == at->load()) {
+      *first_out = first;
+      return true;
+    }
+    lock->m_first.store(first, std::memory_order_relaxed);
+  }
+  return false;
+}
+
+void testing::Private::release(lock_t *lock, count_t *first) {
+  lock->m_first.store(first, std::memory_order_release);
+}
+
+void *testing::Private::destroy(std::atomic<void *> *to) {
+  void *to_pointer;
+  lock_t *to_lock;
+  count_t *to_first;
+
+  molecular_v1::backoff backoff;
+  while (true) {
+    to_pointer = to->load();
+    if (!to_pointer)
+      return nullptr;
+    to_lock = lock_of(to_pointer);
+    if (try_acquire(to_lock, to, to_pointer, &to_first))
+      break;
+    backoff();
+  }
+
+  count_t *to_count;
+  auto prev = &to_first;
+  while (true) {
+    to_count = *prev;
+    if (to_count->m_object == to_pointer) {
+      if (!--(to_count->m_count)) {
+        *prev = to_count->m_next;
+      } else {
+        to_count = nullptr;
+        to_pointer = nullptr;
+      }
+      break;
+    } else {
+      prev = &to_count->m_next;
+    }
+  }
+
+  release(to_lock, to_first);
+
+  delete to_count;
+
+  return to_pointer;
+}
+
+void *testing::Private::destroy_and_set(std::atomic<void *> *to, void *after) {
+  void *to_pointer;
+  lock_t *to_lock;
+  count_t *to_first;
+
+  molecular_v1::backoff backoff;
+  while (true) {
+    to_pointer = to->load();
+    if (!to_pointer &&
+        (nullptr == after || to->compare_exchange_strong(to_pointer, after)))
+      return nullptr;
+    to_lock = lock_of(to_pointer);
+    if (try_acquire(to_lock, to, to_pointer, &to_first))
+      break;
+    backoff();
+  }
+
+  count_t *to_count;
+  auto prev = &to_first;
+  while (true) {
+    to_count = *prev;
+    if (to_count->m_object == to_pointer) {
+      if (!--(to_count->m_count)) {
+        *prev = to_count->m_next;
+      } else {
+        to_count = nullptr;
+        to_pointer = nullptr;
+      }
+      break;
+    } else {
+      prev = &to_count->m_next;
+    }
+  }
+
+  to->store(after, std::memory_order_relaxed);
+
+  release(to_lock, to_first);
+
+  delete to_count;
+
+  return to_pointer;
+}
+
+void testing::Private::create_from_non_null(void *from_pointer) {
+  count_t *from_count = new count_t;
+  from_count->m_object = from_pointer;
+  from_count->m_count = 1;
+
+  std::atomic<void *> from_atomic(from_pointer);
+  auto from_lock = lock_of(from_pointer);
+
+  molecular_v1::backoff backoff;
+  while (
+      !try_acquire(from_lock, &from_atomic, from_pointer, &from_count->m_next))
+    backoff();
+
+  release(from_lock, from_count);
+}
+
+void *testing::Private::copy_from(const std::atomic<void *> *from) {
+  void *from_pointer;
+  lock_t *from_lock;
+  count_t *from_first;
+
+  molecular_v1::backoff backoff;
+  while (true) {
+    from_pointer = from->load();
+    if (!from_pointer)
+      return nullptr;
+    from_lock = lock_of(from_pointer);
+    if (try_acquire(from_lock, from, from_pointer, &from_first))
+      break;
+    backoff();
+  }
+
+  auto count = from_first;
+  while (true) {
+    if (count->m_object == from_pointer) {
+      ++(count->m_count);
+      break;
+    } else {
+      count = count->m_next;
+    }
+  }
+
+  release(from_lock, from_first);
+
+  return from_pointer;
+}
+
+void *testing::Private::move_to(std::atomic<void *> *to,
+                                std::atomic<void *> *from) {
+  return destroy_and_set(to, copy_from(from));
+}
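
Usage sketch (not part of the patch): a minimal example of how the new counted_ptr and its std::atomic specialization can be exercised. Only counted_ptr, its member functions, and the std::atomic<counted_ptr<...>> specialization come from the change above; the `widget` type and the test body are hypothetical.

#include <atomic>

#include "testing/counted_ptr.hpp"

struct widget { // hypothetical payload type, not part of the patch
  int value = 42;
};

int main() {
  // Reference counts live in count_t records hanging off the striped
  // lock table (Private::s_locks), keyed by object address, so the
  // counted_ptr object itself holds only a single atomic pointer.
  testing::counted_ptr<widget> p(new widget());
  auto q = p; // copying bumps the shared count via Private::copy_from

  // The std::atomic specialization is deliberately not lock-free; it
  // just forwards load/store to counted_ptr's copy operations.
  std::atomic<testing::counted_ptr<widget>> slot;
  slot.store(p);
  auto r = slot.load();

  return (q && r && q->value == 42) ? 0 : 1;
} // all copies are destroyed here; the widget is deleted exactly once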