6 #ifndef LOCK_FREE_REF_COUNT_IMPL
7 #error "This is an impl file and must not be included directly!"
12 #pragma warning(disable: 4127)
15 namespace xenium {
namespace reclamation {
// ---------------------------------------------------------------------------
// NOTE(review): this chunk is garbled - original line numbers are fused into
// the text and several statements (function headers, braces, returns) are
// elided. Comments below describe only what is visible; elided parts are
// flagged. Do not treat this text as compilable until restored.
//
// free_list: lock-free stack of recycled T nodes, linked through next_free().
// Every node on the list carries RefCountClaimBit in its ref_count(). An
// optional per-thread cache (thread_local_free_list, capacity
// Traits::thread_local_free_list_size) front-ends the shared list `head`.
// ---------------------------------------------------------------------------
17 template<
class Traits>
18 template<
class T, std::
size_t N,
class Deleter>
19 class lock_free_ref_count<Traits>::enable_concurrent_ptr<T, N, Deleter>::free_list {
// [pop] fast path: take a node from the thread-local cache when one is
// configured. (pop()'s function header and early return are elided.)
22 if (max_local_elements > 0)
23 if (
auto result = local_free_list().pop())
// [pop] slow path: guard the current head so it cannot disappear while we
// try to unlink it. (The surrounding retry loop is elided.)
30 guard = acquire_guard(head, std::memory_order_acquire);
31 if (guard.get() ==
nullptr)
// Try to swing head from the guarded node to its successor. Relaxed order on
// the CAS - presumably sufficient because head is only changed via CAS
// operations (release sequence headed by add_nodes) - TODO confirm.
36 marked_ptr expected(guard);
37 auto next = guard->next_free().load(std::memory_order_relaxed);
40 if (head.compare_exchange_weak(expected, next, std::memory_order_relaxed))
42 assert((guard->ref_count().load(std::memory_order_relaxed) & RefCountClaimBit) != 0 &&
43 "ClaimBit must be set for a node on the free list");
// Successfully unlinked: clear the claim bit and detach the node.
// (Return/guard-release statements are elided.)
45 auto ptr = guard.get();
46 ptr->ref_count().fetch_sub(RefCountClaimBit, std::memory_order_relaxed);
47 ptr->next_free().store(
nullptr, std::memory_order_relaxed);
// [push] (function header elided) a node may only be pushed once its claim
// bit is set; try the thread-local cache first, else fall through to the
// shared list.
55 assert(node->ref_count().load(std::memory_order_relaxed) & RefCountClaimBit &&
56 "ClaimBit must be set for a node to be put on the free list");
57 if (max_local_elements > 0 && local_free_list().push(node))
60 add_nodes(node, node);
// Push the chain [first..last] onto the shared list - standard CAS retry
// loop (Treiber-stack style); the `do {` opening the loop is elided.
64 void add_nodes(T *first, T *last) {
66 auto old = head.load(std::memory_order_acquire);
68 last->next_free().store(old, std::memory_order_relaxed);
71 }
while (!head.compare_exchange_weak(old, first, std::memory_order_release, std::memory_order_acquire));
// Head of the shared free list.
76 concurrent_ptr <T, N> head;
// Per-thread cache of free nodes; drained into the global list on thread
// exit (see destructor below).
78 class thread_local_free_list {
// Destructor: walk the local chain to its tail, then splice the whole chain
// onto the global list with a single add_nodes call. (Loop header and the
// first/last initializations are elided.)
80 ~thread_local_free_list() noexcept {
85 auto next = last->next_free().load(std::memory_order_relaxed);
88 next = next->next_free().load(std::memory_order_relaxed);
90 global_free_list.add_nodes(first, last);
// [local push] reject when the cache is full; otherwise link the node in
// front of the local head. (Function header / head update / count increment
// are elided.)
94 if (number_of_elements >= max_local_elements)
96 node->next_free().store(head, std::memory_order_relaxed);
// [local pop] unlink the local head; a single fetch_add of
// (RefCountInc - RefCountClaimBit) clears the claim bit AND takes one
// reference atomically. (Function header / null check / return are elided.)
105 assert(number_of_elements > 0);
106 head = result->next_free().load(std::memory_order_relaxed).get();
107 --number_of_elements;
109 result->ref_count().fetch_add(RefCountInc - RefCountClaimBit, std::memory_order_relaxed);
110 result->next_free().store(
nullptr, std::memory_order_relaxed);
116 size_t number_of_elements = 0;
// Cache capacity is a policy knob supplied by Traits.
120 static constexpr
size_t max_local_elements = Traits::thread_local_free_list_size;
// Function-local thread_local accessor; alignas(64) presumably to keep each
// thread's cache on its own cache line (false-sharing avoidance) - TODO
// confirm intent against the full source.
122 static thread_local_free_list &local_free_list() {
125 alignas(64)
static thread_local thread_local_free_list local_free_list;
126 return local_free_list;
// Class-specific operator new for T: first tries to reuse a recycled node
// from the global free list; only on a miss does it allocate fresh memory,
// reserving a ref-count `header` immediately in front of the T payload.
// NOTE(review): the tail of this function (closing braces and the final
// return) is elided in this garbled chunk.
130 template<
class Traits>
131 template<
class T, std::
size_t N,
class Deleter>
132 void* lock_free_ref_count<Traits>::
133 enable_concurrent_ptr<T, N, Deleter>::operator
new(
size_t sz) {
// This allocator only supports exact T-sized allocations (no arrays, no
// derived types of different size).
134 assert(sz ==
sizeof(T) &&
"Cannot handle allocations of anything other than T instances");
// Fast path: a node from the free list already has its ref count prepared
// by pop().
135 T *result = global_free_list.pop();
136 if (result ==
nullptr) {
// Slow path: allocate header + payload in one block; the T object lives
// directly behind the header (h + 1).
137 auto h =
static_cast<header *
>(::operator
new(sz +
sizeof(header)));
// Release store so the initialized ref count is visible to threads that
// later observe the published pointer - presumably; TODO confirm pairing
// against the full source.
138 h->ref_count.store(RefCountInc, std::memory_order_release);
139 result =
static_cast<T *
>(
static_cast<void *
>(h + 1));
145 template<
class Traits>
146 template<
class T, std::
size_t N,
class Deleter>
147 void lock_free_ref_count<Traits>::
148 enable_concurrent_ptr<T, N, Deleter>::operator
delete(
void *p) {
149 auto node =
static_cast<T *
>(p);
150 assert((node->ref_count().load(std::memory_order_relaxed) & RefCountClaimBit) == 0);
152 if (node->decrement_refcnt())
153 node->push_to_free_list();
156 template<
class Traits>
157 template<
class T, std::
size_t N,
class Deleter>
158 bool lock_free_ref_count<Traits>::
159 enable_concurrent_ptr<T, N, Deleter>::decrement_refcnt() {
160 unsigned old_refcnt, new_refcnt;
162 old_refcnt = ref_count().load(std::memory_order_relaxed);
163 new_refcnt = old_refcnt - RefCountInc;
165 new_refcnt = RefCountClaimBit;
167 }
while (!ref_count().compare_exchange_weak(old_refcnt, new_refcnt,
168 new_refcnt == RefCountClaimBit
169 ? std::memory_order_acquire
170 : std::memory_order_release,
171 std::memory_order_relaxed));
174 return (old_refcnt - new_refcnt) & RefCountClaimBit;
// guard_ptr construction from a raw marked pointer: takes one reference on
// the pointee (if any) by bumping its ref count.
// NOTE(review): the member-initializer between ':' and the body is elided in
// this garbled chunk - presumably it initializes the base with p; confirm
// against the full source.
177 template<
class Traits>
178 template<
class T,
class MarkedPtr>
179 lock_free_ref_count<Traits>::
180 guard_ptr<T, MarkedPtr>::guard_ptr(
const MarkedPtr &p) noexcept :
182 if (this->ptr.get() !=
nullptr)
// Relaxed increment - presumably safe because the caller already holds a
// valid reference to the object; TODO confirm rationale.
183 this->ptr->ref_count().fetch_add(RefCountInc, std::memory_order_relaxed);
// Copy constructor for guard_ptr.
// NOTE(review): the member-initializer and body are entirely elided in this
// garbled chunk - presumably it copies p.ptr and increments the pointee's
// ref count analogously to the MarkedPtr overload above; confirm against
// the full source.
186 template<
class Traits>
187 template<
class T,
class MarkedPtr>
188 lock_free_ref_count<Traits>::
189 guard_ptr<T, MarkedPtr>::guard_ptr(
const guard_ptr &p) noexcept :
// Move constructor for guard_ptr.
// NOTE(review): the member-initializer and body are elided in this garbled
// chunk - presumably it steals p's pointer and clears p, transferring the
// reference without any ref-count traffic; confirm against the full source.
192 template<
class Traits>
193 template<
class T,
class MarkedPtr>
194 lock_free_ref_count<Traits>::
195 guard_ptr<T, MarkedPtr>::guard_ptr(guard_ptr &&p) noexcept :
// Copy assignment (declared with a trailing return type; the return-type
// line, self-assignment check, release of the current pointee, pointer copy,
// and `return *this` are elided in this garbled chunk).
200 template<
class Traits>
201 template<
class T,
class MarkedPtr>
202 auto lock_free_ref_count<Traits>::
203 guard_ptr<T, MarkedPtr>::operator=(
const guard_ptr &p)
// Take one reference on the newly assigned pointee, mirroring the
// constructors above.
210 if (this->ptr.get() !=
nullptr)
211 this->ptr->ref_count().fetch_add(RefCountInc, std::memory_order_relaxed);
// Move assignment: steals p's pointer; a move needs no ref-count traffic.
// NOTE(review): the return-type line, the release of the current pointee
// (presumably a reset()/self-check), and `return *this` are elided in this
// garbled chunk.
215 template<
class Traits>
216 template<
class T,
class MarkedPtr>
217 auto lock_free_ref_count<Traits>::
218 guard_ptr<T, MarkedPtr>::operator=(guard_ptr &&p) noexcept
224 this->ptr = std::move(p.ptr);
// acquire(): obtain a guarded (ref-counted) reference to the object held in
// p. The visible statements form the classic increment-then-revalidate
// pattern: load the pointer, bump the pointee's ref count, then re-load p
// and check the pointer is unchanged - if it changed, the increment may have
// hit a node that was concurrently removed, and the operation must retry.
// NOTE(review): the enclosing retry loop, the null early-out body, and the
// success/undo paths are elided in this garbled chunk.
229 template<
class Traits>
230 template<
class T,
class MarkedPtr>
231 void lock_free_ref_count<Traits>::
232 guard_ptr<T, MarkedPtr>::acquire(
const concurrent_ptr <T> &p, std::memory_order order) noexcept {
242 auto q = p.load(std::memory_order_acquire);
// A null pointer needs no reference (early-out handling elided).
244 if (q.get() ==
nullptr)
249 q->ref_count().fetch_add(RefCountInc, std::memory_order_acquire);
// Revalidate under the caller-supplied order; success path elided.
251 if (q == p.load(order))
// acquire_if_equal(): like acquire(), but only takes a reference when p
// currently holds `expected`. Returns bool - presumably true when the guard
// was acquired (or p compared equal); confirm exact contract against the
// full source.
// NOTE(review): the comparison against `expected`, the null early-out body,
// and the success/undo/return paths are elided in this garbled chunk.
256 template<
class Traits>
257 template<
class T,
class MarkedPtr>
258 bool lock_free_ref_count<Traits>::
259 guard_ptr<T, MarkedPtr>::acquire_if_equal(
260 const concurrent_ptr <T> &p,
const MarkedPtr &expected, std::memory_order order) noexcept {
263 auto q = p.load(std::memory_order_acquire);
// A null pointer needs no reference (early-out handling elided).
268 if (q.get() ==
nullptr)
273 q->ref_count().fetch_add(RefCountInc, std::memory_order_acquire);
// Revalidate under the caller-supplied order; success path elided.
275 if (q == p.load(order))
// reset(): drop this guard's reference. If decrement_refcnt() reports that
// this call claimed the node, the node is recycled: the visible code pushes
// it to the free list, guarded by an is_destroyed() check whose true-branch
// body is elided (presumably the object is destroyed first when it has not
// been already - confirm against the full source).
// NOTE(review): the null check on p and the clearing of this->ptr are also
// elided in this garbled chunk.
282 template<
class Traits>
283 template<
class T,
class MarkedPtr>
284 void lock_free_ref_count<Traits>::
285 guard_ptr<T, MarkedPtr>::reset() noexcept {
286 auto p = this->ptr.get();
291 if (p->decrement_refcnt()) {
292 if (!p->is_destroyed())
295 p->push_to_free_list();
// reclaim(Deleter): marks the guarded object for reclamation by dropping one
// reference with release order. The Deleter parameter is unnamed/unused in
// the visible code - actual destruction happens through the ref-count /
// free-list machinery rather than through the deleter here.
// NOTE(review): several interior lines (305-308, 310-313 of the original)
// are elided in this garbled chunk; confirm what else happens (e.g. marking
// the pointer / clearing the guard) against the full source.
299 template<
class Traits>
300 template<
class T,
class MarkedPtr>
301 void lock_free_ref_count<Traits>::
302 guard_ptr<T, MarkedPtr>::reclaim(Deleter) noexcept {
303 if (this->ptr.get() !=
nullptr) {
// The guard itself holds one reference, so the owner's reference being
// dropped here implies refs() must currently exceed 1.
304 assert(this->ptr->refs() > 1);
// Release order - presumably so prior writes to the object are visible to
// whichever thread eventually claims and reclaims it; TODO confirm.
309 this->ptr->ref_count().fetch_sub(RefCountInc, std::memory_order_release);
314 template<
class Traits>
315 template<
class T, std::
size_t N,
class Deleter>
316 typename lock_free_ref_count<Traits>::
317 template enable_concurrent_ptr<T, N, Deleter>::free_list
318 lock_free_ref_count<Traits>::
319 enable_concurrent_ptr<T, N, Deleter>::global_free_list;
321 #ifdef TRACK_ALLOCATIONS
322 template <
class Traits>
323 inline detail::allocation_counter& lock_free_ref_count<Traits>::allocation_counter()
324 {
return allocation_counter_; };
326 template <
class Traits>
327 inline void lock_free_ref_count<Traits>::count_allocation()
328 { allocation_counter().count_allocation(); }
330 template <
class Traits>
331 inline void lock_free_ref_count<Traits>::count_reclamation()
332 { allocation_counter().count_reclamation(); }