#ifndef HAZARD_POINTER_IMPL
#error "This is an impl file and must not be included directly!"
#endif

#include <xenium/aligned_object.hpp>
#include <xenium/detail/port.hpp>

#include <algorithm>
#include <cassert>
#include <new>
#include <utility>
#include <vector>

#ifdef _MSC_VER
#pragma warning(push)
// C4324: structure was padded due to alignment specifier
#pragma warning(disable: 4324)
#endif
namespace xenium { namespace reclamation {
  template <class Traits>
  template <class T, class MarkedPtr>
  hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::guard_ptr(const MarkedPtr& p) :
    base(p)
  {
    if (this->ptr.get() != nullptr)
    {
      hp = local_thread_data.alloc_hazard_pointer();
      hp->set_object(this->ptr.get());
    }
  }
  template <class Traits>
  template <class T, class MarkedPtr>
  hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::guard_ptr(const guard_ptr& p) :
    guard_ptr(MarkedPtr(p))
  {}
  template <class Traits>
  template <class T, class MarkedPtr>
  hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::guard_ptr(guard_ptr&& p) noexcept :
    base(p.ptr),
    hp(p.hp)
  {
    p.ptr.reset();
    p.hp = nullptr;
  }
  template <class Traits>
  template <class T, class MarkedPtr>
  auto hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::operator=(const guard_ptr& p) noexcept
    -> guard_ptr&
  {
    if (&p == this)
      return *this;

    reset();
    this->ptr = p.ptr;
    if (this->ptr.get() != nullptr)
    {
      hp = local_thread_data.alloc_hazard_pointer();
      hp->set_object(this->ptr.get());
    }
    return *this;
  }
  template <class Traits>
  template <class T, class MarkedPtr>
  auto hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::operator=(guard_ptr&& p) noexcept
    -> guard_ptr&
  {
    if (&p == this)
      return *this;

    reset();
    this->ptr = std::move(p.ptr);
    hp = p.hp;
    p.ptr.reset();
    p.hp = nullptr;
    return *this;
  }
  template <class Traits>
  template <class T, class MarkedPtr>
  void hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::acquire(const concurrent_ptr<T>& p,
    std::memory_order order)
  {
    auto p1 = p.load(std::memory_order_relaxed);
    if (p1 == this->ptr)
      return; // the object is already protected by this guard_ptr

    if (p1 != nullptr && hp == nullptr)
      hp = local_thread_data.alloc_hazard_pointer();

    auto p2 = p1;
    do
    {
      p1 = p2;
      if (p1 == nullptr)
      {
        reset();
        return;
      }
      // publish the pointer we are about to protect
      hp->set_object(p1.get());
      // reload to verify that the pointer is still the published one
      p2 = p.load(order);
    } while (p1.get() != p2.get());

    this->ptr = p2;
  }
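  // Note on the loop above: this is the classic protect-then-validate step of
  // the hazard pointer scheme (Michael, 2004). set_object publishes the pointer
  // and issues a seq-cst fence; reloading p afterwards guarantees that either
  // the published value is still the current one (and thus safe to dereference),
  // or we retry with the freshly loaded value. Without this revalidation the
  // object could be retired and reclaimed between the load and the publication.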
  template <class Traits>
  template <class T, class MarkedPtr>
  bool hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::acquire_if_equal(
    const concurrent_ptr<T>& p,
    const MarkedPtr& expected,
    std::memory_order order)
  {
    auto p1 = p.load(std::memory_order_relaxed);
    if (p1 == nullptr || p1 != expected)
    {
      reset();
      return p1 == expected;
    }

    if (hp == nullptr)
      hp = local_thread_data.alloc_hazard_pointer();
    hp->set_object(p1.get());
    // reload to verify that the pointer still matches the protected one
    this->ptr = p.load(order);
    if (this->ptr != p1)
    {
      reset();
      return false;
    }
    return true;
  }
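  // acquire_if_equal is the single-shot variant of acquire: instead of looping,
  // it protects the object only if p still equals `expected`, letting the caller
  // distinguish "value changed" (false) from "successfully protected" (true).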
  template <class Traits>
  template <class T, class MarkedPtr>
  void hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::reset() noexcept
  {
    local_thread_data.release_hazard_pointer(hp);
    this->ptr.reset();
  }
  template <class Traits>
  template <class T, class MarkedPtr>
  void hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::do_swap(guard_ptr& g) noexcept
  {
    std::swap(hp, g.hp);
  }
  template <class Traits>
  template <class T, class MarkedPtr>
  void hazard_pointer<Traits>::guard_ptr<T, MarkedPtr>::reclaim(Deleter d) noexcept
  {
    auto p = this->ptr.get();
    reset();
    p->set_deleter(std::move(d));
    if (local_thread_data.add_retired_node(p) >= allocation_strategy::retired_nodes_threshold())
      local_thread_data.scan();
  }
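  // Retired nodes are not freed immediately; they are pushed onto a thread-local
  // retire list. Only once that list reaches retired_nodes_threshold() does the
  // thread run a scan, amortizing the cost of collecting all threads' active
  // hazard pointers over many retire operations.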
  template <class Strategy, class Derived>
  struct alignas(64) basic_hp_thread_control_block :
    detail::thread_block_list<Derived>::entry,
    aligned_object<basic_hp_thread_control_block<Strategy, Derived>>
  {
    struct hazard_pointer
    {
      void set_object(detail::deletable_object* obj)
      {
        value.store(reinterpret_cast<void**>(obj), std::memory_order_release);
        // This seq-cst fence pairs with the seq-cst fence in thread_data::scan;
        // it ensures the store above is visible to any thread that subsequently
        // gathers the protected pointers.
        std::atomic_thread_fence(std::memory_order_seq_cst);
      }
      bool try_get_object(detail::deletable_object*& result) const
      {
        // TSAN does not support explicit fences, so perform an acquire-load in
        // that case to avoid false positives; otherwise a relaxed load suffices.
        constexpr auto memory_order = TSAN_MEMORY_ORDER(std::memory_order_acquire, std::memory_order_relaxed);
        auto v = value.load(memory_order);
        if (v.mark() == 0)
        {
          result = reinterpret_cast<detail::deletable_object*>(v.get());
          return true;
        }
        return false; // the slot currently holds a free-list link, not an object
      }
      void set_link(hazard_pointer* link)
      {
        value.store(marked_ptr<void*, 1>(reinterpret_cast<void**>(link), 1), std::memory_order_release);
      }

      hazard_pointer* get_link() const
      {
        assert(is_link());
        return reinterpret_cast<hazard_pointer*>(value.load(std::memory_order_relaxed).get());
      }

      bool is_link() const
      {
        return value.load(std::memory_order_relaxed).mark() != 0;
      }

    private:
      // The mark bit distinguishes free-list links (mark == 1) from pointers
      // to protected objects (mark == 0).
      std::atomic<marked_ptr<void*, 1>> value;
    };
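    // Each hazard-pointer slot does double duty: while unused it stores a link
    // to the next free slot (mark bit set), and while in use it stores the
    // pointer to the protected object (mark bit clear). This allows free-list
    // management without any extra per-slot storage.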
    using hint = hazard_pointer*;

    void initialize(hint& hint)
    {
      Strategy::number_of_active_hps.fetch_add(self().number_of_hps(), std::memory_order_relaxed);
      hint = initialize_block(self());
    }

    void abandon()
    {
      Strategy::number_of_active_hps.fetch_sub(self().number_of_hps(), std::memory_order_relaxed);
      detail::thread_block_list<Derived>::entry::abandon();
    }
    hazard_pointer* alloc_hazard_pointer(hint& hint)
    {
      auto result = hint;
      if (result == nullptr)
        result = self().need_more_hps();

      hint = result->get_link();
      return result;
    }

    void release_hazard_pointer(hazard_pointer*& hp, hint& hint)
    {
      if (hp != nullptr)
      {
        hp->set_link(hint);
        hint = hp;
        hp = nullptr;
      }
    }
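    // The hint caches the head of the thread's free list, so allocating and
    // releasing a hazard pointer is a plain LIFO push/pop without any atomic
    // read-modify-write operations.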
    Derived& self() { return static_cast<Derived&>(*this); }

    hazard_pointer* begin() { return &pointers[0]; }
    hazard_pointer* end() { return &pointers[Strategy::K]; }
    const hazard_pointer* begin() const { return &pointers[0]; }
    const hazard_pointer* end() const { return &pointers[Strategy::K]; }
    template <typename T>
    static hazard_pointer* initialize_block(T& block)
    {
      auto begin = block.begin();
      auto end = block.end() - 1; // the last slot is linked to the next block
      for (auto it = begin; it != end;)
      {
        auto next = it + 1;
        it->set_link(next);
        it = next;
      }
      end->set_link(block.initialize_next_block());
      return begin;
    }
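    // initialize_block threads all slots of a block into one free list: every
    // slot links to its successor, and the last slot links to the first slot of
    // the next block (or nullptr for the static strategy). The returned pointer
    // to the first slot becomes the thread's initial hint.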
    static void gather_protected_pointers(std::vector<const detail::deletable_object*>& protected_ptrs,
      const hazard_pointer* begin, const hazard_pointer* end)
    {
      for (auto it = begin; it != end; ++it)
      {
        detail::deletable_object* obj;
        if (it->try_get_object(obj))
          protected_ptrs.push_back(obj);
      }
    }

    hazard_pointer pointers[Strategy::K];
  };
  template <class Strategy>
  struct static_hp_thread_control_block :
    basic_hp_thread_control_block<Strategy, static_hp_thread_control_block<Strategy>>
  {
    using base = basic_hp_thread_control_block<Strategy, static_hp_thread_control_block>;
    using hazard_pointer = typename base::hazard_pointer;

    void gather_protected_pointers(std::vector<const detail::deletable_object*>& protected_ptrs) const
    {
      base::gather_protected_pointers(protected_ptrs, this->begin(), this->end());
    }

    hazard_pointer* need_more_hps() { throw bad_hazard_pointer_alloc("hazard pointer pool exceeded"); }
    constexpr size_t number_of_hps() const { return Strategy::K; }
    constexpr hazard_pointer* initialize_next_block() const { return nullptr; }
  };
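  // The static strategy caps each thread at Strategy::K hazard pointers and
  // fails loudly when the pool is exhausted; the dynamic strategy below instead
  // grows the pool on demand by chaining additional blocks.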
  template <class Strategy>
  struct dynamic_hp_thread_control_block :
    basic_hp_thread_control_block<Strategy, dynamic_hp_thread_control_block<Strategy>>
  {
    using base = basic_hp_thread_control_block<Strategy, dynamic_hp_thread_control_block>;
    using hazard_pointer = typename base::hazard_pointer;

    void gather_protected_pointers(std::vector<const detail::deletable_object*>& protected_ptrs) const
    {
      gather_protected_pointers(*this, protected_ptrs);
    }

    struct alignas(64) hazard_pointer_block : aligned_object<hazard_pointer_block>
    {
      hazard_pointer_block(size_t size) : size(size) {}

      // the hazard pointers are stored in the buffer directly following this header
      hazard_pointer* begin() { return reinterpret_cast<hazard_pointer*>(this + 1); }
      hazard_pointer* end() { return begin() + size; }

      const hazard_pointer* begin() const { return reinterpret_cast<const hazard_pointer*>(this + 1); }
      const hazard_pointer* end() const { return begin() + size; }

      const hazard_pointer_block* next_block() const { return next; }
      hazard_pointer* initialize_next_block() { return next ? base::initialize_block(*next) : nullptr; }

      hazard_pointer_block* next = nullptr;
      const size_t size;
    };
    const hazard_pointer_block* next_block() const
    {
      // this acquire-load synchronizes with the release-store in allocate_new_hazard_pointer_block
      return hp_block.load(std::memory_order_acquire);
    }
    size_t number_of_hps() const { return total_number_of_hps; }
    hazard_pointer* need_more_hps() { return allocate_new_hazard_pointer_block(); }

    hazard_pointer* initialize_next_block()
    {
      auto block = hp_block.load(std::memory_order_relaxed);
      return block ? base::initialize_block(*block) : nullptr;
    }
    template <typename T>
    static void gather_protected_pointers(const T& block,
      std::vector<const detail::deletable_object*>& protected_ptrs)
    {
      base::gather_protected_pointers(protected_ptrs, block.begin(), block.end());

      auto next = block.next_block();
      if (next)
        gather_protected_pointers(*next, protected_ptrs);
    }

    static detail::deletable_object* as_internal_pointer(hazard_pointer* p)
    {
      // Setting the LSB marks this as an internal (free-list) pointer, so it can
      // be distinguished from a pointer to a protected object.
      auto marked = reinterpret_cast<size_t>(p) | 1;
      return reinterpret_cast<detail::deletable_object*>(marked);
    }
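    // Growth policy: each new block provides max(K, total/2) additional hazard
    // pointers, i.e. the pool grows geometrically, so a thread that needs many
    // guards at once performs only a logarithmic number of block allocations.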
    hazard_pointer* allocate_new_hazard_pointer_block()
    {
      size_t hps = std::max(static_cast<size_t>(Strategy::K), total_number_of_hps / 2);
      total_number_of_hps += hps;
      Strategy::number_of_active_hps.fetch_add(hps, std::memory_order_relaxed);

      size_t buffer_size = sizeof(hazard_pointer_block) + hps * sizeof(hazard_pointer);
      void* buffer = hazard_pointer_block::operator new(buffer_size);
      auto block = ::new(buffer) hazard_pointer_block(hps);
      auto result = this->initialize_block(*block);
      block->next = hp_block.load(std::memory_order_relaxed);
      // this release-store synchronizes with the acquire-load in next_block
      hp_block.store(block, std::memory_order_release);
      return result;
    }

    size_t total_number_of_hps = Strategy::K;
    std::atomic<hazard_pointer_block*> hp_block{nullptr};
  };
  template <class Traits>
  struct alignas(64) hazard_pointer<Traits>::thread_data : aligned_object<thread_data>
  {
    using HP = typename thread_control_block::hazard_pointer*;

    ~thread_data()
    {
      if (retire_list != nullptr)
      {
        scan();
        if (retire_list != nullptr)
          global_thread_block_list.abandon_retired_nodes(retire_list);
        retire_list = nullptr;
      }

      if (control_block != nullptr) {
        global_thread_block_list.release_entry(control_block);
        control_block = nullptr;
      }
    }
    HP alloc_hazard_pointer()
    {
      ensure_has_control_block();
      return control_block->alloc_hazard_pointer(hint);
    }

    void release_hazard_pointer(HP& hp)
    {
      control_block->release_hazard_pointer(hp, hint);
    }
    std::size_t add_retired_node(detail::deletable_object* p)
    {
      p->next = retire_list;
      retire_list = p;
      return ++number_of_retired_nodes;
    }
    void scan()
    {
      std::vector<const detail::deletable_object*> protected_pointers;
      protected_pointers.reserve(allocation_strategy::number_of_active_hazard_pointers());

      // This seq-cst fence pairs with the seq-cst fence in hazard_pointer::set_object.
      std::atomic_thread_fence(std::memory_order_seq_cst);

      auto adopted_nodes = global_thread_block_list.adopt_abandoned_retired_nodes();

      std::for_each(global_thread_block_list.begin(), global_thread_block_list.end(),
        [&protected_pointers](const auto& entry)
        {
          // TSAN does not support explicit fences, so perform an acquire-load in
          // that case to avoid false positives; otherwise a relaxed load suffices.
          constexpr auto memory_order = TSAN_MEMORY_ORDER(std::memory_order_acquire, std::memory_order_relaxed);
          if (entry.is_active(memory_order))
            entry.gather_protected_pointers(protected_pointers);
        });

      // this acquire-fence synchronizes with the release-stores to the hazard-pointer slots
      std::atomic_thread_fence(std::memory_order_acquire);

      std::sort(protected_pointers.begin(), protected_pointers.end());

      auto list = retire_list;
      retire_list = nullptr;
      number_of_retired_nodes = 0;
      reclaim_nodes(list, protected_pointers);
      reclaim_nodes(adopted_nodes, protected_pointers);
    }
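    // scan is the reclamation step of the hazard pointer scheme: snapshot the
    // active hazard pointers of all threads, sort them, and free every retired
    // node that is not found via binary search. Nodes that are still protected
    // are simply re-added to the retire list for a later scan.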
    void ensure_has_control_block()
    {
      if (control_block == nullptr)
      {
        control_block = global_thread_block_list.acquire_entry();
        control_block->initialize(hint);
      }
    }
    void reclaim_nodes(detail::deletable_object* list,
      const std::vector<const detail::deletable_object*>& protected_pointers)
    {
      while (list != nullptr)
      {
        auto cur = list;
        list = list->next;
        if (std::binary_search(protected_pointers.begin(), protected_pointers.end(), cur))
          add_retired_node(cur); // still protected -> retry in a later scan
        else
          cur->delete_self();
      }
    }
    detail::deletable_object* retire_list = nullptr;
    std::size_t number_of_retired_nodes = 0;
    typename thread_control_block::hint hint{};

    thread_control_block* control_block = nullptr;

    friend class hazard_pointer;
    ALLOCATION_COUNTER(hazard_pointer);
  };
#ifdef TRACK_ALLOCATIONS
  template <class Traits>
  inline void hazard_pointer<Traits>::count_allocation()
  { local_thread_data.allocation_counter.count_allocation(); }

  template <class Traits>
  inline void hazard_pointer<Traits>::count_reclamation()
  { local_thread_data.allocation_counter.count_reclamation(); }
#endif

}} // namespace xenium::reclamation

#ifdef _MSC_VER
#pragma warning(pop)
#endif