// stack.cpp - Stack node primitives for the xrcu library.
#include "stack.hpp"
#include "xatomic.hpp"

#include <atomic>

namespace xrcu
{

namespace detail
{
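
// Tag bit OR'ed into the head pointer to keep concurrent pushes and
// pops out while a stack is being swapped or cleared (see set_spin).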
static const uintptr_t SPIN_BIT = 1;
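
// Test whether the head pointer NP has the spin bit set.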
static inline bool
node_spinning_p (const stack_node_base *np)
{
  return (((uintptr_t)np) & SPIN_BIT);
}
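
// Relaxed load of the raw head pointer (may still carry the spin bit).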
static inline stack_node_base*
get_node (const stack_node_base::ptr_type& head)
{
  return (head.load (std::memory_order_relaxed));
}
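
// Return the current top node of the stack, with the spin bit cleared.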
stack_node_base* stack_node_base::root (const ptr_type& head)
{
  auto ret = (uintptr_t)get_node (head);
  return (((stack_node_base *)(ret & ~SPIN_BIT)));
}
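
// Push NODEP on top of the stack, retrying while the spin bit is set.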
void stack_node_base::push (ptr_type& head, stack_node_base *nodep)
{
  while (true)
    {
      nodep->next = get_node (head);
      if (!node_spinning_p (nodep->next) &&
          head.compare_exchange_weak (nodep->next, nodep,
            std::memory_order_acq_rel, std::memory_order_relaxed))
        break;

      xatomic_spin_nop ();
    }
}
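
// Same as above, but also store the previous top node in *OUTP
// (NODEP->next is left untouched here).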
void stack_node_base::push (ptr_type& head,
  stack_node_base *nodep, stack_node_base **outp)
{
  while (true)
    {
      auto tmp = get_node (head);
      if (!node_spinning_p (tmp))
        {
          *outp = tmp;
          if (head.compare_exchange_weak (tmp, nodep,
              std::memory_order_acq_rel, std::memory_order_relaxed))
            break;
        }

      xatomic_spin_nop ();
    }
}
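
// Pop and return the top node, or nullptr if the stack is empty;
// waits while the spin bit is set.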
stack_node_base* stack_node_base::pop (ptr_type& head)
{
  while (true)
    {
      auto nodep = get_node (head);
      if (node_spinning_p (nodep))
        ;
      else if (!nodep)
        return (nodep);
      else if (head.compare_exchange_weak (nodep, nodep->next,
          std::memory_order_acq_rel, std::memory_order_relaxed))
        {
          nodep->next = nullptr;
          return (nodep);
        }

      xatomic_spin_nop ();
    }
}
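
// Set the spin bit on HEAD, blocking further pushes and pops, and
// return the previous (untagged) head pointer.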
static inline stack_node_base*
set_spin (stack_node_base::ptr_type& head)
{
  while (true)
    {
      auto tmp = get_node (head);
      if (!node_spinning_p (tmp) &&
          head.compare_exchange_weak (tmp,
            (stack_node_base *)((uintptr_t)tmp | SPIN_BIT),
            std::memory_order_acq_rel, std::memory_order_relaxed))
        return (tmp);

      xatomic_spin_nop ();
    }
}
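
// Exchange the root nodes of the stacks headed by H1 and H2; the
// releasing stores also clear the spin bits set by set_spin.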
void stack_node_base::swap (ptr_type& h1, ptr_type& h2)
{
  // Prevent any further modifications.
  auto ln = set_spin (h1), rn = set_spin (h2);

  // Swap the root nodes.
  h1.store (rn, std::memory_order_release);
  h2.store (ln, std::memory_order_release);
}
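
// Detach and return the whole chain, leaving the stack empty.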
stack_node_base* stack_node_base::clear (ptr_type& head)
{
  auto ret = set_spin (head);
  head.store (nullptr, std::memory_order_release);
  return (ret);
}
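
// Return the number of nodes in the stack by walking the chain.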
size_t stack_node_base::size (const ptr_type& head)
{
  auto runp = get_node (head);
  size_t ret = 0;

  for (; runp; runp = runp->next, ++ret) ;
  return (ret);
}

} } // namespaces
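
/* Usage sketch (illustration only, not part of the original file):
   a minimal example of how a caller might drive these primitives,
   assuming ptr_type is std::atomic<stack_node_base*>, as the calls
   above suggest.  The `my_node` type, `head` variable and `example`
   function below are hypothetical.

     struct my_node : public xrcu::detail::stack_node_base
       {
         int value;
       };

     xrcu::detail::stack_node_base::ptr_type head { nullptr };

     void example ()
       {
         auto *np = new my_node ();
         np->value = 42;

         // Link the node atop the stack.
         xrcu::detail::stack_node_base::push (head, np);

         // Detach the current top node (nullptr if empty).
         auto *top = static_cast<my_node *> (
           xrcu::detail::stack_node_base::pop (head));

         // A real container would typically defer reclamation
         // (e.g. via RCU) instead of deleting right away.
         delete top;
       }  */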