// skip_list.hpp
/* Declarations for the skip list template type.

   This file is part of xrcu.

   xrcu is free software: you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <https://www.gnu.org/licenses/>.  */
#ifndef __SKIP_LIST_HPP__
#define __SKIP_LIST_HPP__   1

#include "xrcu.hpp"
#include "xatomic.hpp"
#include "optional.hpp"
#include "utils.hpp"

#include <atomic>
#include <cstddef>
#include <cstdint>
#include <functional>
#include <initializer_list>
#include <iterator>
  23. namespace std
  24. {
  25. struct forward_iterator_tag;
  26. }
  27. namespace xrcu
  28. {
  29. namespace detail
  30. {
/* Node allocation helpers - implemented outside this header.  */
struct sl_node_base;

void sl_dealloc_node (void *base);
sl_node_base* sl_alloc_node (unsigned int lvl, size_t size, uintptr_t **outpp);

/* Low bit of a forward link: marks the pointed-from node as logically
   deleted.  */
static const uintptr_t SL_XBIT = 1;

/* Maximum number of levels any node may span.  */
static const unsigned int SL_MAX_DEPTH = 24;

/* Unlink policies for skip_list::_Find_preds.  */
static const int SL_UNLINK_SKIP = -1;    // Skip marked nodes; also report
                                         // the stop position (upper_bound).
static const int SL_UNLINK_NONE = 0;     // Skip marked nodes (pure lookup).
static const int SL_UNLINK_ASSIST = 1;   // Help unlink marked nodes found
                                         // along the way.
static const int SL_UNLINK_FORCE = 2;    // Unlink the node for the key.
/* Base type for every skip list node: a level count plus the array of
   per-level forward links.  The links live in the same allocation as the
   node (see sl_alloc_node); the root node carries one extra leading word
   used as the element-count/lock word (see 'plen' and 'make_root').  */
struct sl_node_base : public finalizable
{
  unsigned int nlvl;    // Number of levels this node links at.
  uintptr_t *next;      // Forward links; SL_XBIT marks logical deletion.

  sl_node_base (unsigned int lvl, uintptr_t *np) : nlvl (lvl), next (np)
    {
    }

  /* Recover the node pointer from a (possibly marked) link value.  */
  static sl_node_base* get (uintptr_t addr)
    {
      return ((sl_node_base *)(addr & ~SL_XBIT));
    }

  /* Access the level-LVL link of the node behind ADDR.  */
  static uintptr_t& at (uintptr_t addr, unsigned int lvl)
    {
      return (sl_node_base::get(addr)->next[lvl]);
    }

  /* Pointer to the length word stored just before the root's links.  */
  static uintptr_t* plen (uintptr_t addr)
    {
      return (sl_node_base::get(addr)->next - 1);
    }

  /* Adjust the element count by OFF.  The count is kept shifted left by
     one bit - the low bit of the length word is the root lock flag used
     by skip_list::_Lock_root - hence the doubled increment.  */
  static void bump (uintptr_t *lenp, intptr_t off)
    {
      xatomic_add (lenp, off + off);
    }

  /* Allocate the root (sentinel) node with DEPTH usable levels; the
     extra leading word becomes the length/lock word.  */
  static sl_node_base* make_root (unsigned int depth)
    {
      uintptr_t *np;
      auto ret = sl_alloc_node (depth + 1, sizeof (sl_node_base), &np);
      return (new (ret) sl_node_base (depth, np + 1));
    }

  /* Pick a random level for a new node (scaled count of trailing zeros
     of a random word, i.e. roughly geometric), raising the list's
     high-water mark HW by at most one level per call.  */
  static unsigned int
  rand_lvl (std::atomic<size_t>& hw)
    {
      size_t lvl = ctz (xrand ()) * 2 / 3;

      if (lvl == 0)
        return (1);

      while (true)
        {
          auto prev = hw.load (std::memory_order_relaxed);
          if (lvl <= prev)
            return (lvl);
          else if (prev == SL_MAX_DEPTH ||
              hw.compare_exchange_weak (prev, prev + 1,
                std::memory_order_acq_rel, std::memory_order_relaxed))
            return (prev);

          xatomic_spin_nop ();
        }
    }

  /* Release the node's memory (presumably invoked through the
     'finalizable' RCU machinery - confirm in xrcu.hpp).  */
  void safe_destroy ()
    {
      sl_dealloc_node (this);
    }
};
  92. template <class T>
  93. struct sl_node : public sl_node_base
  94. {
  95. T key;
  96. sl_node (unsigned int lvl, uintptr_t *np, const T& kv) :
  97. sl_node_base (lvl, np), key (kv)
  98. {
  99. }
  100. template <class ...Args>
  101. sl_node (unsigned int lvl, uintptr_t *np, Args... args) :
  102. sl_node_base (lvl, np), key (std::forward<Args&&>(args)...)
  103. {
  104. }
  105. static sl_node<T>* copy (unsigned int lvl, const T& k)
  106. {
  107. uintptr_t *np;
  108. auto self = (sl_node<T> *)sl_alloc_node (lvl, sizeof (sl_node<T>), &np);
  109. try
  110. {
  111. return (new (self) sl_node<T> (lvl, np, k));
  112. }
  113. catch (...)
  114. {
  115. sl_dealloc_node (self);
  116. throw;
  117. }
  118. }
  119. template <class ...Args>
  120. static sl_node<T>* move (unsigned int lvl, Args... args)
  121. {
  122. uintptr_t *np;
  123. auto self = (sl_node<T> *)sl_alloc_node (lvl, sizeof (sl_node<T>), &np);
  124. return (new (self) sl_node<T> (lvl, np, std::forward<Args&&>(args)...));
  125. }
  126. void safe_destroy ()
  127. {
  128. destroy<T> (&this->key);
  129. sl_dealloc_node (this);
  130. }
  131. };
  132. inline void
  133. init_preds_succs (uintptr_t *p1, uintptr_t *p2)
  134. {
  135. for (unsigned int i = 0; i < SL_MAX_DEPTH; ++i)
  136. p1[i] = p2[i] = 0;
  137. }
  138. } // namespace detail
  139. template <class T, class Cmp = std::less<T> >
  140. struct skip_list
  141. {
  142. std::atomic<detail::sl_node_base *> head;
  143. Cmp cmpfn;
  144. unsigned int max_depth;
  145. std::atomic<size_t> hi_water { 1 };
  146. typedef T value_type;
  147. typedef T key_type;
  148. typedef Cmp key_compare;
  149. typedef Cmp value_compare;
  150. typedef size_t size_type;
  151. typedef ptrdiff_t difference_type;
  152. typedef T& reference;
  153. typedef const T& const_reference;
  154. typedef T* pointer;
  155. typedef const T* const_pointer;
  156. typedef skip_list<T, Cmp> _Self;
  157. typedef detail::sl_node<T> _Node;
  158. uintptr_t _Head () const
  159. {
  160. return ((uintptr_t)this->head.load (std::memory_order_relaxed));
  161. }
  162. size_t _Hiwater () const
  163. {
  164. return (this->hi_water.load (std::memory_order_relaxed));
  165. }
  166. void _Init (Cmp c, unsigned int depth)
  167. {
  168. this->cmpfn = c;
  169. if ((this->max_depth = depth) > detail::SL_MAX_DEPTH)
  170. this->max_depth = detail::SL_MAX_DEPTH;
  171. this->head.store (_Node::make_root (this->max_depth + 1),
  172. std::memory_order_relaxed);
  173. }
  174. skip_list (Cmp c = Cmp (), unsigned int depth = detail::SL_MAX_DEPTH)
  175. {
  176. this->_Init (c, depth);
  177. }
  178. template <class Iter>
  179. skip_list (Iter first, Iter last,
  180. Cmp c = Cmp (), unsigned int depth = detail::SL_MAX_DEPTH)
  181. {
  182. this->_Init (c, depth);
  183. for (; first != last; ++first)
  184. this->insert (*first);
  185. }
  186. skip_list (std::initializer_list<T> lst,
  187. Cmp c = Cmp (), unsigned int depth = detail::SL_MAX_DEPTH) :
  188. skip_list (lst.begin (), lst.end (), c, depth)
  189. {
  190. }
  191. skip_list (const _Self& right) : skip_list (right.begin (), right.end ())
  192. {
  193. }
  194. skip_list (_Self&& right)
  195. {
  196. this->head.store (right.head.load (std::memory_order_relaxed),
  197. std::memory_order_relaxed);
  198. this->cmpfn = right.cmpfn;
  199. this->max_depth = right.max_depth;
  200. this->hi_water.store (right.hi_water.load (std::memory_order_relaxed),
  201. std::memory_order_relaxed);
  202. right.head.store (nullptr, std::memory_order_relaxed);
  203. }
  204. struct iterator : public cs_guard
  205. {
  206. uintptr_t node;
  207. typedef std::forward_iterator_tag iterator_category;
  208. iterator () : node (0)
  209. {
  210. }
  211. iterator (uintptr_t addr) : node (addr)
  212. {
  213. }
  214. iterator (const iterator& right) : node (right.node)
  215. {
  216. }
  217. iterator (iterator&& right) : node (right.node)
  218. {
  219. right.node = 0;
  220. }
  221. iterator& operator++ ()
  222. {
  223. while (true)
  224. {
  225. if (this->node == 0)
  226. break;
  227. this->node = detail::sl_node_base::at (this->node, 0);
  228. if ((this->node & detail::SL_XBIT) == 0)
  229. break;
  230. }
  231. return (*this);
  232. }
  233. iterator operator++ (int)
  234. {
  235. iterator tmp { this->node };
  236. ++*this;
  237. return (tmp);
  238. }
  239. const T& operator* () const
  240. {
  241. return (skip_list<T, Cmp>::_Getk (this->node));
  242. }
  243. const T* operator-> () const
  244. {
  245. return (&**this);
  246. }
  247. iterator& operator= (const iterator& right)
  248. {
  249. this->node = right.node;
  250. return (*this);
  251. }
  252. iterator& operator= (iterator&& right)
  253. {
  254. this->node = right.node;
  255. right.node = 0;
  256. return (*this);
  257. }
  258. bool operator== (const iterator& right) const
  259. {
  260. return (this->node == right.node);
  261. }
  262. bool operator!= (const iterator& right) const
  263. {
  264. return (this->node != right.node);
  265. }
  266. };
  267. typedef iterator const_iterator;
  268. static const T& _Getk (uintptr_t addr)
  269. {
  270. return (((_Node *)_Node::get(addr))->key);
  271. }
  272. uintptr_t _Find_preds (int n, const T& key, int unlink,
  273. uintptr_t *preds = nullptr, uintptr_t *succs = nullptr,
  274. uintptr_t *outp = nullptr) const
  275. {
  276. bool got = false;
  277. uintptr_t pr = this->_Head (), it = 0;
  278. if (outp)
  279. *outp = pr;
  280. for (int lvl = (int)this->_Hiwater () - 1; lvl >= 0; --lvl)
  281. {
  282. uintptr_t next = _Node::at (pr, lvl);
  283. if (next == 0 && lvl >= n)
  284. continue;
  285. else if (next & detail::SL_XBIT)
  286. return (this->_Find_preds (n, key, unlink, preds, succs, outp));
  287. for (it = next; it != 0; )
  288. {
  289. next = _Node::at (it, lvl);
  290. while (next & detail::SL_XBIT)
  291. {
  292. if (unlink == detail::SL_UNLINK_SKIP ||
  293. unlink == detail::SL_UNLINK_NONE)
  294. { // Skip logically deleted elements.
  295. if ((it = next & ~detail::SL_XBIT) == 0)
  296. break;
  297. next = _Node::at (it, lvl);
  298. }
  299. else
  300. {
  301. uintptr_t qx = xatomic_cas (&_Node::at(pr, lvl),
  302. it, next & ~detail::SL_XBIT);
  303. if (qx == it)
  304. it = next & ~detail::SL_XBIT;
  305. else
  306. {
  307. if (qx & detail::SL_XBIT)
  308. return (this->_Find_preds (n,
  309. key, unlink, preds, succs, outp));
  310. it = qx;
  311. }
  312. next = it ? _Node::at (it, lvl) : 0;
  313. }
  314. }
  315. if (it == 0 || this->cmpfn (key, _Self::_Getk (it)) ||
  316. (unlink != detail::SL_UNLINK_FORCE &&
  317. (got = !this->cmpfn (_Self::_Getk (it), key))))
  318. break;
  319. pr = it, it = next;
  320. }
  321. if (preds)
  322. preds[lvl] = pr, succs[lvl] = it;
  323. }
  324. return (got || unlink == detail::SL_UNLINK_SKIP ? it : 0);
  325. }
  326. optional<T> find (const T& key) const
  327. {
  328. cs_guard g;
  329. uintptr_t rv = this->_Find_preds (0, key, detail::SL_UNLINK_NONE);
  330. return (rv ? optional<T> (this->_Getk (rv)) : optional<T> ());
  331. }
  332. bool contains (const T& key) const
  333. {
  334. cs_guard g;
  335. return (this->_Find_preds (0, key, detail::SL_UNLINK_NONE) != 0);
  336. }
  337. const_iterator lower_bound (const T& key) const
  338. {
  339. uintptr_t preds[detail::SL_MAX_DEPTH], succs[detail::SL_MAX_DEPTH];
  340. detail::init_preds_succs (preds, succs);
  341. cs_guard g;
  342. this->_Find_preds (0, key, detail::SL_UNLINK_NONE, preds, succs);
  343. uintptr_t it_val;
  344. for (auto val : preds)
  345. if ((it_val = val) != 0)
  346. break;
  347. const_iterator ret { it_val };
  348. if (ret.node == this->_Head ())
  349. ++ret;
  350. return (ret);
  351. }
  352. const_iterator upper_bound (const T& key) const
  353. {
  354. cs_guard g;
  355. uintptr_t it = this->_Find_preds (0, key, detail::SL_UNLINK_SKIP);
  356. const_iterator ret { it };
  357. if (it)
  358. ++ret;
  359. return (ret);
  360. }
  361. bool _Insert (const T& key)
  362. {
  363. uintptr_t xroot;
  364. uintptr_t preds[detail::SL_MAX_DEPTH], succs[detail::SL_MAX_DEPTH];
  365. detail::init_preds_succs (preds, succs);
  366. size_t n = _Node::rand_lvl (this->hi_water);
  367. if (this->_Find_preds (n, key, detail::SL_UNLINK_ASSIST,
  368. preds, succs, &xroot) != 0)
  369. return (false);
  370. uintptr_t nv = (uintptr_t)_Node::copy (n, key);
  371. for (size_t lvl = 0; lvl < n; ++lvl)
  372. _Node::at(nv, lvl) = succs[lvl];
  373. uintptr_t pred = *preds;
  374. if (!xatomic_cas_bool (&_Node::at(pred, 0), *succs, nv))
  375. {
  376. _Node::get(nv)->safe_destroy ();
  377. return (this->_Insert (key));
  378. }
  379. for (size_t lvl = 1; lvl < n; ++lvl)
  380. while (true)
  381. {
  382. pred = preds[lvl];
  383. if (xatomic_cas_bool (&_Node::at(pred, lvl), succs[lvl], nv))
  384. break; // Successful link.
  385. this->_Find_preds (n, key, detail::SL_UNLINK_ASSIST, preds, succs);
  386. for (size_t ix = lvl; ix < n; ++ix)
  387. if ((pred = _Node::at (nv, ix)) == succs[ix])
  388. continue;
  389. else if (xatomic_cas (&_Node::at(nv, ix),
  390. pred, succs[ix]) & detail::SL_XBIT)
  391. { // Another thread is removing this very key - Bail out.
  392. this->_Find_preds (0, key, detail::SL_UNLINK_FORCE);
  393. return (false);
  394. }
  395. }
  396. if (_Node::at (nv, n - 1) & detail::SL_XBIT)
  397. { // Another thread is removing this key - Make sure it's unlinked.
  398. this->_Find_preds (0, key, detail::SL_UNLINK_FORCE);
  399. return (false);
  400. }
  401. _Node::bump (_Node::plen (xroot), 1);
  402. return (true);
  403. }
  404. bool insert (const T& key)
  405. {
  406. cs_guard g;
  407. return (this->_Insert (key));
  408. }
  409. uintptr_t _Erase (const T& key)
  410. {
  411. uintptr_t xroot, it = this->_Find_preds (this->_Hiwater (), key,
  412. detail::SL_UNLINK_NONE,
  413. nullptr, nullptr, &xroot);
  414. if (it == 0)
  415. return (it);
  416. detail::sl_node_base *nodep = _Node::get (it);
  417. uintptr_t qx = 0, next = 0;
  418. for (int lvl = nodep->nlvl - 1; lvl >= 0; --lvl)
  419. {
  420. qx = nodep->next[lvl];
  421. do
  422. {
  423. next = qx;
  424. qx = xatomic_cas (&nodep->next[lvl],
  425. next, next | detail::SL_XBIT);
  426. if (qx & detail::SL_XBIT)
  427. {
  428. if (lvl == 0)
  429. return (0);
  430. break;
  431. }
  432. }
  433. while (next != qx);
  434. }
  435. // Unlink the item.
  436. this->_Find_preds (0, key, detail::SL_UNLINK_FORCE);
  437. _Node::bump (_Node::plen (xroot), -1);
  438. finalize (nodep);
  439. return (it);
  440. }
  441. bool erase (const T& key)
  442. {
  443. cs_guard g;
  444. return (this->_Erase (key) != 0);
  445. }
  446. optional<T> remove (const T& key)
  447. {
  448. cs_guard g;
  449. uintptr_t it = this->_Erase (key);
  450. return (it ? optional<T> (_Self::_Getk (it)) : optional<T> ());
  451. }
  452. const_iterator cbegin () const
  453. {
  454. return (iterator (_Node::at (this->_Head (), 0)));
  455. }
  456. iterator begin ()
  457. {
  458. return (this->cbegin ());
  459. }
  460. const_iterator begin () const
  461. {
  462. return (this->cbegin ());
  463. }
  464. iterator end ()
  465. {
  466. return (this->cend ());
  467. }
  468. const_iterator end () const
  469. {
  470. return (this->cend ());
  471. }
  472. const_iterator cend () const
  473. {
  474. return (iterator (0));
  475. }
  476. size_t size () const
  477. {
  478. cs_guard g;
  479. uintptr_t *p = this->head.load (std::memory_order_relaxed)->next - 1;
  480. return (*p >> 1);
  481. }
  482. size_t max_size () const
  483. {
  484. return ((~(size_t)0) >> 1);
  485. }
  486. bool empty () const
  487. {
  488. return (this->size () == 0);
  489. }
  490. template <bool Destroy = false>
  491. void _Fini_root (detail::sl_node_base *xroot)
  492. {
  493. for (uintptr_t run = (uintptr_t)xroot; run != 0; )
  494. {
  495. uintptr_t next = _Node::at (run, 0);
  496. if (Destroy)
  497. _Node::get(run)->safe_destroy ();
  498. else
  499. finalize (_Node::get (run));
  500. run = next;
  501. }
  502. }
  503. void _Lock_root ()
  504. {
  505. cs_guard g;
  506. while (true)
  507. {
  508. auto ptr = _Node::plen ((uintptr_t)
  509. this->head.load (std::memory_order_relaxed));
  510. auto val = *ptr;
  511. if ((val & 1) == 0 && xatomic_cas_bool (ptr, val, val | 1))
  512. break;
  513. xatomic_spin_nop ();
  514. }
  515. }
  516. template <class Iter>
  517. void assign (Iter first, Iter last)
  518. {
  519. _Self tmp (first, last);
  520. auto tp = tmp.head.load (std::memory_order_relaxed);
  521. tmp.head.store (this->head.exchange (tp, std::memory_order_acq_rel));
  522. }
  523. void assign (std::initializer_list<T> lst)
  524. {
  525. this->assign (lst.begin (), lst.end ());
  526. }
  527. _Self& operator= (const _Self& right)
  528. {
  529. this->assign (right.begin (), right.end ());
  530. return (*this);
  531. }
  532. _Self& operator= (_Self&& right)
  533. {
  534. auto tp = right.head.load (std::memory_order_relaxed);
  535. this->_Fini_root<> (this->head.exchange (tp, std::memory_order_acq_rel));
  536. right.head.store (nullptr, std::memory_order_relaxed);
  537. }
  538. void swap (_Self& right)
  539. {
  540. if (this == &right)
  541. return;
  542. this->_Lock_root ();
  543. right._Lock_root ();
  544. auto lw = this->hi_water.load (std::memory_order_relaxed);
  545. auto rw = right.hi_water.load (std::memory_order_relaxed);
  546. this->hi_water.store (rw, std::memory_order_relaxed);
  547. right.hi_water.store (lw, std::memory_order_relaxed);
  548. auto lh = this->head.load (std::memory_order_relaxed);
  549. auto rh = right.head.load (std::memory_order_relaxed);
  550. this->head.store (rh, std::memory_order_relaxed);
  551. right.head.store (lh, std::memory_order_relaxed);
  552. xatomic_and (_Node::plen ((uintptr_t)rh), ~(uintptr_t)1);
  553. xatomic_and (_Node::plen ((uintptr_t)lh), ~(uintptr_t)1);
  554. }
  555. void clear ()
  556. {
  557. auto xroot = _Node::make_root (this->max_depth);
  558. auto prev = this->head.exchange (xroot, std::memory_order_release);
  559. this->_Fini_root<> (prev);
  560. }
  561. ~skip_list ()
  562. {
  563. this->_Fini_root<true> (this->head.load (std::memory_order_relaxed));
  564. }
  565. };
  566. } // namespace xrcu
namespace std
{

/* std::swap overload for skip_list, dispatching to the member swap.
   NOTE(review): overloading (rather than specializing) a function in
   namespace std is not permitted by the standard; the conforming
   alternative is an ADL 'swap' in namespace xrcu - confirm no callers
   rely on qualified std::swap before changing this.  */
template <class T, class Cmp>
void swap (xrcu::skip_list<T, Cmp>& left,
           xrcu::skip_list<T, Cmp>& right)
{
  left.swap (right);
}

} // namespace std
  576. #endif