atomic.h

/* SPDX-License-Identifier: GPL-2.0 */
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <linux/types.h>
#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */
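
/*
 * Illustrative sketch (not part of this header's API): the failed-cmpxchg
 * note above matters for trylock-style code. Acquire ordering may only be
 * relied upon when the exchange succeeded; on failure the caller has not
 * synchronized with the previous owner and must not touch the protected
 * data. The example_trylock() helper below is hypothetical.
 *
 *	static bool example_trylock(atomic_t *lock)
 *	{
 *		// Ordered (acquire) only if we move the lock from 0 to 1.
 *		return atomic_cmpxchg_acquire(lock, 0, 1) == 0;
 *	}
 */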
#ifndef atomic_read_acquire
#define atomic_read_acquire(v) smp_load_acquire(&(v)->counter)
#endif
#ifndef atomic_set_release
#define atomic_set_release(v, i) smp_store_release(&(v)->counter, (i))
#endif
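
/*
 * Illustrative sketch (assumed example, not kernel code): a writer that
 * publishes data and then sets a flag with atomic_set_release() pairs with
 * a reader that checks the flag via atomic_read_acquire(); a reader that
 * observes the flag set is guaranteed to also observe the earlier plain
 * store. example_data and both helpers are hypothetical names.
 *
 *	static int example_data;
 *
 *	static void example_publish(atomic_t *ready)
 *	{
 *		example_data = 42;		// plain store
 *		atomic_set_release(ready, 1);	// ordered after the store above
 *	}
 *
 *	static bool example_consume(atomic_t *ready, int *out)
 *	{
 *		if (!atomic_read_acquire(ready))
 *			return false;
 *		*out = example_data;		// guaranteed to see 42
 *		return true;
 *	}
 */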
/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * If an architecture overrides __atomic_acquire_fence() it will probably
 * want to define smp_mb__after_spinlock().
 */
#ifndef __atomic_acquire_fence
#define __atomic_acquire_fence smp_mb__after_atomic
#endif
#ifndef __atomic_release_fence
#define __atomic_release_fence smp_mb__before_atomic
#endif
#ifndef __atomic_pre_full_fence
#define __atomic_pre_full_fence smp_mb__before_atomic
#endif
#ifndef __atomic_post_full_fence
#define __atomic_post_full_fence smp_mb__after_atomic
#endif

#define __atomic_op_acquire(op, args...) \
({ \
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args); \
	__atomic_acquire_fence(); \
	__ret; \
})

#define __atomic_op_release(op, args...) \
({ \
	__atomic_release_fence(); \
	op##_relaxed(args); \
})

#define __atomic_op_fence(op, args...) \
({ \
	typeof(op##_relaxed(args)) __ret; \
	__atomic_pre_full_fence(); \
	__ret = op##_relaxed(args); \
	__atomic_post_full_fence(); \
	__ret; \
})
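
/*
 * Illustrative expansion (a sketch, assuming an architecture that provides
 * only atomic_add_return_relaxed()): with the wrappers above,
 *
 *	atomic_add_return_acquire(i, v)
 *
 * becomes roughly
 *
 *	({
 *		int __ret = atomic_add_return_relaxed(i, v);
 *		__atomic_acquire_fence();	// smp_mb__after_atomic() by default
 *		__ret;
 *	})
 *
 * while the fully ordered atomic_add_return(i, v) brackets the relaxed op
 * with __atomic_pre_full_fence() and __atomic_post_full_fence().
 */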
/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_relaxed atomic_add_return
#define atomic_add_return_acquire atomic_add_return
#define atomic_add_return_release atomic_add_return
#else /* atomic_add_return_relaxed */
#ifndef atomic_add_return_acquire
#define atomic_add_return_acquire(...) \
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif
#ifndef atomic_add_return_release
#define atomic_add_return_release(...) \
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif
#ifndef atomic_add_return
#define atomic_add_return(...) \
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

#ifndef atomic_inc
#define atomic_inc(v) atomic_add(1, (v))
#endif

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#ifndef atomic_inc_return
#define atomic_inc_return(v) atomic_add_return(1, (v))
#define atomic_inc_return_relaxed(v) atomic_add_return_relaxed(1, (v))
#define atomic_inc_return_acquire(v) atomic_add_return_acquire(1, (v))
#define atomic_inc_return_release(v) atomic_add_return_release(1, (v))
#else /* atomic_inc_return */
#define atomic_inc_return_relaxed atomic_inc_return
#define atomic_inc_return_acquire atomic_inc_return
#define atomic_inc_return_release atomic_inc_return
#endif /* atomic_inc_return */
#else /* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_acquire
#define atomic_inc_return_acquire(...) \
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif
#ifndef atomic_inc_return_release
#define atomic_inc_return_release(...) \
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif
#ifndef atomic_inc_return
#define atomic_inc_return(...) \
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */
/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_relaxed atomic_sub_return
#define atomic_sub_return_acquire atomic_sub_return
#define atomic_sub_return_release atomic_sub_return
#else /* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_acquire
#define atomic_sub_return_acquire(...) \
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif
#ifndef atomic_sub_return_release
#define atomic_sub_return_release(...) \
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif
#ifndef atomic_sub_return
#define atomic_sub_return(...) \
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

#ifndef atomic_dec
#define atomic_dec(v) atomic_sub(1, (v))
#endif

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#ifndef atomic_dec_return
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_dec_return_relaxed(v) atomic_sub_return_relaxed(1, (v))
#define atomic_dec_return_acquire(v) atomic_sub_return_acquire(1, (v))
#define atomic_dec_return_release(v) atomic_sub_return_release(1, (v))
#else /* atomic_dec_return */
#define atomic_dec_return_relaxed atomic_dec_return
#define atomic_dec_return_acquire atomic_dec_return
#define atomic_dec_return_release atomic_dec_return
#endif /* atomic_dec_return */
#else /* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_acquire
#define atomic_dec_return_acquire(...) \
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif
#ifndef atomic_dec_return_release
#define atomic_dec_return_release(...) \
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif
#ifndef atomic_dec_return
#define atomic_dec_return(...) \
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */
/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed atomic_fetch_add
#define atomic_fetch_add_acquire atomic_fetch_add
#define atomic_fetch_add_release atomic_fetch_add
#else /* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...) \
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...) \
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic_fetch_add
#define atomic_fetch_add(...) \
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed
#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v) atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v) atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v) atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v) atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed atomic_fetch_inc
#define atomic_fetch_inc_acquire atomic_fetch_inc
#define atomic_fetch_inc_release atomic_fetch_inc
#endif /* atomic_fetch_inc */
#else /* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...) \
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif
#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...) \
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif
#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...) \
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */
/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed atomic_fetch_sub
#define atomic_fetch_sub_acquire atomic_fetch_sub
#define atomic_fetch_sub_release atomic_fetch_sub
#else /* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...) \
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...) \
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...) \
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed
#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v) atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v) atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v) atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v) atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed atomic_fetch_dec
#define atomic_fetch_dec_acquire atomic_fetch_dec
#define atomic_fetch_dec_release atomic_fetch_dec
#endif /* atomic_fetch_dec */
#else /* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...) \
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif
#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...) \
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif
#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...) \
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */
/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed atomic_fetch_or
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or
#else /* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...) \
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...) \
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic_fetch_or
#define atomic_fetch_or(...) \
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed atomic_fetch_and
#define atomic_fetch_and_acquire atomic_fetch_and
#define atomic_fetch_and_release atomic_fetch_and
#else /* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...) \
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...) \
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic_fetch_and
#define atomic_fetch_and(...) \
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */
#ifndef atomic_andnot
#define atomic_andnot(i, v) atomic_and(~(int)(i), (v))
#endif

#ifndef atomic_fetch_andnot_relaxed
#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(i, v) atomic_fetch_and(~(int)(i), (v))
#define atomic_fetch_andnot_relaxed(i, v) atomic_fetch_and_relaxed(~(int)(i), (v))
#define atomic_fetch_andnot_acquire(i, v) atomic_fetch_and_acquire(~(int)(i), (v))
#define atomic_fetch_andnot_release(i, v) atomic_fetch_and_release(~(int)(i), (v))
#else /* atomic_fetch_andnot */
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot
#define atomic_fetch_andnot_acquire atomic_fetch_andnot
#define atomic_fetch_andnot_release atomic_fetch_andnot
#endif /* atomic_fetch_andnot */
#else /* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...) \
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...) \
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...) \
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
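
/*
 * Illustrative use (hypothetical flag bit and helper): atomic_andnot()
 * clears the given bits, and atomic_fetch_andnot() also returns the prior
 * value, which tells the caller whether it was the one that cleared a flag.
 *
 *	#define EXAMPLE_FLAG_PENDING	0x1
 *
 *	static bool example_clear_pending(atomic_t *flags)
 *	{
 *		int old = atomic_fetch_andnot(EXAMPLE_FLAG_PENDING, flags);
 *
 *		return old & EXAMPLE_FLAG_PENDING;	// true if we cleared it
 *	}
 */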
/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed atomic_fetch_xor
#define atomic_fetch_xor_acquire atomic_fetch_xor
#define atomic_fetch_xor_release atomic_fetch_xor
#else /* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...) \
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...) \
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...) \
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */

/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed atomic_xchg
#define atomic_xchg_acquire atomic_xchg
#define atomic_xchg_release atomic_xchg
#else /* atomic_xchg_relaxed */
#ifndef atomic_xchg_acquire
#define atomic_xchg_acquire(...) \
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif
#ifndef atomic_xchg_release
#define atomic_xchg_release(...) \
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif
#ifndef atomic_xchg
#define atomic_xchg(...) \
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */
/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_relaxed atomic_cmpxchg
#define atomic_cmpxchg_acquire atomic_cmpxchg
#define atomic_cmpxchg_release atomic_cmpxchg
#else /* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_acquire
#define atomic_cmpxchg_acquire(...) \
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif
#ifndef atomic_cmpxchg_release
#define atomic_cmpxchg_release(...) \
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif
#ifndef atomic_cmpxchg
#define atomic_cmpxchg(...) \
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg
#define __atomic_try_cmpxchg(type, _p, _po, _n) \
({ \
	typeof(_po) __po = (_po); \
	typeof(*(_po)) __r, __o = *__po; \
	__r = atomic_cmpxchg##type((_p), __o, (_n)); \
	if (unlikely(__r != __o)) \
		*__po = __r; \
	likely(__r == __o); \
})
#define atomic_try_cmpxchg(_p, _po, _n) __atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n) __atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n) __atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n) __atomic_try_cmpxchg(_release, _p, _po, _n)
#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
#define atomic_try_cmpxchg_release atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
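
/*
 * Illustrative use (hypothetical helper): atomic_try_cmpxchg() returns true
 * on success and, on failure, stores the value it found back into *_po, so a
 * retry loop does not need a separate atomic_read() on every iteration.
 *
 *	static void example_set_max(atomic_t *max, int new)
 *	{
 *		int old = atomic_read(max);
 *
 *		do {
 *			if (old >= new)
 *				return;
 *		} while (!atomic_try_cmpxchg(max, &old, new));
 *	}
 */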
/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed cmpxchg
#define cmpxchg_acquire cmpxchg
#define cmpxchg_release cmpxchg
#else /* cmpxchg_relaxed */
#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...) \
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif
#ifndef cmpxchg_release
#define cmpxchg_release(...) \
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif
#ifndef cmpxchg
#define cmpxchg(...) \
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed cmpxchg64
#define cmpxchg64_acquire cmpxchg64
#define cmpxchg64_release cmpxchg64
#else /* cmpxchg64_relaxed */
#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...) \
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif
#ifndef cmpxchg64_release
#define cmpxchg64_release(...) \
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif
#ifndef cmpxchg64
#define cmpxchg64(...) \
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define xchg_relaxed xchg
#define xchg_acquire xchg
#define xchg_release xchg
#else /* xchg_relaxed */
#ifndef xchg_acquire
#define xchg_acquire(...) __atomic_op_acquire(xchg, __VA_ARGS__)
#endif
#ifndef xchg_release
#define xchg_release(...) __atomic_op_release(xchg, __VA_ARGS__)
#endif
#ifndef xchg
#define xchg(...) __atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */
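
/*
 * Illustrative use (struct example and both helpers are hypothetical):
 * unlike the atomic_*() helpers, xchg()/cmpxchg() operate on ordinary
 * scalar lvalues such as pointers.
 *
 *	static struct example *example_steal(struct example **slot)
 *	{
 *		return xchg(slot, NULL);	// fully ordered pointer swap
 *	}
 *
 *	static bool example_install(struct example **slot, struct example *obj)
 *	{
 *		return cmpxchg(slot, NULL, obj) == NULL;	// only fills an empty slot
 *	}
 */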
/**
 * atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic_fetch_add_unless
static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);
	do {
		if (unlikely(c == u))
			break;
	} while (!atomic_try_cmpxchg(v, &c, c + a));
	return c;
}
#endif

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic_add_unless(atomic_t *v, int a, int u)
{
	return atomic_fetch_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
#endif
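
/*
 * Illustrative use (hypothetical object): the usual lookup pattern takes a
 * reference only while the count is still non-zero, so an object whose count
 * has already dropped to zero and is being freed cannot be resurrected.
 *
 *	struct example_obj {
 *		atomic_t refs;
 *	};
 *
 *	static bool example_get(struct example_obj *obj)
 *	{
 *		return atomic_inc_not_zero(&obj->refs);	// fails once refs hit 0
 *	}
 */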
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic_inc_and_test
static inline bool atomic_inc_and_test(atomic_t *v)
{
	return atomic_inc_return(v) == 0;
}
#endif

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#ifndef atomic_dec_and_test
static inline bool atomic_dec_and_test(atomic_t *v)
{
	return atomic_dec_return(v) == 0;
}
#endif

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic_sub_and_test
static inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	return atomic_sub_return(i, v) == 0;
}
#endif

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#ifndef atomic_add_negative
static inline bool atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}
#endif
#ifndef atomic_inc_unless_negative
static inline bool atomic_inc_unless_negative(atomic_t *v)
{
	int c = atomic_read(v);
	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c + 1));
	return true;
}
#endif

#ifndef atomic_dec_unless_positive
static inline bool atomic_dec_unless_positive(atomic_t *v)
{
	int c = atomic_read(v);
	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c - 1));
	return true;
}
#endif

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = atomic_read(v);
	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic_try_cmpxchg(v, &c, dec));
	return dec;
}
#endif
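
/*
 * Illustrative use (hypothetical counter of free slots): because the return
 * value is the old value minus one even when nothing was decremented, a
 * negative result means no slot was taken.
 *
 *	static bool example_get_slot(atomic_t *free_slots)
 *	{
 *		return atomic_dec_if_positive(free_slots) >= 0;
 *	}
 */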
#define atomic_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
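
/*
 * Illustrative use (hypothetical helper): the condition is written in terms
 * of VAL, the value most recently read from the variable; the call spins (or
 * uses an architecture-specific wait primitive) until the condition becomes
 * true, and the _acquire form returns with acquire ordering.
 *
 *	static void example_wait_for_ready(atomic_t *state)
 *	{
 *		atomic_cond_read_acquire(state, VAL != 0);
 *	}
 */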
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v) smp_load_acquire(&(v)->counter)
#endif
#ifndef atomic64_set_release
#define atomic64_set_release(v, i) smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_relaxed atomic64_add_return
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return
#else /* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_acquire
#define atomic64_add_return_acquire(...) \
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif
#ifndef atomic64_add_return_release
#define atomic64_add_return_release(...) \
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif
#ifndef atomic64_add_return
#define atomic64_add_return(...) \
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

#ifndef atomic64_inc
#define atomic64_inc(v) atomic64_add(1, (v))
#endif

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#ifndef atomic64_inc_return
#define atomic64_inc_return(v) atomic64_add_return(1, (v))
#define atomic64_inc_return_relaxed(v) atomic64_add_return_relaxed(1, (v))
#define atomic64_inc_return_acquire(v) atomic64_add_return_acquire(1, (v))
#define atomic64_inc_return_release(v) atomic64_add_return_release(1, (v))
#else /* atomic64_inc_return */
#define atomic64_inc_return_relaxed atomic64_inc_return
#define atomic64_inc_return_acquire atomic64_inc_return
#define atomic64_inc_return_release atomic64_inc_return
#endif /* atomic64_inc_return */
#else /* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...) \
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif
#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...) \
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif
#ifndef atomic64_inc_return
#define atomic64_inc_return(...) \
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */
/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed atomic64_sub_return
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return
#else /* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_acquire
#define atomic64_sub_return_acquire(...) \
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif
#ifndef atomic64_sub_return_release
#define atomic64_sub_return_release(...) \
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif
#ifndef atomic64_sub_return
#define atomic64_sub_return(...) \
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

#ifndef atomic64_dec
#define atomic64_dec(v) atomic64_sub(1, (v))
#endif

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#ifndef atomic64_dec_return
#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_dec_return_relaxed(v) atomic64_sub_return_relaxed(1, (v))
#define atomic64_dec_return_acquire(v) atomic64_sub_return_acquire(1, (v))
#define atomic64_dec_return_release(v) atomic64_sub_return_release(1, (v))
#else /* atomic64_dec_return */
#define atomic64_dec_return_relaxed atomic64_dec_return
#define atomic64_dec_return_acquire atomic64_dec_return
#define atomic64_dec_return_release atomic64_dec_return
#endif /* atomic64_dec_return */
#else /* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...) \
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif
#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...) \
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif
#ifndef atomic64_dec_return
#define atomic64_dec_return(...) \
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */
/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed atomic64_fetch_add
#define atomic64_fetch_add_acquire atomic64_fetch_add
#define atomic64_fetch_add_release atomic64_fetch_add
#else /* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...) \
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...) \
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed
#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v) atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v) atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v) atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v) atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc
#define atomic64_fetch_inc_acquire atomic64_fetch_inc
#define atomic64_fetch_inc_release atomic64_fetch_inc
#endif /* atomic64_fetch_inc */
#else /* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...) \
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...) \
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */
/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
#define atomic64_fetch_sub_acquire atomic64_fetch_sub
#define atomic64_fetch_sub_release atomic64_fetch_sub
#else /* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...) \
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...) \
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed
#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v) atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v) atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v) atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v) atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec
#define atomic64_fetch_dec_acquire atomic64_fetch_dec
#define atomic64_fetch_dec_release atomic64_fetch_dec
#endif /* atomic64_fetch_dec */
#else /* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...) \
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...) \
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */
/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed atomic64_fetch_or
#define atomic64_fetch_or_acquire atomic64_fetch_or
#define atomic64_fetch_or_release atomic64_fetch_or
#else /* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...) \
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...) \
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed atomic64_fetch_and
#define atomic64_fetch_and_acquire atomic64_fetch_and
#define atomic64_fetch_and_release atomic64_fetch_and
#else /* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...) \
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...) \
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */
#ifndef atomic64_andnot
#define atomic64_andnot(i, v) atomic64_and(~(long long)(i), (v))
#endif

#ifndef atomic64_fetch_andnot_relaxed
#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(i, v) atomic64_fetch_and(~(long long)(i), (v))
#define atomic64_fetch_andnot_relaxed(i, v) atomic64_fetch_and_relaxed(~(long long)(i), (v))
#define atomic64_fetch_andnot_acquire(i, v) atomic64_fetch_and_acquire(~(long long)(i), (v))
#define atomic64_fetch_andnot_release(i, v) atomic64_fetch_and_release(~(long long)(i), (v))
#else /* atomic64_fetch_andnot */
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
#define atomic64_fetch_andnot_release atomic64_fetch_andnot
#endif /* atomic64_fetch_andnot */
#else /* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...) \
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...) \
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
#define atomic64_fetch_xor_acquire atomic64_fetch_xor
#define atomic64_fetch_xor_release atomic64_fetch_xor
#else /* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...) \
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...) \
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...) \
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */

/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed atomic64_xchg
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg
#else /* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...) \
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif
#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...) \
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif
#ifndef atomic64_xchg
#define atomic64_xchg(...) \
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */
/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg
#else /* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...) \
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif
#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...) \
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif
#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...) \
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg
#define __atomic64_try_cmpxchg(type, _p, _po, _n) \
({ \
	typeof(_po) __po = (_po); \
	typeof(*(_po)) __r, __o = *__po; \
	__r = atomic64_cmpxchg##type((_p), __o, (_n)); \
	if (unlikely(__r != __o)) \
		*__po = __r; \
	likely(__r == __o); \
})
#define atomic64_try_cmpxchg(_p, _po, _n) __atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n) __atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n) __atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n) __atomic64_try_cmpxchg(_release, _p, _po, _n)
#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */
/**
 * atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic64_fetch_add_unless
static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
						  long long u)
{
	long long c = atomic64_read(v);
	do {
		if (unlikely(c == u))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));
	return c;
}
#endif

/**
 * atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	return atomic64_fetch_add_unless(v, a, u) != u;
}

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic64_inc_not_zero
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
#endif
/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic64_inc_and_test
static inline bool atomic64_inc_and_test(atomic64_t *v)
{
	return atomic64_inc_return(v) == 0;
}
#endif

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#ifndef atomic64_dec_and_test
static inline bool atomic64_dec_and_test(atomic64_t *v)
{
	return atomic64_dec_return(v) == 0;
}
#endif

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic64_sub_and_test
static inline bool atomic64_sub_and_test(long long i, atomic64_t *v)
{
	return atomic64_sub_return(i, v) == 0;
}
#endif

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#ifndef atomic64_add_negative
static inline bool atomic64_add_negative(long long i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}
#endif
#ifndef atomic64_inc_unless_negative
static inline bool atomic64_inc_unless_negative(atomic64_t *v)
{
	long long c = atomic64_read(v);
	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + 1));
	return true;
}
#endif

#ifndef atomic64_dec_unless_positive
static inline bool atomic64_dec_unless_positive(atomic64_t *v)
{
	long long c = atomic64_read(v);
	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c - 1));
	return true;
}
#endif

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic64 variable, v, was not decremented.
 */
#ifndef atomic64_dec_if_positive
static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long dec, c = atomic64_read(v);
	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));
	return dec;
}
#endif
#define atomic64_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic64_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */