/* kern/atomic_i.h */
/*
 * Copyright (c) 2018 Richard Braun.
 * Copyright (c) 2017 Agustina Arzille.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 *
 * Architecture-specific code may override any of the type-specific
 * functions by defining a macro of the same name.
 */
#ifndef KERN_ATOMIC_I_H
#define KERN_ATOMIC_I_H

#include <stdbool.h>
#include <stdint.h>

#include <kern/atomic_types.h>
#include <kern/macros.h>
#include <machine/atomic.h>

/*
 * Alignment required for atomic access to an object: the object's own
 * size, capped at the size of a pointer.
 */
#define ATOMIC_ALIGN(ptr) MIN(sizeof(*(ptr)), sizeof(ptr))

/* True if ptr is suitably aligned for atomic access to *ptr */
#define atomic_ptr_aligned(ptr) P2ALIGNED((uintptr_t)(ptr), ATOMIC_ALIGN(ptr))
/*
 * This macro is used to select the appropriate function for the given
 * operation. The default expression is selected for pointer types.
 * In order to avoid confusing errors, all built-in types are explicitly
 * listed, so that unsupported ones don't select pointer operations.
 * Instead, they select a function with an explicit name indicating
 * an invalid type.
 *
 * int maps to the 32-bit functions, long to the "ul" aliases (which
 * resolve to 32 or 64-bit depending on __LP64__), and long long to the
 * 64-bit functions.
 */
#define atomic_select(ptr, op) \
_Generic(*(ptr), \
float: atomic_invalid_type, \
double: atomic_invalid_type, \
long double: atomic_invalid_type, \
bool: atomic_invalid_type, \
char: atomic_invalid_type, \
signed char: atomic_invalid_type, \
unsigned char: atomic_invalid_type, \
short: atomic_invalid_type, \
unsigned short: atomic_invalid_type, \
int: atomic_ ## op ## _32, \
unsigned int: atomic_ ## op ## _32, \
long: atomic_ ## op ## _ul, \
unsigned long: atomic_ ## op ## _ul, \
long long: atomic_ ## op ## _64, \
unsigned long long: atomic_ ## op ## _64, \
default: atomic_ ## op ## _ptr)

/*
 * Selected for unsupported types; presumably never defined, so that any
 * use fails at link time with an explicit name — TODO confirm no
 * definition exists elsewhere.
 */
void atomic_invalid_type(void);
/* atomic_load */

/*
 * Atomically load a 32-bit value with the given memory ordering.
 * Architecture code may override by defining the atomic_load_32 macro.
 */
#ifndef atomic_load_32
static inline unsigned int
atomic_load_32(union atomic_constptr_32 ptr, int memorder)
{
    return __atomic_load_n(ptr.ui_ptr, memorder);
}
#endif /* atomic_load_32 */

#ifdef ATOMIC_HAVE_64B_OPS
#ifndef atomic_load_64
static inline unsigned long long
atomic_load_64(union atomic_constptr_64 ptr, int memorder)
{
    return __atomic_load_n(ptr.ull_ptr, memorder);
}
#endif /* atomic_load_64 */
#else /* ATOMIC_HAVE_64B_OPS */
/* Without 64-bit support, 64-bit loads select the invalid-type error */
#define atomic_load_64 atomic_invalid_type
#endif /* ATOMIC_HAVE_64B_OPS */

/* long and pointer variants follow the data model */
#ifdef __LP64__
#define atomic_load_ul atomic_load_64
#else /* __LP64__ */
#define atomic_load_ul atomic_load_32
#endif /* __LP64__ */

#define atomic_load_ptr atomic_load_ul
  83. /* atomic_store */
  84. #ifndef atomic_store_32
  85. static inline void
  86. atomic_store_32(union atomic_ptr_32 ptr, union atomic_val_32 val, int memorder)
  87. {
  88. return __atomic_store_n(ptr.ui_ptr, val.ui, memorder);
  89. }
  90. #endif /* atomic_store_32 */
  91. #ifdef ATOMIC_HAVE_64B_OPS
  92. #ifndef atomic_store_64
  93. static inline void
  94. atomic_store_64(union atomic_ptr_64 ptr, union atomic_val_64 val, int memorder)
  95. {
  96. return __atomic_store_n(ptr.ull_ptr, val.ull, memorder);
  97. }
  98. #endif /* atomic_store_64 */
  99. #else /* ATOMIC_HAVE_64B_OPS */
  100. #define atomic_store_64 atomic_invalid_type
  101. #endif /* ATOMIC_HAVE_64B_OPS */
  102. #ifdef __LP64__
  103. #define atomic_store_ul atomic_store_64
  104. #else /* __LP64__ */
  105. #define atomic_store_ul atomic_store_32
  106. #endif /* __LP64__ */
  107. #define atomic_store_ptr atomic_store_ul
/* atomic_cas */

/*
 * Compare-and-swap: if *ptr equals oval, atomically store nval.
 * The macro evaluates to the value of *ptr prior to the operation
 * (on success, the copied oval; on failure, the value written back
 * into oval_ by __atomic_compare_exchange_n). The failure ordering
 * is always relaxed.
 */
#define atomic_cas_n(ptr, oval, nval, memorder) \
MACRO_BEGIN \
typeof(oval) oval_; \
\
oval_ = (oval); \
__atomic_compare_exchange_n(ptr, &oval_, (nval), false, \
memorder, __ATOMIC_RELAXED); \
oval_; \
MACRO_END

#ifndef atomic_cas_32
static inline unsigned int
atomic_cas_32(union atomic_ptr_32 ptr, union atomic_val_32 oval,
              union atomic_val_32 nval, int memorder)
{
    return atomic_cas_n(ptr.ui_ptr, oval.ui, nval.ui, memorder);
}
#endif /* atomic_cas_32 */

#ifdef ATOMIC_HAVE_64B_OPS
#ifndef atomic_cas_64
static inline unsigned long long
atomic_cas_64(union atomic_ptr_64 ptr, union atomic_val_64 oval,
              union atomic_val_64 nval, int memorder)
{
    return atomic_cas_n(ptr.ull_ptr, oval.ull, nval.ull, memorder);
}
#endif /* atomic_cas_64 */
#else /* ATOMIC_HAVE_64B_OPS */
/* Without 64-bit support, 64-bit CAS selects the invalid-type error */
#define atomic_cas_64 atomic_invalid_type
#endif /* ATOMIC_HAVE_64B_OPS */

/* long and pointer variants follow the data model */
#ifdef __LP64__
#define atomic_cas_ul atomic_cas_64
#else /* __LP64__ */
#define atomic_cas_ul atomic_cas_32
#endif /* __LP64__ */

#define atomic_cas_ptr atomic_cas_ul
/* atomic_swap */

/*
 * Atomically exchange *ptr with val, returning the previous value.
 * Architecture code may override by defining the atomic_swap_32 macro.
 */
#ifndef atomic_swap_32
static inline unsigned int
atomic_swap_32(union atomic_ptr_32 ptr, union atomic_val_32 val, int memorder)
{
    return __atomic_exchange_n(ptr.ui_ptr, val.ui, memorder);
}
#endif /* atomic_swap_32 */

#ifdef ATOMIC_HAVE_64B_OPS
#ifndef atomic_swap_64
static inline unsigned long long
atomic_swap_64(union atomic_ptr_64 ptr, union atomic_val_64 val, int memorder)
{
    return __atomic_exchange_n(ptr.ull_ptr, val.ull, memorder);
}
#endif /* atomic_swap_64 */
#else /* ATOMIC_HAVE_64B_OPS */
/* Without 64-bit support, 64-bit swaps select the invalid-type error */
#define atomic_swap_64 atomic_invalid_type
#endif /* ATOMIC_HAVE_64B_OPS */

/* long and pointer variants follow the data model */
#ifdef __LP64__
#define atomic_swap_ul atomic_swap_64
#else /* __LP64__ */
#define atomic_swap_ul atomic_swap_32
#endif /* __LP64__ */

#define atomic_swap_ptr atomic_swap_ul
/* atomic_fetch_add */

/*
 * Atomically add val to *ptr, returning the previous value.
 * Architecture code may override by defining the atomic_fetch_add_32
 * macro.
 */
#ifndef atomic_fetch_add_32
static inline unsigned int
atomic_fetch_add_32(union atomic_ptr_32 ptr, union atomic_val_32 val,
                    int memorder)
{
    return __atomic_fetch_add(ptr.ui_ptr, val.ui, memorder);
}
#endif /* atomic_fetch_add_32 */

#ifdef ATOMIC_HAVE_64B_OPS
#ifndef atomic_fetch_add_64
static inline unsigned long long
atomic_fetch_add_64(union atomic_ptr_64 ptr, union atomic_val_64 val,
                    int memorder)
{
    return __atomic_fetch_add(ptr.ull_ptr, val.ull, memorder);
}
#endif /* atomic_fetch_add_64 */
#else /* ATOMIC_HAVE_64B_OPS */
/* Without 64-bit support, the 64-bit op selects the invalid-type error */
#define atomic_fetch_add_64 atomic_invalid_type
#endif /* ATOMIC_HAVE_64B_OPS */

/* long and pointer variants follow the data model */
#ifdef __LP64__
#define atomic_fetch_add_ul atomic_fetch_add_64
#else /* __LP64__ */
#define atomic_fetch_add_ul atomic_fetch_add_32
#endif /* __LP64__ */

#define atomic_fetch_add_ptr atomic_fetch_add_ul
/* atomic_fetch_sub */

/*
 * Atomically subtract val from *ptr, returning the previous value.
 * Architecture code may override by defining the atomic_fetch_sub_32
 * macro.
 */
#ifndef atomic_fetch_sub_32
static inline unsigned int
atomic_fetch_sub_32(union atomic_ptr_32 ptr, union atomic_val_32 val,
                    int memorder)
{
    return __atomic_fetch_sub(ptr.ui_ptr, val.ui, memorder);
}
#endif /* atomic_fetch_sub_32 */

#ifdef ATOMIC_HAVE_64B_OPS
#ifndef atomic_fetch_sub_64
static inline unsigned long long
atomic_fetch_sub_64(union atomic_ptr_64 ptr, union atomic_val_64 val,
                    int memorder)
{
    return __atomic_fetch_sub(ptr.ull_ptr, val.ull, memorder);
}
#endif /* atomic_fetch_sub_64 */
#else /* ATOMIC_HAVE_64B_OPS */
/* Without 64-bit support, the 64-bit op selects the invalid-type error */
#define atomic_fetch_sub_64 atomic_invalid_type
#endif /* ATOMIC_HAVE_64B_OPS */

/* long and pointer variants follow the data model */
#ifdef __LP64__
#define atomic_fetch_sub_ul atomic_fetch_sub_64
#else /* __LP64__ */
#define atomic_fetch_sub_ul atomic_fetch_sub_32
#endif /* __LP64__ */

#define atomic_fetch_sub_ptr atomic_fetch_sub_ul
/* atomic_fetch_and */

/*
 * Atomically AND val into *ptr, returning the previous value.
 * Architecture code may override by defining the atomic_fetch_and_32
 * macro.
 */
#ifndef atomic_fetch_and_32
static inline unsigned int
atomic_fetch_and_32(union atomic_ptr_32 ptr, union atomic_val_32 val,
                    int memorder)
{
    return __atomic_fetch_and(ptr.ui_ptr, val.ui, memorder);
}
#endif /* atomic_fetch_and_32 */

#ifdef ATOMIC_HAVE_64B_OPS
#ifndef atomic_fetch_and_64
static inline unsigned long long
atomic_fetch_and_64(union atomic_ptr_64 ptr, union atomic_val_64 val,
                    int memorder)
{
    return __atomic_fetch_and(ptr.ull_ptr, val.ull, memorder);
}
#endif /* atomic_fetch_and_64 */
#else /* ATOMIC_HAVE_64B_OPS */
/* Without 64-bit support, the 64-bit op selects the invalid-type error */
#define atomic_fetch_and_64 atomic_invalid_type
#endif /* ATOMIC_HAVE_64B_OPS */

/* long and pointer variants follow the data model */
#ifdef __LP64__
#define atomic_fetch_and_ul atomic_fetch_and_64
#else /* __LP64__ */
#define atomic_fetch_and_ul atomic_fetch_and_32
#endif /* __LP64__ */

#define atomic_fetch_and_ptr atomic_fetch_and_ul
  250. /* atomic_fetch_or */
  251. #ifndef atomic_fetch_or_32
  252. static inline unsigned int
  253. atomic_fetch_or_32(union atomic_ptr_32 ptr, union atomic_val_32 val,
  254. int memorder)
  255. {
  256. return __atomic_fetch_or(ptr.ui_ptr, val.ui, memorder);
  257. }
  258. #endif /* atomic_fetch_or_32 */
  259. #ifdef ATOMIC_HAVE_64B_OPS
  260. #ifndef atomic_fetch_or_64
  261. static inline unsigned long long
  262. atomic_fetch_or_64(union atomic_ptr_64 ptr, union atomic_val_64 val,
  263. int memorder)
  264. {
  265. return __atomic_fetch_or(ptr.ull_ptr, val.ull, memorder);
  266. }
  267. #endif /* atomic_fetch_or_64 */
  268. #else /* ATOMIC_HAVE_64B_OPS */
  269. #define atomic_or_64 atomic_invalid_type
  270. #endif /* ATOMIC_HAVE_64B_OPS */
  271. #ifdef __LP64__
  272. #define atomic_fetch_or_ul atomic_fetch_or_64
  273. #else /* __LP64__ */
  274. #define atomic_fetch_or_ul atomic_fetch_or_32
  275. #endif /* __LP64__ */
  276. #define atomic_fetch_or_ptr atomic_fetch_or_ul
  277. /* atomic_fetch_xor */
  278. #ifndef atomic_fetch_xor_32
  279. static inline unsigned int
  280. atomic_fetch_xor_32(union atomic_ptr_32 ptr, union atomic_val_32 val,
  281. int memorder)
  282. {
  283. return __atomic_fetch_xor(ptr.ui_ptr, val.ui, memorder);
  284. }
  285. #endif /* atomic_fetch_xor_32 */
  286. #ifdef ATOMIC_HAVE_64B_OPS
  287. #ifndef atomic_fetch_xor_64
  288. static inline unsigned long long
  289. atomic_fetch_xor_64(union atomic_ptr_64 ptr, union atomic_val_64 val,
  290. int memorder)
  291. {
  292. return __atomic_fetch_xor(ptr.ull_ptr, val.ull, memorder);
  293. }
  294. #endif /* atomic_fetch_xor_64 */
  295. #else /* ATOMIC_HAVE_64B_OPS */
  296. #define atomic_xor_64 atomic_invalid_type
  297. #endif /* ATOMIC_HAVE_64B_OPS */
  298. #ifdef __LP64__
  299. #define atomic_fetch_xor_ul atomic_fetch_xor_64
  300. #else /* __LP64__ */
  301. #define atomic_fetch_xor_ul atomic_fetch_xor_32
  302. #endif /* __LP64__ */
  303. #define atomic_fetch_xor_ptr atomic_fetch_xor_ul
/* atomic_add */

/*
 * Atomically add val to *ptr, discarding the result.
 * Architecture code may override by defining the atomic_add_32 macro.
 */
#ifndef atomic_add_32
static inline void
atomic_add_32(union atomic_ptr_32 ptr, union atomic_val_32 val, int memorder)
{
    __atomic_add_fetch(ptr.ui_ptr, val.ui, memorder);
}
#endif /* atomic_add_32 */

#ifdef ATOMIC_HAVE_64B_OPS
#ifndef atomic_add_64
static inline void
atomic_add_64(union atomic_ptr_64 ptr, union atomic_val_64 val, int memorder)
{
    __atomic_add_fetch(ptr.ull_ptr, val.ull, memorder);
}
#endif /* atomic_add_64 */
#else /* ATOMIC_HAVE_64B_OPS */
/* Without 64-bit support, the 64-bit op selects the invalid-type error */
#define atomic_add_64 atomic_invalid_type
#endif /* ATOMIC_HAVE_64B_OPS */

/* long and pointer variants follow the data model */
#ifdef __LP64__
#define atomic_add_ul atomic_add_64
#else /* __LP64__ */
#define atomic_add_ul atomic_add_32
#endif /* __LP64__ */

#define atomic_add_ptr atomic_add_ul
/* atomic_sub */

/*
 * Atomically subtract val from *ptr, discarding the result.
 * Architecture code may override by defining the atomic_sub_32 macro.
 */
#ifndef atomic_sub_32
static inline void
atomic_sub_32(union atomic_ptr_32 ptr, union atomic_val_32 val, int memorder)
{
    __atomic_sub_fetch(ptr.ui_ptr, val.ui, memorder);
}
#endif /* atomic_sub_32 */

#ifdef ATOMIC_HAVE_64B_OPS
#ifndef atomic_sub_64
static inline void
atomic_sub_64(union atomic_ptr_64 ptr, union atomic_val_64 val, int memorder)
{
    __atomic_sub_fetch(ptr.ull_ptr, val.ull, memorder);
}
#endif /* atomic_sub_64 */
#else /* ATOMIC_HAVE_64B_OPS */
/* Without 64-bit support, the 64-bit op selects the invalid-type error */
#define atomic_sub_64 atomic_invalid_type
#endif /* ATOMIC_HAVE_64B_OPS */

/* long and pointer variants follow the data model */
#ifdef __LP64__
#define atomic_sub_ul atomic_sub_64
#else /* __LP64__ */
#define atomic_sub_ul atomic_sub_32
#endif /* __LP64__ */

#define atomic_sub_ptr atomic_sub_ul
/* atomic_and */

/*
 * Atomically AND val into *ptr, discarding the result.
 * Architecture code may override by defining the atomic_and_32 macro.
 */
#ifndef atomic_and_32
static inline void
atomic_and_32(union atomic_ptr_32 ptr, union atomic_val_32 val, int memorder)
{
    __atomic_and_fetch(ptr.ui_ptr, val.ui, memorder);
}
#endif /* atomic_and_32 */

#ifdef ATOMIC_HAVE_64B_OPS
#ifndef atomic_and_64
static inline void
atomic_and_64(union atomic_ptr_64 ptr, union atomic_val_64 val, int memorder)
{
    __atomic_and_fetch(ptr.ull_ptr, val.ull, memorder);
}
#endif /* atomic_and_64 */
#else /* ATOMIC_HAVE_64B_OPS */
/* Without 64-bit support, the 64-bit op selects the invalid-type error */
#define atomic_and_64 atomic_invalid_type
#endif /* ATOMIC_HAVE_64B_OPS */

/* long and pointer variants follow the data model */
#ifdef __LP64__
#define atomic_and_ul atomic_and_64
#else /* __LP64__ */
#define atomic_and_ul atomic_and_32
#endif /* __LP64__ */

#define atomic_and_ptr atomic_and_ul
/* atomic_or */

/*
 * Atomically OR val into *ptr, discarding the result.
 * Architecture code may override by defining the atomic_or_32 macro.
 */
#ifndef atomic_or_32
static inline void
atomic_or_32(union atomic_ptr_32 ptr, union atomic_val_32 val, int memorder)
{
    __atomic_or_fetch(ptr.ui_ptr, val.ui, memorder);
}
#endif /* atomic_or_32 */

#ifdef ATOMIC_HAVE_64B_OPS
#ifndef atomic_or_64
static inline void
atomic_or_64(union atomic_ptr_64 ptr, union atomic_val_64 val, int memorder)
{
    __atomic_or_fetch(ptr.ull_ptr, val.ull, memorder);
}
#endif /* atomic_or_64 */
#else /* ATOMIC_HAVE_64B_OPS */
/* Without 64-bit support, the 64-bit op selects the invalid-type error */
#define atomic_or_64 atomic_invalid_type
#endif /* ATOMIC_HAVE_64B_OPS */

/* long and pointer variants follow the data model */
#ifdef __LP64__
#define atomic_or_ul atomic_or_64
#else /* __LP64__ */
#define atomic_or_ul atomic_or_32
#endif /* __LP64__ */

#define atomic_or_ptr atomic_or_ul
/* atomic_xor */

/*
 * Atomically XOR val into *ptr, discarding the result.
 * Architecture code may override by defining the atomic_xor_32 macro.
 */
#ifndef atomic_xor_32
static inline void
atomic_xor_32(union atomic_ptr_32 ptr, union atomic_val_32 val, int memorder)
{
    __atomic_xor_fetch(ptr.ui_ptr, val.ui, memorder);
}
#endif /* atomic_xor_32 */

#ifdef ATOMIC_HAVE_64B_OPS
#ifndef atomic_xor_64
static inline void
atomic_xor_64(union atomic_ptr_64 ptr, union atomic_val_64 val, int memorder)
{
    __atomic_xor_fetch(ptr.ull_ptr, val.ull, memorder);
}
#endif /* atomic_xor_64 */
#else /* ATOMIC_HAVE_64B_OPS */
/* Without 64-bit support, the 64-bit op selects the invalid-type error */
#define atomic_xor_64 atomic_invalid_type
#endif /* ATOMIC_HAVE_64B_OPS */

/* long and pointer variants follow the data model */
#ifdef __LP64__
#define atomic_xor_ul atomic_xor_64
#else /* __LP64__ */
#define atomic_xor_ul atomic_xor_32
#endif /* __LP64__ */

#define atomic_xor_ptr atomic_xor_ul

#endif /* KERN_ATOMIC_I_H */