atomic-instrumented.h

/*
 * This file provides wrappers with KASAN instrumentation for atomic operations.
 * To use this functionality, an arch's atomic.h file needs to define all
 * atomic operations with an arch_ prefix (e.g. arch_atomic_read()) and include
 * this file at the end. This file then provides atomic_read() etc., which
 * forward to arch_atomic_read() etc. for the actual atomic operation.
 * Note: if an arch atomic operation is implemented by means of other atomic
 * operations (e.g. an atomic_read()/atomic_cmpxchg() loop), it needs to use
 * the arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid
 * double instrumentation.
 */
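
/*
 * As a rough sketch of the intended usage (this example is illustrative;
 * the arch_atomic_read() body is not taken from any particular port), an
 * arch's atomic.h would end up looking something like:
 *
 *	static __always_inline int arch_atomic_read(const atomic_t *v)
 *	{
 *		return READ_ONCE(v->counter);
 *	}
 *
 *	... all other arch_atomic*() operations ...
 *
 *	#include <asm-generic/atomic-instrumented.h>
 *
 * After the include, code uses the plain atomic_read() defined below, which
 * performs the KASAN check and then forwards to the arch_ implementation.
 */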

#ifndef _LINUX_ATOMIC_INSTRUMENTED_H
#define _LINUX_ATOMIC_INSTRUMENTED_H

#include <linux/build_bug.h>
#include <linux/kasan-checks.h>

static __always_inline int atomic_read(const atomic_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic_read(v);
}

static __always_inline s64 atomic64_read(const atomic64_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic64_read(v);
}

static __always_inline void atomic_set(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_set(v, i);
}

static __always_inline void atomic64_set(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_set(v, i);
}

static __always_inline int atomic_xchg(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_xchg(v, i);
}

static __always_inline s64 atomic64_xchg(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_xchg(v, i);
}

static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg(v, old, new);
}

static __always_inline s64 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg(v, old, new);
}
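
/*
 * The wrappers below are only defined when the arch provides the
 * corresponding arch_ operation; such optional operations are guarded by
 * #ifdef. Note that try_cmpxchg() also takes a pointer to the expected
 * value, so these wrappers instrument the access to *old as well as the
 * atomic variable itself.
 */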
#ifdef arch_atomic_try_cmpxchg
#define atomic_try_cmpxchg atomic_try_cmpxchg
static __always_inline bool atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_read(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#endif

#ifdef arch_atomic64_try_cmpxchg
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_read(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#endif

#ifdef arch_atomic_fetch_add_unless
#define atomic_fetch_add_unless atomic_fetch_add_unless
static __always_inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_unless(v, a, u);
}
#endif

#ifdef arch_atomic64_fetch_add_unless
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
static __always_inline s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_unless(v, a, u);
}
#endif

#ifdef arch_atomic_inc
#define atomic_inc atomic_inc
static __always_inline void atomic_inc(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_inc(v);
}
#endif

#ifdef arch_atomic64_inc
#define atomic64_inc atomic64_inc
static __always_inline void atomic64_inc(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}
#endif

#ifdef arch_atomic_dec
#define atomic_dec atomic_dec
static __always_inline void atomic_dec(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_dec(v);
}
#endif

#ifdef arch_atomic64_dec
#define atomic64_dec atomic64_dec
static __always_inline void atomic64_dec(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}
#endif

static __always_inline void atomic_add(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_add(i, v);
}

static __always_inline void atomic64_add(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_add(i, v);
}

static __always_inline void atomic_sub(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_sub(i, v);
}

static __always_inline void atomic64_sub(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_sub(i, v);
}

static __always_inline void atomic_and(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_and(i, v);
}

static __always_inline void atomic64_and(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_and(i, v);
}

static __always_inline void atomic_or(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_or(i, v);
}

static __always_inline void atomic64_or(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_or(i, v);
}

static __always_inline void atomic_xor(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_xor(i, v);
}

static __always_inline void atomic64_xor(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_xor(i, v);
}

#ifdef arch_atomic_inc_return
#define atomic_inc_return atomic_inc_return
static __always_inline int atomic_inc_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}
#endif

#ifdef arch_atomic64_inc_return
#define atomic64_inc_return atomic64_inc_return
static __always_inline s64 atomic64_inc_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}
#endif

#ifdef arch_atomic_dec_return
#define atomic_dec_return atomic_dec_return
static __always_inline int atomic_dec_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}
#endif

#ifdef arch_atomic64_dec_return
#define atomic64_dec_return atomic64_dec_return
static __always_inline s64 atomic64_dec_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}
#endif

#ifdef arch_atomic64_inc_not_zero
#define atomic64_inc_not_zero atomic64_inc_not_zero
static __always_inline bool atomic64_inc_not_zero(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}
#endif

#ifdef arch_atomic64_dec_if_positive
#define atomic64_dec_if_positive atomic64_dec_if_positive
static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}
#endif

#ifdef arch_atomic_dec_and_test
#define atomic_dec_and_test atomic_dec_and_test
static __always_inline bool atomic_dec_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}
#endif

#ifdef arch_atomic64_dec_and_test
#define atomic64_dec_and_test atomic64_dec_and_test
static __always_inline bool atomic64_dec_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}
#endif

#ifdef arch_atomic_inc_and_test
#define atomic_inc_and_test atomic_inc_and_test
static __always_inline bool atomic_inc_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}
#endif

#ifdef arch_atomic64_inc_and_test
#define atomic64_inc_and_test atomic64_inc_and_test
static __always_inline bool atomic64_inc_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}
#endif

static __always_inline int atomic_add_return(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_return(i, v);
}

static __always_inline s64 atomic64_add_return(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_return(i, v);
}

static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_return(i, v);
}

static __always_inline s64 atomic64_sub_return(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return(i, v);
}

static __always_inline int atomic_fetch_add(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add(i, v);
}

static __always_inline s64 atomic64_fetch_add(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add(i, v);
}

static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub(i, v);
}

static __always_inline s64 atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub(i, v);
}

static __always_inline int atomic_fetch_and(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and(i, v);
}

static __always_inline s64 atomic64_fetch_and(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and(i, v);
}

static __always_inline int atomic_fetch_or(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or(i, v);
}

static __always_inline s64 atomic64_fetch_or(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or(i, v);
}

static __always_inline int atomic_fetch_xor(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor(i, v);
}

static __always_inline s64 atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor(i, v);
}

#ifdef arch_atomic_sub_and_test
#define atomic_sub_and_test atomic_sub_and_test
static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}
#endif

#ifdef arch_atomic64_sub_and_test
#define atomic64_sub_and_test atomic64_sub_and_test
static __always_inline bool atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}
#endif

#ifdef arch_atomic_add_negative
#define atomic_add_negative atomic_add_negative
static __always_inline bool atomic_add_negative(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}
#endif

#ifdef arch_atomic64_add_negative
#define atomic64_add_negative atomic64_add_negative
static __always_inline bool atomic64_add_negative(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}
#endif
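
/*
 * The macros below instrument the plain (non-atomic_t) xchg()/cmpxchg()
 * family. Each macro evaluates its ptr argument exactly once into
 * __ai_ptr, both to avoid double evaluation of a side-effecting argument
 * and to guarantee the KASAN check and the arch_ call see the same
 * pointer.
 */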
#define xchg(ptr, new) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_xchg(__ai_ptr, (new)); \
})

#define cmpxchg(ptr, old, new) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg(__ai_ptr, (old), (new)); \
})

#define sync_cmpxchg(ptr, old, new) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_sync_cmpxchg(__ai_ptr, (old), (new)); \
})

#define cmpxchg_local(ptr, old, new) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg_local(__ai_ptr, (old), (new)); \
})

#define cmpxchg64(ptr, old, new) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64(__ai_ptr, (old), (new)); \
})

#define cmpxchg64_local(ptr, old, new) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64_local(__ai_ptr, (old), (new)); \
})
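
/*
 * cmpxchg_double() operates on a pair of adjacent machine words, which is
 * why the KASAN check below covers 2 * sizeof(*__ai_p1) bytes starting
 * at p1.
 */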
#define cmpxchg_double(p1, p2, o1, o2, n1, n2) \
({ \
	typeof(p1) __ai_p1 = (p1); \
	kasan_check_write(__ai_p1, 2 * sizeof(*__ai_p1)); \
	arch_cmpxchg_double(__ai_p1, (p2), (o1), (o2), (n1), (n2)); \
})

#define cmpxchg_double_local(p1, p2, o1, o2, n1, n2) \
({ \
	typeof(p1) __ai_p1 = (p1); \
	kasan_check_write(__ai_p1, 2 * sizeof(*__ai_p1)); \
	arch_cmpxchg_double_local(__ai_p1, (p2), (o1), (o2), (n1), (n2)); \
})

#endif /* _LINUX_ATOMIC_INSTRUMENTED_H */