/* arch/riscv/include/asm/cmpxchg.h */
/*
 * Copyright (C) 2014 Regents of the University of California
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, version 2.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */
#ifndef _ASM_RISCV_CMPXCHG_H
#define _ASM_RISCV_CMPXCHG_H

#include <linux/bug.h>

#include <asm/barrier.h>
#include <asm/fence.h>
/*
 * __xchg_relaxed() -- atomically swap *ptr with new; RELAXED, i.e. no
 * ordering is implied against surrounding memory accesses.
 *
 * Implemented with a single AMO swap instruction.  Only 32-bit (size 4)
 * and 64-bit (size 8) operands are supported; any other size is a
 * compile-time error via BUILD_BUG().  Evaluates to the previous value
 * of *ptr.  ptr and new are each evaluated exactly once.
 */
#define __xchg_relaxed(ptr, new, size) \
({ \
        __typeof__(ptr) __ptr = (ptr); \
        __typeof__(new) __new = (new); \
        __typeof__(*(ptr)) __ret; \
        switch (size) { \
        case 4: \
                __asm__ __volatile__ ( \
                        " amoswap.w %0, %2, %1\n" \
                        : "=r" (__ret), "+A" (*__ptr) \
                        : "r" (__new) \
                        : "memory"); \
                break; \
        case 8: \
                __asm__ __volatile__ ( \
                        " amoswap.d %0, %2, %1\n" \
                        : "=r" (__ret), "+A" (*__ptr) \
                        : "r" (__new) \
                        : "memory"); \
                break; \
        default: \
                BUILD_BUG(); \
        } \
        __ret; \
})
  43. #define xchg_relaxed(ptr, x) \
  44. ({ \
  45. __typeof__(*(ptr)) _x_ = (x); \
  46. (__typeof__(*(ptr))) __xchg_relaxed((ptr), \
  47. _x_, sizeof(*(ptr))); \
  48. })
/*
 * __xchg_acquire() -- atomic swap with ACQUIRE semantics: the AMO swap
 * is followed by RISCV_ACQUIRE_BARRIER (from <asm/fence.h>), so later
 * memory accesses cannot be reordered before the exchange.
 *
 * Only sizes 4 and 8 are supported (BUILD_BUG() otherwise).  Evaluates
 * to the previous value of *ptr.
 */
#define __xchg_acquire(ptr, new, size) \
({ \
        __typeof__(ptr) __ptr = (ptr); \
        __typeof__(new) __new = (new); \
        __typeof__(*(ptr)) __ret; \
        switch (size) { \
        case 4: \
                __asm__ __volatile__ ( \
                        " amoswap.w %0, %2, %1\n" \
                        RISCV_ACQUIRE_BARRIER \
                        : "=r" (__ret), "+A" (*__ptr) \
                        : "r" (__new) \
                        : "memory"); \
                break; \
        case 8: \
                __asm__ __volatile__ ( \
                        " amoswap.d %0, %2, %1\n" \
                        RISCV_ACQUIRE_BARRIER \
                        : "=r" (__ret), "+A" (*__ptr) \
                        : "r" (__new) \
                        : "memory"); \
                break; \
        default: \
                BUILD_BUG(); \
        } \
        __ret; \
})
  76. #define xchg_acquire(ptr, x) \
  77. ({ \
  78. __typeof__(*(ptr)) _x_ = (x); \
  79. (__typeof__(*(ptr))) __xchg_acquire((ptr), \
  80. _x_, sizeof(*(ptr))); \
  81. })
/*
 * __xchg_release() -- atomic swap with RELEASE semantics:
 * RISCV_RELEASE_BARRIER (from <asm/fence.h>) precedes the AMO swap, so
 * earlier memory accesses cannot be reordered past the exchange.
 *
 * Only sizes 4 and 8 are supported (BUILD_BUG() otherwise).  Evaluates
 * to the previous value of *ptr.
 */
#define __xchg_release(ptr, new, size) \
({ \
        __typeof__(ptr) __ptr = (ptr); \
        __typeof__(new) __new = (new); \
        __typeof__(*(ptr)) __ret; \
        switch (size) { \
        case 4: \
                __asm__ __volatile__ ( \
                        RISCV_RELEASE_BARRIER \
                        " amoswap.w %0, %2, %1\n" \
                        : "=r" (__ret), "+A" (*__ptr) \
                        : "r" (__new) \
                        : "memory"); \
                break; \
        case 8: \
                __asm__ __volatile__ ( \
                        RISCV_RELEASE_BARRIER \
                        " amoswap.d %0, %2, %1\n" \
                        : "=r" (__ret), "+A" (*__ptr) \
                        : "r" (__new) \
                        : "memory"); \
                break; \
        default: \
                BUILD_BUG(); \
        } \
        __ret; \
})
  109. #define xchg_release(ptr, x) \
  110. ({ \
  111. __typeof__(*(ptr)) _x_ = (x); \
  112. (__typeof__(*(ptr))) __xchg_release((ptr), \
  113. _x_, sizeof(*(ptr))); \
  114. })
/*
 * __xchg() -- fully-ordered atomic swap: uses the .aqrl
 * (acquire+release) form of the AMO, ordering the exchange against
 * both earlier and later memory accesses.
 *
 * Only sizes 4 and 8 are supported (BUILD_BUG() otherwise).  Evaluates
 * to the previous value of *ptr.
 */
#define __xchg(ptr, new, size) \
({ \
        __typeof__(ptr) __ptr = (ptr); \
        __typeof__(new) __new = (new); \
        __typeof__(*(ptr)) __ret; \
        switch (size) { \
        case 4: \
                __asm__ __volatile__ ( \
                        " amoswap.w.aqrl %0, %2, %1\n" \
                        : "=r" (__ret), "+A" (*__ptr) \
                        : "r" (__new) \
                        : "memory"); \
                break; \
        case 8: \
                __asm__ __volatile__ ( \
                        " amoswap.d.aqrl %0, %2, %1\n" \
                        : "=r" (__ret), "+A" (*__ptr) \
                        : "r" (__new) \
                        : "memory"); \
                break; \
        default: \
                BUILD_BUG(); \
        } \
        __ret; \
})
  140. #define xchg(ptr, x) \
  141. ({ \
  142. __typeof__(*(ptr)) _x_ = (x); \
  143. (__typeof__(*(ptr))) __xchg((ptr), _x_, sizeof(*(ptr))); \
  144. })
/* xchg32() -- fully-ordered exchange restricted to 32-bit operands;
 * any other operand size is rejected at compile time. */
#define xchg32(ptr, x) \
({ \
        BUILD_BUG_ON(sizeof(*(ptr)) != 4); \
        xchg((ptr), (x)); \
})
/* xchg64() -- fully-ordered exchange restricted to 64-bit operands;
 * any other operand size is rejected at compile time. */
#define xchg64(ptr, x) \
({ \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
        xchg((ptr), (x)); \
})
/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
/*
 * __cmpxchg_relaxed() -- LR/SC compare-and-exchange with no ordering.
 *
 * Loads *ptr (lr); if it differs from old, bails out.  Otherwise tries
 * to store new (sc) and retries the whole sequence if the
 * store-conditional fails (__rc != 0).  Evaluates to the loaded value,
 * which equals old exactly when the exchange happened.  Only sizes 4
 * and 8 are supported (BUILD_BUG() otherwise).
 *
 * The "rJ" constraints together with the %z modifier let the compiler
 * use register x0 directly when old/new are the constant zero.
 */
#define __cmpxchg_relaxed(ptr, old, new, size) \
({ \
        __typeof__(ptr) __ptr = (ptr); \
        __typeof__(*(ptr)) __old = (old); \
        __typeof__(*(ptr)) __new = (new); \
        __typeof__(*(ptr)) __ret; \
        register unsigned int __rc; /* sc result: 0 on success */ \
        switch (size) { \
        case 4: \
                __asm__ __volatile__ ( \
                        "0: lr.w %0, %2\n" \
                        " bne %0, %z3, 1f\n" \
                        " sc.w %1, %z4, %2\n" \
                        " bnez %1, 0b\n" \
                        "1:\n" \
                        : "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr) \
                        : "rJ" (__old), "rJ" (__new) \
                        : "memory"); \
                break; \
        case 8: \
                __asm__ __volatile__ ( \
                        "0: lr.d %0, %2\n" \
                        " bne %0, %z3, 1f\n" \
                        " sc.d %1, %z4, %2\n" \
                        " bnez %1, 0b\n" \
                        "1:\n" \
                        : "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr) \
                        : "rJ" (__old), "rJ" (__new) \
                        : "memory"); \
                break; \
        default: \
                BUILD_BUG(); \
        } \
        __ret; \
})
  195. #define cmpxchg_relaxed(ptr, o, n) \
  196. ({ \
  197. __typeof__(*(ptr)) _o_ = (o); \
  198. __typeof__(*(ptr)) _n_ = (n); \
  199. (__typeof__(*(ptr))) __cmpxchg_relaxed((ptr), \
  200. _o_, _n_, sizeof(*(ptr))); \
  201. })
/*
 * __cmpxchg_acquire() -- LR/SC compare-and-exchange with ACQUIRE
 * semantics on success.
 *
 * Same LR/SC loop as __cmpxchg_relaxed(), but RISCV_ACQUIRE_BARRIER is
 * placed after the successful sc and before label 1 -- so the barrier
 * is skipped when the comparison fails (the bne branches past it), and
 * executes only on a successful exchange.  Only sizes 4 and 8 are
 * supported (BUILD_BUG() otherwise).
 */
#define __cmpxchg_acquire(ptr, old, new, size) \
({ \
        __typeof__(ptr) __ptr = (ptr); \
        __typeof__(*(ptr)) __old = (old); \
        __typeof__(*(ptr)) __new = (new); \
        __typeof__(*(ptr)) __ret; \
        register unsigned int __rc; /* sc result: 0 on success */ \
        switch (size) { \
        case 4: \
                __asm__ __volatile__ ( \
                        "0: lr.w %0, %2\n" \
                        " bne %0, %z3, 1f\n" \
                        " sc.w %1, %z4, %2\n" \
                        " bnez %1, 0b\n" \
                        RISCV_ACQUIRE_BARRIER \
                        "1:\n" \
                        : "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr) \
                        : "rJ" (__old), "rJ" (__new) \
                        : "memory"); \
                break; \
        case 8: \
                __asm__ __volatile__ ( \
                        "0: lr.d %0, %2\n" \
                        " bne %0, %z3, 1f\n" \
                        " sc.d %1, %z4, %2\n" \
                        " bnez %1, 0b\n" \
                        RISCV_ACQUIRE_BARRIER \
                        "1:\n" \
                        : "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr) \
                        : "rJ" (__old), "rJ" (__new) \
                        : "memory"); \
                break; \
        default: \
                BUILD_BUG(); \
        } \
        __ret; \
})
  239. #define cmpxchg_acquire(ptr, o, n) \
  240. ({ \
  241. __typeof__(*(ptr)) _o_ = (o); \
  242. __typeof__(*(ptr)) _n_ = (n); \
  243. (__typeof__(*(ptr))) __cmpxchg_acquire((ptr), \
  244. _o_, _n_, sizeof(*(ptr))); \
  245. })
/*
 * __cmpxchg_release() -- LR/SC compare-and-exchange with RELEASE
 * semantics.
 *
 * RISCV_RELEASE_BARRIER is issued unconditionally before the LR/SC
 * loop, so all earlier memory accesses are ordered before the
 * (attempted) exchange.  Only sizes 4 and 8 are supported
 * (BUILD_BUG() otherwise).  Evaluates to the loaded value, which
 * equals old exactly when the exchange happened.
 */
#define __cmpxchg_release(ptr, old, new, size) \
({ \
        __typeof__(ptr) __ptr = (ptr); \
        __typeof__(*(ptr)) __old = (old); \
        __typeof__(*(ptr)) __new = (new); \
        __typeof__(*(ptr)) __ret; \
        register unsigned int __rc; /* sc result: 0 on success */ \
        switch (size) { \
        case 4: \
                __asm__ __volatile__ ( \
                        RISCV_RELEASE_BARRIER \
                        "0: lr.w %0, %2\n" \
                        " bne %0, %z3, 1f\n" \
                        " sc.w %1, %z4, %2\n" \
                        " bnez %1, 0b\n" \
                        "1:\n" \
                        : "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr) \
                        : "rJ" (__old), "rJ" (__new) \
                        : "memory"); \
                break; \
        case 8: \
                __asm__ __volatile__ ( \
                        RISCV_RELEASE_BARRIER \
                        "0: lr.d %0, %2\n" \
                        " bne %0, %z3, 1f\n" \
                        " sc.d %1, %z4, %2\n" \
                        " bnez %1, 0b\n" \
                        "1:\n" \
                        : "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr) \
                        : "rJ" (__old), "rJ" (__new) \
                        : "memory"); \
                break; \
        default: \
                BUILD_BUG(); \
        } \
        __ret; \
})
  283. #define cmpxchg_release(ptr, o, n) \
  284. ({ \
  285. __typeof__(*(ptr)) _o_ = (o); \
  286. __typeof__(*(ptr)) _n_ = (n); \
  287. (__typeof__(*(ptr))) __cmpxchg_release((ptr), \
  288. _o_, _n_, sizeof(*(ptr))); \
  289. })
/*
 * __cmpxchg() -- fully-ordered LR/SC compare-and-exchange.
 *
 * The store-conditional carries the .rl bit and a trailing
 * "fence rw, rw" runs after a successful exchange; both the fence and
 * the sc are skipped when the comparison fails (the bne branches to
 * label 1).  Only sizes 4 and 8 are supported (BUILD_BUG() otherwise).
 * Evaluates to the loaded value, which equals old exactly when the
 * exchange happened.
 */
#define __cmpxchg(ptr, old, new, size) \
({ \
        __typeof__(ptr) __ptr = (ptr); \
        __typeof__(*(ptr)) __old = (old); \
        __typeof__(*(ptr)) __new = (new); \
        __typeof__(*(ptr)) __ret; \
        register unsigned int __rc; /* sc result: 0 on success */ \
        switch (size) { \
        case 4: \
                __asm__ __volatile__ ( \
                        "0: lr.w %0, %2\n" \
                        " bne %0, %z3, 1f\n" \
                        " sc.w.rl %1, %z4, %2\n" \
                        " bnez %1, 0b\n" \
                        " fence rw, rw\n" \
                        "1:\n" \
                        : "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr) \
                        : "rJ" (__old), "rJ" (__new) \
                        : "memory"); \
                break; \
        case 8: \
                __asm__ __volatile__ ( \
                        "0: lr.d %0, %2\n" \
                        " bne %0, %z3, 1f\n" \
                        " sc.d.rl %1, %z4, %2\n" \
                        " bnez %1, 0b\n" \
                        " fence rw, rw\n" \
                        "1:\n" \
                        : "=&r" (__ret), "=&r" (__rc), "+A" (*__ptr) \
                        : "rJ" (__old), "rJ" (__new) \
                        : "memory"); \
                break; \
        default: \
                BUILD_BUG(); \
        } \
        __ret; \
})
  327. #define cmpxchg(ptr, o, n) \
  328. ({ \
  329. __typeof__(*(ptr)) _o_ = (o); \
  330. __typeof__(*(ptr)) _n_ = (n); \
  331. (__typeof__(*(ptr))) __cmpxchg((ptr), \
  332. _o_, _n_, sizeof(*(ptr))); \
  333. })
/* cmpxchg_local() -- cmpxchg with no ordering guarantees; maps
 * straight to the relaxed LR/SC implementation. */
#define cmpxchg_local(ptr, o, n) \
(__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))
/* cmpxchg32() -- fully-ordered cmpxchg restricted to 32-bit operands;
 * any other operand size is rejected at compile time. */
#define cmpxchg32(ptr, o, n) \
({ \
        BUILD_BUG_ON(sizeof(*(ptr)) != 4); \
        cmpxchg((ptr), (o), (n)); \
})
  341. #define cmpxchg32_local(ptr, o, n) \
  342. ({ \
  343. BUILD_BUG_ON(sizeof(*(ptr)) != 4); \
  344. cmpxchg_relaxed((ptr), (o), (n)) \
  345. })
/* cmpxchg64() -- fully-ordered cmpxchg restricted to 64-bit operands;
 * any other operand size is rejected at compile time. */
#define cmpxchg64(ptr, o, n) \
({ \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
        cmpxchg((ptr), (o), (n)); \
})
/* cmpxchg64_local() -- relaxed cmpxchg restricted to 64-bit operands;
 * any other operand size is rejected at compile time. */
#define cmpxchg64_local(ptr, o, n) \
({ \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
        cmpxchg_relaxed((ptr), (o), (n)); \
})
#endif /* _ASM_RISCV_CMPXCHG_H */