atomic.h

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>

#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i) { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v) READ_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i) WRITE_ONCE((v)->counter, (i))

#define ATOMIC_OP(op, c_op, asm_op) \
static __inline__ void atomic_##op(int i, atomic_t * v) \
{ \
        if (kernel_uses_llsc && R10000_LLSC_WAR) { \
                int temp; \
\
                __asm__ __volatile__( \
                " .set arch=r4000 \n" \
                "1: ll %0, %1 # atomic_" #op " \n" \
                " " #asm_op " %0, %2 \n" \
                " sc %0, %1 \n" \
                " beqzl %0, 1b \n" \
                " .set mips0 \n" \
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
                : "Ir" (i)); \
        } else if (kernel_uses_llsc) { \
                int temp; \
\
                do { \
                        __asm__ __volatile__( \
                        " .set "MIPS_ISA_LEVEL" \n" \
                        " ll %0, %1 # atomic_" #op "\n" \
                        " " #asm_op " %0, %2 \n" \
                        " sc %0, %1 \n" \
                        " .set mips0 \n" \
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
                        : "Ir" (i)); \
                } while (unlikely(!temp)); \
        } else { \
                unsigned long flags; \
\
                raw_local_irq_save(flags); \
                v->counter c_op i; \
                raw_local_irq_restore(flags); \
        } \
}
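
/*
 * Illustrative sketch only, not part of the generated interface: in the
 * LL/SC branches above, ll loads v->counter and opens a "linked" access,
 * the operation is applied to the loaded value, and sc stores the result
 * back only if nothing else wrote the location since the ll.  sc leaves 1
 * in its register on success and 0 on failure, so the beqzl / do-while
 * loop retries until the update lands.  ATOMIC_OP(add, +=, addu) below
 * generates atomic_add(), used roughly as (hypothetical variable name):
 *
 *      static atomic_t example_refs = ATOMIC_INIT(0);
 *      atomic_add(3, &example_refs);           counter is now 3
 *      atomic_sub(1, &example_refs);           counter is now 2
 */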

#define ATOMIC_OP_RETURN(op, c_op, asm_op) \
static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v) \
{ \
        int result; \
\
        if (kernel_uses_llsc && R10000_LLSC_WAR) { \
                int temp; \
\
                __asm__ __volatile__( \
                " .set arch=r4000 \n" \
                "1: ll %1, %2 # atomic_" #op "_return \n" \
                " " #asm_op " %0, %1, %3 \n" \
                " sc %0, %2 \n" \
                " beqzl %0, 1b \n" \
                " " #asm_op " %0, %1, %3 \n" \
                " .set mips0 \n" \
                : "=&r" (result), "=&r" (temp), \
                  "+" GCC_OFF_SMALL_ASM() (v->counter) \
                : "Ir" (i)); \
        } else if (kernel_uses_llsc) { \
                int temp; \
\
                do { \
                        __asm__ __volatile__( \
                        " .set "MIPS_ISA_LEVEL" \n" \
                        " ll %1, %2 # atomic_" #op "_return \n" \
                        " " #asm_op " %0, %1, %3 \n" \
                        " sc %0, %2 \n" \
                        " .set mips0 \n" \
                        : "=&r" (result), "=&r" (temp), \
                          "+" GCC_OFF_SMALL_ASM() (v->counter) \
                        : "Ir" (i)); \
                } while (unlikely(!result)); \
\
                result = temp; result c_op i; \
        } else { \
                unsigned long flags; \
\
                raw_local_irq_save(flags); \
                result = v->counter; \
                result c_op i; \
                v->counter = result; \
                raw_local_irq_restore(flags); \
        } \
\
        return result; \
}

#define ATOMIC_FETCH_OP(op, c_op, asm_op) \
static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v) \
{ \
        int result; \
\
        if (kernel_uses_llsc && R10000_LLSC_WAR) { \
                int temp; \
\
                __asm__ __volatile__( \
                " .set arch=r4000 \n" \
                "1: ll %1, %2 # atomic_fetch_" #op " \n" \
                " " #asm_op " %0, %1, %3 \n" \
                " sc %0, %2 \n" \
                " beqzl %0, 1b \n" \
                " move %0, %1 \n" \
                " .set mips0 \n" \
                : "=&r" (result), "=&r" (temp), \
                  "+" GCC_OFF_SMALL_ASM() (v->counter) \
                : "Ir" (i)); \
        } else if (kernel_uses_llsc) { \
                int temp; \
\
                do { \
                        __asm__ __volatile__( \
                        " .set "MIPS_ISA_LEVEL" \n" \
                        " ll %1, %2 # atomic_fetch_" #op " \n" \
                        " " #asm_op " %0, %1, %3 \n" \
                        " sc %0, %2 \n" \
                        " .set mips0 \n" \
                        : "=&r" (result), "=&r" (temp), \
                          "+" GCC_OFF_SMALL_ASM() (v->counter) \
                        : "Ir" (i)); \
                } while (unlikely(!result)); \
\
                result = temp; \
        } else { \
                unsigned long flags; \
\
                raw_local_irq_save(flags); \
                result = v->counter; \
                v->counter c_op i; \
                raw_local_irq_restore(flags); \
        } \
\
        return result; \
}

#define ATOMIC_OPS(op, c_op, asm_op) \
        ATOMIC_OP(op, c_op, asm_op) \
        ATOMIC_OP_RETURN(op, c_op, asm_op) \
        ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

#define atomic_add_return_relaxed atomic_add_return_relaxed
#define atomic_sub_return_relaxed atomic_sub_return_relaxed
#define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
#define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed

#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op) \
        ATOMIC_OP(op, c_op, asm_op) \
        ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(or, |=, or)
ATOMIC_OPS(xor, ^=, xor)

#define atomic_fetch_and_relaxed atomic_fetch_and_relaxed
#define atomic_fetch_or_relaxed atomic_fetch_or_relaxed
#define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
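
/*
 * Note that the generators above only define the _relaxed forms of the
 * return/fetch operations.  The fully ordered variants (atomic_add_return(),
 * atomic_fetch_add(), ...) are expected to be supplied by the generic
 * <linux/atomic.h> layer, which wraps the _relaxed versions in the
 * appropriate memory barriers.  Rough sketch of the resulting calls:
 *
 *      old = atomic_fetch_add_relaxed(1, &v);  no ordering implied
 *      new = atomic_add_return(1, &v);         fully ordered wrapper
 */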

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                " .set arch=r4000 \n"
                "1: ll %1, %2 # atomic_sub_if_positive\n"
                " subu %0, %1, %3 \n"
                " bltz %0, 1f \n"
                " sc %0, %2 \n"
                " .set noreorder \n"
                " beqzl %0, 1b \n"
                " subu %0, %1, %3 \n"
                " .set reorder \n"
                "1: \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                int temp;

                __asm__ __volatile__(
                " .set "MIPS_ISA_LEVEL" \n"
                "1: ll %1, %2 # atomic_sub_if_positive\n"
                " subu %0, %1, %3 \n"
                " bltz %0, 1f \n"
                " sc %0, %2 \n"
                " .set noreorder \n"
                " beqz %0, 1b \n"
                " subu %0, %1, %3 \n"
                " .set reorder \n"
                "1: \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}
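
/*
 * Usage sketch (illustrative only): atomic_sub_if_positive() commits the
 * subtraction only when the result stays non-negative, which makes it
 * suitable for counting down a bounded resource, e.g. with a hypothetical
 * field name:
 *
 *      if (atomic_sub_if_positive(1, &pool->available) < 0)
 *              return -EBUSY;                  nothing left, counter unchanged
 */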

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;

        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c;
}
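
/*
 * Usage sketch (illustrative only): __atomic_add_unless() is the building
 * block for "take a reference unless the object is already dead" patterns;
 * the generic atomic layer is expected to provide atomic_add_unless() and
 * atomic_inc_not_zero() on top of it.  With a hypothetical field name:
 *
 *      if (__atomic_add_unless(&obj->refs, 1, 0) == 0)
 *              return NULL;                    was already zero, no reference taken
 */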

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
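
/*
 * Usage sketch (illustrative only): the test macros above make the classic
 * reference-count release pattern a single atomic step, e.g. with
 * hypothetical names:
 *
 *      if (atomic_dec_and_test(&obj->refcount))
 *              example_free_obj(obj);          last reference dropped
 */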

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i) { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v) READ_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i) WRITE_ONCE((v)->counter, (i))

#define ATOMIC64_OP(op, c_op, asm_op) \
static __inline__ void atomic64_##op(long i, atomic64_t * v) \
{ \
        if (kernel_uses_llsc && R10000_LLSC_WAR) { \
                long temp; \
\
                __asm__ __volatile__( \
                " .set arch=r4000 \n" \
                "1: lld %0, %1 # atomic64_" #op " \n" \
                " " #asm_op " %0, %2 \n" \
                " scd %0, %1 \n" \
                " beqzl %0, 1b \n" \
                " .set mips0 \n" \
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
                : "Ir" (i)); \
        } else if (kernel_uses_llsc) { \
                long temp; \
\
                do { \
                        __asm__ __volatile__( \
                        " .set "MIPS_ISA_LEVEL" \n" \
                        " lld %0, %1 # atomic64_" #op "\n" \
                        " " #asm_op " %0, %2 \n" \
                        " scd %0, %1 \n" \
                        " .set mips0 \n" \
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
                        : "Ir" (i)); \
                } while (unlikely(!temp)); \
        } else { \
                unsigned long flags; \
\
                raw_local_irq_save(flags); \
                v->counter c_op i; \
                raw_local_irq_restore(flags); \
        } \
}
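
/*
 * The 64-bit generators mirror the 32-bit ones above: lld/scd operate on a
 * doubleword instead of a word, and the d-prefixed arithmetic (daddu/dsubu,
 * used by the ATOMIC64_OPS() instantiations below) keeps the full 64-bit
 * value.  Illustrative use (hypothetical variable name):
 *
 *      static atomic64_t example_bytes = ATOMIC64_INIT(0);
 *      atomic64_add(4096, &example_bytes);
 */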

#define ATOMIC64_OP_RETURN(op, c_op, asm_op) \
static __inline__ long atomic64_##op##_return_relaxed(long i, atomic64_t * v) \
{ \
        long result; \
\
        if (kernel_uses_llsc && R10000_LLSC_WAR) { \
                long temp; \
\
                __asm__ __volatile__( \
                " .set arch=r4000 \n" \
                "1: lld %1, %2 # atomic64_" #op "_return\n" \
                " " #asm_op " %0, %1, %3 \n" \
                " scd %0, %2 \n" \
                " beqzl %0, 1b \n" \
                " " #asm_op " %0, %1, %3 \n" \
                " .set mips0 \n" \
                : "=&r" (result), "=&r" (temp), \
                  "+" GCC_OFF_SMALL_ASM() (v->counter) \
                : "Ir" (i)); \
        } else if (kernel_uses_llsc) { \
                long temp; \
\
                do { \
                        __asm__ __volatile__( \
                        " .set "MIPS_ISA_LEVEL" \n" \
                        " lld %1, %2 # atomic64_" #op "_return\n" \
                        " " #asm_op " %0, %1, %3 \n" \
                        " scd %0, %2 \n" \
                        " .set mips0 \n" \
                        : "=&r" (result), "=&r" (temp), \
                          "=" GCC_OFF_SMALL_ASM() (v->counter) \
                        : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) \
                        : "memory"); \
                } while (unlikely(!result)); \
\
                result = temp; result c_op i; \
        } else { \
                unsigned long flags; \
\
                raw_local_irq_save(flags); \
                result = v->counter; \
                result c_op i; \
                v->counter = result; \
                raw_local_irq_restore(flags); \
        } \
\
        return result; \
}

#define ATOMIC64_FETCH_OP(op, c_op, asm_op) \
static __inline__ long atomic64_fetch_##op##_relaxed(long i, atomic64_t * v) \
{ \
        long result; \
\
        if (kernel_uses_llsc && R10000_LLSC_WAR) { \
                long temp; \
\
                __asm__ __volatile__( \
                " .set arch=r4000 \n" \
                "1: lld %1, %2 # atomic64_fetch_" #op "\n" \
                " " #asm_op " %0, %1, %3 \n" \
                " scd %0, %2 \n" \
                " beqzl %0, 1b \n" \
                " move %0, %1 \n" \
                " .set mips0 \n" \
                : "=&r" (result), "=&r" (temp), \
                  "+" GCC_OFF_SMALL_ASM() (v->counter) \
                : "Ir" (i)); \
        } else if (kernel_uses_llsc) { \
                long temp; \
\
                do { \
                        __asm__ __volatile__( \
                        " .set "MIPS_ISA_LEVEL" \n" \
                        " lld %1, %2 # atomic64_fetch_" #op "\n" \
                        " " #asm_op " %0, %1, %3 \n" \
                        " scd %0, %2 \n" \
                        " .set mips0 \n" \
                        : "=&r" (result), "=&r" (temp), \
                          "=" GCC_OFF_SMALL_ASM() (v->counter) \
                        : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) \
                        : "memory"); \
                } while (unlikely(!result)); \
\
                result = temp; \
        } else { \
                unsigned long flags; \
\
                raw_local_irq_save(flags); \
                result = v->counter; \
                v->counter c_op i; \
                raw_local_irq_restore(flags); \
        } \
\
        return result; \
}

#define ATOMIC64_OPS(op, c_op, asm_op) \
        ATOMIC64_OP(op, c_op, asm_op) \
        ATOMIC64_OP_RETURN(op, c_op, asm_op) \
        ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

#define atomic64_add_return_relaxed atomic64_add_return_relaxed
#define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
#define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, c_op, asm_op) \
        ATOMIC64_OP(op, c_op, asm_op) \
        ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(and, &=, and)
ATOMIC64_OPS(or, |=, or)
ATOMIC64_OPS(xor, ^=, xor)

#define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
#define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 *                            variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                " .set arch=r4000 \n"
                "1: lld %1, %2 # atomic64_sub_if_positive\n"
                " dsubu %0, %1, %3 \n"
                " bltz %0, 1f \n"
                " scd %0, %2 \n"
                " .set noreorder \n"
                " beqzl %0, 1b \n"
                " dsubu %0, %1, %3 \n"
                " .set reorder \n"
                "1: \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp),
                  "=" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;

                __asm__ __volatile__(
                " .set "MIPS_ISA_LEVEL" \n"
                "1: lld %1, %2 # atomic64_sub_if_positive\n"
                " dsubu %0, %1, %3 \n"
                " bltz %0, 1f \n"
                " scd %0, %2 \n"
                " .set noreorder \n"
                " beqz %0, 1b \n"
                " dsubu %0, %1, %3 \n"
                " .set reorder \n"
                "1: \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

#define atomic64_cmpxchg(v, o, n) \
        ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true iff @v was not @u.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;

        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}
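
/*
 * Usage sketch (illustrative only): unlike the 32-bit __atomic_add_unless(),
 * atomic64_add_unless() returns a boolean, so atomic64_inc_not_zero() below
 * can use it directly.  With a hypothetical field name:
 *
 *      if (!atomic64_inc_not_zero(&obj->refs64))
 *              return NULL;                    was already zero, no reference taken
 */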

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */