atomic64_64.h

#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* The 64-bit atomic type */
#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline long atomic64_read(const atomic64_t *v)
{
	/*
	 * The volatile cast forces the compiler to emit a fresh load on
	 * every call; a naturally aligned 64-bit load is atomic on x86-64.
	 */
	return (*(volatile long *)&(v)->counter);
}

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long i)
{
	v->counter = i;
}

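/*
 * Example (illustrative only, not part of this header): declaring,
 * resetting and sampling a 64-bit counter. bytes_sent is a
 * hypothetical variable.
 *
 *	static atomic64_t bytes_sent = ATOMIC64_INIT(0);
 *
 *	void reset_stats(void)
 *	{
 *		atomic64_set(&bytes_sent, 0);
 *	}
 *
 *	long stats_snapshot(void)
 *	{
 *		return atomic64_read(&bytes_sent);
 *	}
 */
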
/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic64_add(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

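/*
 * Note on the asm above: LOCK_PREFIX (from <asm/alternative.h>) emits
 * the x86 "lock" prefix on SMP builds, making the read-modify-write of
 * the addq atomic across CPUs; on non-SMP builds it compiles to
 * nothing. The "er" constraint lets the compiler pass @i either in a
 * register or as a sign-extended 32-bit immediate.
 */
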
/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_sub_and_test(long i, atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "er" (i), "m" (v->counter) : "memory");
	return c;
}

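/*
 * The "sete" above samples the zero flag in the same asm statement as
 * the locked subtract, so the result reflects the value this CPU
 * produced; another CPU's update cannot slip in between the subtract
 * and the test.
 */
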
/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic64_dec_and_test(atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decq %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

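/*
 * Example (illustrative, not part of this header): the classic
 * reference-count release pattern. Exactly one caller observes the
 * transition to zero, so exactly one caller frees the object.
 * struct obj and obj_destroy() are hypothetical.
 *
 *	void obj_put(struct obj *o)
 *	{
 *		if (atomic64_dec_and_test(&o->refcnt))
 *			obj_destroy(o);
 *	}
 */
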
/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_inc_and_test(atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incq %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic64_add_negative(long i, atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "er" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline long atomic64_add_return(long i, atomic64_t *v)
{
	/* xadd() returns the old value, so old + @i is this add's result. */
	return i + xadd(&v->counter, i);
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}

#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))

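/*
 * Example (illustrative, not part of this header): because
 * atomic64_inc_return() yields the post-increment value, it can hand
 * out unique, monotonically increasing 64-bit IDs. next_id is a
 * hypothetical variable.
 *
 *	static atomic64_t next_id = ATOMIC64_INIT(0);
 *
 *	long alloc_id(void)
 *	{
 *		return atomic64_inc_return(&next_id);
 *	}
 */
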
static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline long atomic64_xchg(atomic64_t *v, long new)
{
	return xchg(&v->counter, new);
}

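/*
 * Example (illustrative, not part of this header): atomic64_cmpxchg()
 * is the building block for atomic operations this file does not
 * provide. The sketch below atomically raises @v to at least @new; if
 * the cmpxchg returns something other than the expected value, another
 * CPU won the race and the loop retries with the fresh value.
 * atomic64_max_sketch is a hypothetical helper.
 *
 *	static inline void atomic64_max_sketch(atomic64_t *v, long new)
 *	{
 *		long c = atomic64_read(v);
 *
 *		while (c < new) {
 *			long old = atomic64_cmpxchg(v, c, new);
 *			if (old == c)
 *				break;
 *			c = old;
 *		}
 *	}
 */
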
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was done (@v was not @u),
 * and zero otherwise.
 */
static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

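/*
 * Example (illustrative, not part of this header):
 * atomic64_inc_not_zero() is the usual way to take a reference to an
 * object that may be concurrently released: the increment succeeds
 * only if the count has not already dropped to zero. struct obj and
 * lookup_locked() are hypothetical.
 *
 *	struct obj *obj_get(void)
 *	{
 *		struct obj *o = lookup_locked();
 *
 *		if (o && !atomic64_inc_not_zero(&o->refcnt))
 *			o = NULL;
 *		return o;
 *	}
 */
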
/**
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static inline long atomic64_dec_if_positive(atomic64_t *v)
{
	long c, old, dec;

	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic64_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}

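/*
 * Example (illustrative, not part of this header): a try-acquire on a
 * pool of permits. A negative return means the pool was empty and
 * nothing was decremented. permits is a hypothetical variable.
 *
 *	static atomic64_t permits = ATOMIC64_INIT(4);
 *
 *	bool try_acquire(void)
 *	{
 *		return atomic64_dec_if_positive(&permits) >= 0;
 *	}
 */
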
#endif /* _ASM_X86_ATOMIC64_64_H */