atomic.h
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright IBM Corp. 1999, 2016
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *	      Denis Joseph Barrow,
 *	      Arnd Bergmann,
 */
#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/atomic_ops.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)  { (i) }
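
/*
 * atomic_read() and atomic_set() are a plain load and store of the
 * counter; aligned word accesses are atomic on s390, so no special
 * instruction is needed.  The asm volatile only forces the compiler to
 * emit a real memory access rather than reuse a previously loaded value
 * or optimize the store away.
 */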
static inline int atomic_read(const atomic_t *v)
{
	int c;

	asm volatile(
		" l %0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}

static inline void atomic_set(atomic_t *v, int i)
{
	asm volatile(
		" st %1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}
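
/*
 * __atomic_add_barrier() (asm/atomic_ops.h) atomically adds i to the
 * counter with full barrier semantics and returns the old value.
 * atomic_fetch_add() passes that old value through, while
 * atomic_add_return() adds i again to yield the new value.
 */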
static inline int atomic_add_return(int i, atomic_t *v)
{
	return __atomic_add_barrier(i, &v->counter) + i;
}

static inline int atomic_fetch_add(int i, atomic_t *v)
{
	return __atomic_add_barrier(i, &v->counter);
}
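
/*
 * atomic_add() does not need the old value.  On z196 and newer
 * machines, a compile-time constant in the signed byte range
 * (-128..127) can therefore use __atomic_add_const() (typically an
 * add-immediate instruction); everything else falls back to the
 * generic __atomic_add().
 */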
static inline void atomic_add(int i, atomic_t *v)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
		__atomic_add_const(i, &v->counter);
		return;
	}
#endif
	__atomic_add(i, &v->counter);
}
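
/*
 * Subtraction is addition of the negated value; the (int) cast keeps
 * the negation signed even if the caller passes an unsigned value.
 */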
#define atomic_sub(_i, _v)		atomic_add(-(int)(_i), _v)
#define atomic_sub_return(_i, _v)	atomic_add_return(-(int)(_i), _v)
#define atomic_fetch_sub(_i, _v)	atomic_fetch_add(-(int)(_i), _v)
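
/*
 * ATOMIC_OPS(op) generates the bitwise operations.  For example,
 * ATOMIC_OPS(and) expands to atomic_and(), built on __atomic_and(),
 * and atomic_fetch_and(), built on __atomic_and_barrier(), which
 * returns the old counter value.
 */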
#define ATOMIC_OPS(op)							\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	__atomic_##op(i, &v->counter);					\
}									\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	return __atomic_##op##_barrier(i, &v->counter);			\
}

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS
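
/*
 * atomic_xchg() unconditionally replaces the counter and returns the
 * old value (via xchg() from asm/cmpxchg.h).  atomic_cmpxchg()
 * replaces it only if it still contains 'old' and returns the value
 * found in the counter, so a return value equal to 'old' means the
 * swap was done.
 */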
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return __atomic_cmpxchg(&v->counter, old, new);
}
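
/*
 * Illustrative sketch only (not part of this header): the usual
 * compare-and-swap retry loop, here building an "add unless equal"
 * helper on top of atomic_read()/atomic_cmpxchg().  The function name
 * is made up for the example.
 *
 *	static inline int example_add_unless(atomic_t *v, int a, int u)
 *	{
 *		int c = atomic_read(v);
 *
 *		while (c != u) {
 *			int old = atomic_cmpxchg(v, c, c + a);
 *
 *			if (old == c)
 *				return 1;	(swap succeeded)
 *			c = old;		(lost the race, retry)
 *		}
 *		return 0;
 *	}
 */
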
#define ATOMIC64_INIT(i)  { (i) }

static inline long atomic64_read(const atomic64_t *v)
{
	long c;

	asm volatile(
		" lg %0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}

static inline void atomic64_set(atomic64_t *v, long i)
{
	asm volatile(
		" stg %1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}
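
/*
 * The atomic64_t operations mirror the 32-bit ones above, using the
 * 64-bit __atomic64_* helpers from asm/atomic_ops.h.
 */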
static inline long atomic64_add_return(long i, atomic64_t *v)
{
	return __atomic64_add_barrier(i, &v->counter) + i;
}

static inline long atomic64_fetch_add(long i, atomic64_t *v)
{
	return __atomic64_add_barrier(i, &v->counter);
}

static inline void atomic64_add(long i, atomic64_t *v)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
		__atomic64_add_const(i, &v->counter);
		return;
	}
#endif
	__atomic64_add(i, &v->counter);
}
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
	return __atomic64_cmpxchg(&v->counter, old, new);
}
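
/* As with ATOMIC_OPS above, generate the 64-bit and/or/xor operations. */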
#define ATOMIC64_OPS(op)						\
static inline void atomic64_##op(long i, atomic64_t *v)		\
{									\
	__atomic64_##op(i, &v->counter);				\
}									\
static inline long atomic64_fetch_##op(long i, atomic64_t *v)		\
{									\
	return __atomic64_##op##_barrier(i, &v->counter);		\
}

ATOMIC64_OPS(and)
ATOMIC64_OPS(or)
ATOMIC64_OPS(xor)

#undef ATOMIC64_OPS
#define atomic64_sub_return(_i, _v)	atomic64_add_return(-(long)(_i), _v)
#define atomic64_fetch_sub(_i, _v)	atomic64_fetch_add(-(long)(_i), _v)
#define atomic64_sub(_i, _v)		atomic64_add(-(long)(_i), _v)

#endif /* __ARCH_S390_ATOMIC__ */