  1. /* SPDX-License-Identifier: GPL-2.0 */
  2. #ifndef __ARCH_H8300_ATOMIC__
  3. #define __ARCH_H8300_ATOMIC__
  4. #include <linux/types.h>
  5. #include <asm/cmpxchg.h>
  6. /*
  7. * Atomic operations that C can't guarantee us. Useful for
  8. * resource counting etc..
  9. */
  10. #define ATOMIC_INIT(i) { (i) }
  11. #define atomic_read(v) READ_ONCE((v)->counter)
  12. #define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
  13. #include <linux/kernel.h>
  14. #define ATOMIC_OP_RETURN(op, c_op) \
  15. static inline int atomic_##op##_return(int i, atomic_t *v) \
  16. { \
  17. h8300flags flags; \
  18. int ret; \
  19. \
  20. flags = arch_local_irq_save(); \
  21. ret = v->counter c_op i; \
  22. arch_local_irq_restore(flags); \
  23. return ret; \
  24. }
  25. #define ATOMIC_FETCH_OP(op, c_op) \
  26. static inline int atomic_fetch_##op(int i, atomic_t *v) \
  27. { \
  28. h8300flags flags; \
  29. int ret; \
  30. \
  31. flags = arch_local_irq_save(); \
  32. ret = v->counter; \
  33. v->counter c_op i; \
  34. arch_local_irq_restore(flags); \
  35. return ret; \
  36. }
  37. #define ATOMIC_OP(op, c_op) \
  38. static inline void atomic_##op(int i, atomic_t *v) \
  39. { \
  40. h8300flags flags; \
  41. \
  42. flags = arch_local_irq_save(); \
  43. v->counter c_op i; \
  44. arch_local_irq_restore(flags); \
  45. }
  46. ATOMIC_OP_RETURN(add, +=)
  47. ATOMIC_OP_RETURN(sub, -=)
  48. #define ATOMIC_OPS(op, c_op) \
  49. ATOMIC_OP(op, c_op) \
  50. ATOMIC_FETCH_OP(op, c_op)
  51. ATOMIC_OPS(and, &=)
  52. ATOMIC_OPS(or, |=)
  53. ATOMIC_OPS(xor, ^=)
  54. ATOMIC_OPS(add, +=)
  55. ATOMIC_OPS(sub, -=)
  56. #undef ATOMIC_OPS
  57. #undef ATOMIC_OP_RETURN
  58. #undef ATOMIC_OP
  59. #define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
  60. #define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
  61. #define atomic_inc_return(v) atomic_add_return(1, v)
  62. #define atomic_dec_return(v) atomic_sub_return(1, v)
  63. #define atomic_inc(v) (void)atomic_inc_return(v)
  64. #define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
  65. #define atomic_dec(v) (void)atomic_dec_return(v)
  66. #define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
  67. static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
  68. {
  69. int ret;
  70. h8300flags flags;
  71. flags = arch_local_irq_save();
  72. ret = v->counter;
  73. if (likely(ret == old))
  74. v->counter = new;
  75. arch_local_irq_restore(flags);
  76. return ret;
  77. }
  78. static inline int __atomic_add_unless(atomic_t *v, int a, int u)
  79. {
  80. int ret;
  81. h8300flags flags;
  82. flags = arch_local_irq_save();
  83. ret = v->counter;
  84. if (ret != u)
  85. v->counter += a;
  86. arch_local_irq_restore(flags);
  87. return ret;
  88. }
#endif /* __ARCH_H8300_ATOMIC__ */