/*
 * atomic32.c: 32-bit atomic_t implementation
 *
 * Copyright (C) 2004 Keith M Wesolowski
 * Copyright (C) 2007 Kyle McMartin
 *
 * Based on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf
 */

#include <linux/atomic.h>
#include <linux/spinlock.h>
#include <linux/module.h>
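
/*
 * On processors without hardware atomic read-modify-write instructions,
 * atomic_t and the low-level xchg/cmpxchg/bitop helpers below are
 * emulated with spinlocks: each variable hashes (by address) to one of
 * a small array of locks.  On UP builds a single dummy lock suffices,
 * since disabling interrupts already serializes the operation.
 */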
#ifdef CONFIG_SMP
#define ATOMIC_HASH_SIZE	4
#define ATOMIC_HASH(a)	(&__atomic_hash[(((unsigned long) a) >> 8) & (ATOMIC_HASH_SIZE - 1)])

spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... (ATOMIC_HASH_SIZE - 1)] = __SPIN_LOCK_UNLOCKED(__atomic_hash)
};

#else /* SMP */

static DEFINE_SPINLOCK(dummy);
#define ATOMIC_HASH_SIZE	1
#define ATOMIC_HASH(a)		(&dummy)

#endif /* SMP */
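
/*
 * ATOMIC_OP(op, cop) generates atomic_##op##_return(): it applies the
 * C compound-assignment operator "cop" to v->counter under the hashed
 * lock and returns the resulting value.  Only "add" is instantiated
 * here; atomic_sub_return() and friends would follow the same pattern.
 */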
#define ATOMIC_OP(op, cop) \
int atomic_##op##_return(int i, atomic_t *v) \
{ \
	int ret; \
	unsigned long flags; \
	spin_lock_irqsave(ATOMIC_HASH(v), flags); \
 \
	ret = (v->counter cop i); \
 \
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags); \
	return ret; \
} \
EXPORT_SYMBOL(atomic_##op##_return);

ATOMIC_OP(add, +=)

#undef ATOMIC_OP
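
/* Unconditionally replace v->counter with "new"; return the old value. */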
int atomic_xchg(atomic_t *v, int new)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	v->counter = new;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
EXPORT_SYMBOL(atomic_xchg);
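
/*
 * Compare-and-swap: store "new" into v->counter only if it still
 * equals "old".  Either way, return the value observed before the
 * (possible) store, so callers can tell whether the swap happened.
 */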
int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
EXPORT_SYMBOL(atomic_cmpxchg);
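
/*
 * Add "a" to v->counter unless it currently equals "u"; return the
 * value seen before any update (callers compare it against "u" to
 * learn whether the add took place).
 */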
int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
EXPORT_SYMBOL(__atomic_add_unless);

/* Atomic operations are already serializing */
void atomic_set(atomic_t *v, int i)
{
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	v->counter = i;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
}
EXPORT_SYMBOL(atomic_set);
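
/*
 * Locked bit helpers: set, clear, or toggle the bits in "mask" at
 * *addr under the hashed lock, returning the prior value of those
 * bits so that test_and_*_bit()-style callers can see whether a bit
 * was already set.
 */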
unsigned long ___set_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old | mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);
	return old & mask;
}
EXPORT_SYMBOL(___set_bit);

unsigned long ___clear_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old & ~mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);
	return old & mask;
}
EXPORT_SYMBOL(___clear_bit);

unsigned long ___change_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long old, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	old = *addr;
	*addr = old ^ mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);
	return old & mask;
}
EXPORT_SYMBOL(___change_bit);
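
/*
 * 32-bit backends for the generic cmpxchg()/xchg() macros, operating
 * on plain u32 words with the same hashed locks as the atomic_t
 * operations above.
 */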
unsigned long __cmpxchg_u32(volatile u32 *ptr, u32 old, u32 new)
{
	unsigned long flags;
	u32 prev;

	spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
	if ((prev = *ptr) == old)
		*ptr = new;
	spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);
	return (unsigned long)prev;
}
EXPORT_SYMBOL(__cmpxchg_u32);

unsigned long __xchg_u32(volatile u32 *ptr, u32 new)
{
	unsigned long flags;
	u32 prev;

	spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
	prev = *ptr;
	*ptr = new;
	spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);
	return (unsigned long)prev;
}
EXPORT_SYMBOL(__xchg_u32);
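
/*
 * Illustrative (hypothetical) caller of the API implemented above,
 * e.g. racing to take the first extra reference on an object:
 *
 *	atomic_t refcnt = ATOMIC_INIT(1);
 *
 *	if (atomic_cmpxchg(&refcnt, 1, 2) == 1) {
 *		// we won the race and now hold the second reference
 *	}
 *
 * Under the hood each call hashes &refcnt to one spinlock, so two
 * unrelated atomics may contend on the same lock but stay correct.
 */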