/*
 * bitops.c: atomic operations which got too long to be inlined all over
 * the place.
 *
 * Copyright 1999 Philipp Rumpf (prumpf@tux.org)
 * Copyright 2000 Grant Grundler (grundler@cup.hp.com)
 */
  8. #include <linux/kernel.h>
  9. #include <linux/spinlock.h>
  10. #include <linux/atomic.h>
  11. #ifdef CONFIG_SMP
  12. arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
  13. [0 ... (ATOMIC_HASH_SIZE-1)] = __ARCH_SPIN_LOCK_UNLOCKED
  14. };
  15. #endif
  16. #ifdef CONFIG_64BIT
  17. unsigned long __xchg64(unsigned long x, unsigned long *ptr)
  18. {
  19. unsigned long temp, flags;
  20. _atomic_spin_lock_irqsave(ptr, flags);
  21. temp = *ptr;
  22. *ptr = x;
  23. _atomic_spin_unlock_irqrestore(ptr, flags);
  24. return temp;
  25. }
  26. #endif
  27. unsigned long __xchg32(int x, int *ptr)
  28. {
  29. unsigned long flags;
  30. long temp;
  31. _atomic_spin_lock_irqsave(ptr, flags);
  32. temp = (long) *ptr; /* XXX - sign extension wanted? */
  33. *ptr = x;
  34. _atomic_spin_unlock_irqrestore(ptr, flags);
  35. return (unsigned long)temp;
  36. }
  37. unsigned long __xchg8(char x, char *ptr)
  38. {
  39. unsigned long flags;
  40. long temp;
  41. _atomic_spin_lock_irqsave(ptr, flags);
  42. temp = (long) *ptr; /* XXX - sign extension wanted? */
  43. *ptr = x;
  44. _atomic_spin_unlock_irqrestore(ptr, flags);
  45. return (unsigned long)temp;
  46. }
  47. u64 __cmpxchg_u64(volatile u64 *ptr, u64 old, u64 new)
  48. {
  49. unsigned long flags;
  50. u64 prev;
  51. _atomic_spin_lock_irqsave(ptr, flags);
  52. if ((prev = *ptr) == old)
  53. *ptr = new;
  54. _atomic_spin_unlock_irqrestore(ptr, flags);
  55. return prev;
  56. }
  57. unsigned long __cmpxchg_u32(volatile unsigned int *ptr, unsigned int old, unsigned int new)
  58. {
  59. unsigned long flags;
  60. unsigned int prev;
  61. _atomic_spin_lock_irqsave(ptr, flags);
  62. if ((prev = *ptr) == old)
  63. *ptr = new;
  64. _atomic_spin_unlock_irqrestore(ptr, flags);
  65. return (unsigned long)prev;
  66. }