/* arch/h8300/include/asm/cmpxchg.h */
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ARCH_H8300_CMPXCHG__
#define __ARCH_H8300_CMPXCHG__

#include <linux/irqflags.h>

/*
 * xchg() - atomically exchange @x with the object at @ptr.
 *
 * Evaluates to the previous value of *ptr, cast back to the pointed-to
 * type.  NOTE: @ptr is expanded more than once (value, size), so it must
 * be free of side effects — standard constraint for kernel xchg macros.
 */
#define xchg(ptr, x) \
	((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), \
				    sizeof(*(ptr))))

/*
 * Oversized dummy type for the "m" asm operand below: casting @ptr to a
 * pointer to this struct tells GCC the asm may touch a large region
 * behind the pointer, so it will not cache *ptr in a register across
 * the exchange.  Classic kernel inline-asm aliasing idiom.
 */
struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))
/*
 * __xchg - exchange @x with the 1-, 2- or 4-byte object at @ptr.
 *
 * H8/300 has no atomic exchange instruction and is uniprocessor, so
 * atomicity is obtained by disabling local interrupts around a plain
 * load/store pair.
 *
 * @x:    new value to store (already widened to unsigned long)
 * @ptr:  object to exchange
 * @size: sizeof the pointed-to object (1, 2 or 4)
 *
 * Returns the previous value at @ptr; returns 0 for any other @size
 * (silent — @size comes from sizeof in the xchg() macro, so a correct
 * caller never reaches the default branch).
 *
 * NOTE(review): each asm lists the memory operand only as an input
 * ("m" (*__xg(ptr))) even though the second mov writes through it; the
 * __volatile__ qualifier plus the oversized __xchg_dummy cast is what
 * keeps the compiler from caching or reordering around it — confirm
 * against the target GCC before changing the constraints.
 */
static inline unsigned long __xchg(unsigned long x,
				   volatile void *ptr, int size)
{
	unsigned long tmp, flags;

	local_irq_save(flags);	/* make the load/store pair atomic wrt IRQs */

	switch (size) {
	case 1:
		/* tmp = *(u8 *)ptr; *(u8 *)ptr = x */
		__asm__ __volatile__
			("mov.b %2,%0\n\t"
			 "mov.b %1,%2"
			 : "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)));
		break;
	case 2:
		/* tmp = *(u16 *)ptr; *(u16 *)ptr = x */
		__asm__ __volatile__
			("mov.w %2,%0\n\t"
			 "mov.w %1,%2"
			 : "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)));
		break;
	case 4:
		/* tmp = *(u32 *)ptr; *(u32 *)ptr = x */
		__asm__ __volatile__
			("mov.l %2,%0\n\t"
			 "mov.l %1,%2"
			 : "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)));
		break;
	default:
		tmp = 0;	/* unsupported size: no exchange performed */
	}

	local_irq_restore(flags);
	return tmp;
}
#include <asm-generic/cmpxchg-local.h>

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
/* NOTE: @ptr is expanded more than once; keep it side-effect free. */
#define cmpxchg_local(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), \
						     (unsigned long)(o), \
						     (unsigned long)(n), \
						     sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

/*
 * On UP the generic cmpxchg() (IRQ-disabling, from asm-generic) is
 * sufficient; there is no SMP implementation for this architecture.
 */
#ifndef CONFIG_SMP
#include <asm-generic/cmpxchg.h>
#endif

/* atomic_xchg: exchange on an atomic_t, built on the xchg() above */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#endif /* __ARCH_H8300_CMPXCHG__ */