cmpxchg-grb.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_CMPXCHG_GRB_H
#define __ASM_SH_CMPXCHG_GRB_H
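
/*
 * Added commentary, not part of the original header: these routines appear
 * to use the SH "gUSA" roll-back convention (this is the gRB/CONFIG_GUSA_RB
 * flavour, as I understand it).  Before the critical load/store pair, r0 is
 * pointed at the terminating "1:" label and r15 is loaded with the negative
 * length of the sequence (the LOGIN step); r15 is restored from r1 afterwards
 * (LOGOUT).  If an exception or interrupt hits while r15 is negative, the
 * entry path is expected to roll execution back to the start of the sequence
 * (derived from r0 and the offset in r15), so the read-modify-write completes
 * as if atomic with respect to interrupts on uniprocessor configurations.
 */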

static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   nop                   \n\t"
		"   mov     r15,  r1      \n\t" /* r1 = saved sp */
		"   mov     #-4,  r15     \n\t" /* LOGIN */
		"   mov.l   @%1,  %0      \n\t" /* load  old value */
		"   mov.l   %2,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (m),
		  "+r"  (val)		/* inhibit r15 overloading */
		:
		: "memory", "r0", "r1");

	return retval;
}

static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov     r15,  r1      \n\t" /* r1 = saved sp */
		"   mov     #-6,  r15     \n\t" /* LOGIN */
		"   mov.w   @%1,  %0      \n\t" /* load  old value */
		"   extu.w  %0,   %0      \n\t" /* extend as unsigned */
		"   mov.w   %2,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (m),
		  "+r"  (val)		/* inhibit r15 overloading */
		:
		: "memory", "r0", "r1");

	return retval;
}

static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov     r15,  r1      \n\t" /* r1 = saved sp */
		"   mov     #-6,  r15     \n\t" /* LOGIN */
		"   mov.b   @%1,  %0      \n\t" /* load  old value */
		"   extu.b  %0,   %0      \n\t" /* extend as unsigned */
		"   mov.b   %2,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (m),
		  "+r"  (val)		/* inhibit r15 overloading */
		:
		: "memory", "r0", "r1");

	return retval;
}
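
/*
 * Added note: unlike the plain exchanges above, the compare-and-swap below
 * also executes cmp/eq, which is why the T bit ("t") appears in its clobber
 * list and why the store is skipped via bf when the comparison fails.
 */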

static inline unsigned long __cmpxchg_u32(volatile int *m, unsigned long old,
					  unsigned long new)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   nop                   \n\t"
		"   mov     r15,  r1      \n\t" /* r1 = saved sp */
		"   mov     #-8,  r15     \n\t" /* LOGIN */
		"   mov.l   @%3,  %0      \n\t" /* load  old value */
		"   cmp/eq  %0,   %1      \n\t"
		"   bf      1f            \n\t" /* if not equal */
		"   mov.l   %2,   @%3     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (old), "+r" (new)	/* old or new can be r15 */
		:  "r"  (m)
		: "memory", "r0", "r1", "t");

	return retval;
}

#endif /* __ASM_SH_CMPXCHG_GRB_H */
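
These helpers are not normally called directly: the companion <asm/cmpxchg.h> selects this header for gUSA/gRB builds and wraps the size-specific routines in a sizeof()-based dispatcher. The sketch below illustrates that dispatch pattern only; the macro name __xchg_sketch and the __xchg_called_with_bad_pointer() link-time error hook are written out here as illustrative assumptions, not as the exact upstream definitions.

/* Illustrative sketch only: how a sizeof()-based wrapper could route to the helpers above. */
extern void __xchg_called_with_bad_pointer(void);	/* assumed link-time error hook */

#define __xchg_sketch(ptr, x)						\
({									\
	unsigned long __xchg_res;					\
	volatile void *__xchg_ptr = (ptr);				\
	switch (sizeof(*(ptr))) {					\
	case 4:								\
		__xchg_res = xchg_u32((volatile u32 *)__xchg_ptr,	\
				      (unsigned long)(x));		\
		break;							\
	case 2:								\
		__xchg_res = xchg_u16((volatile u16 *)__xchg_ptr,	\
				      (unsigned long)(x));		\
		break;							\
	case 1:								\
		__xchg_res = xchg_u8((volatile u8 *)__xchg_ptr,		\
				     (unsigned long)(x));		\
		break;							\
	default:							\
		/* unknown size: unresolved reference forces a build error */ \
		__xchg_called_with_bad_pointer();			\
		__xchg_res = 0;						\
		break;							\
	}								\
	(__typeof__(*(ptr)))__xchg_res;					\
})

In a real tree the result of such a dispatcher would then be consumed by the generic xchg()/cmpxchg() wrappers; the sketch is only meant to show where xchg_u32() and friends slot in.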