/*
 * cmpxchg-grb.h — SuperH xchg/cmpxchg primitives using the gUSA ("gRB")
 * software-atomicity rollback scheme (no load-locked/store-conditional
 * on SH-3/SH-4; atomicity is emulated via a negative stack pointer).
 */
  1. #ifndef __ASM_SH_CMPXCHG_GRB_H
  2. #define __ASM_SH_CMPXCHG_GRB_H
/*
 * Atomically exchange the 32-bit value at *m with val; return the old value.
 *
 * gUSA rollback convention (see LOGIN/LOGOUT below): r0 holds the end
 * address of the critical sequence, and r15 (the stack pointer) is set to
 * minus the byte length of the remaining instructions.  While r15 is
 * negative, an interrupt causes the kernel to restart execution at the
 * address in r0 instead of resuming mid-sequence, so the load/store pair
 * is effectively atomic.  Restoring the saved sp ends the region.
 */
static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long retval;
	__asm__ __volatile__ (
		" .align 2 \n\t"                /* mova needs a 4-byte-aligned target */
		" mova 1f, r0 \n\t"             /* r0 = end point (rollback restart addr) */
		" nop \n\t"                     /* pad — presumably keeps 1f 4-byte aligned; the 16-bit variants have an extra insn instead */
		" mov r15, r1 \n\t"             /* r1 = saved sp */
		" mov #-4, r15 \n\t"            /* LOGIN: 4 bytes (2 insns) remain before 1f */
		" mov.l @%1, %0 \n\t"           /* load old value */
		" mov.l %2, @%1 \n\t"           /* store new value */
		"1: mov r1, r15 \n\t"           /* LOGOUT: restore sp, end atomic region */
		: "=&r" (retval),               /* early-clobber: written before all inputs consumed */
		"+r" (m),
		"+r" (val) /* inhibit r15 overloading */
		:
		: "memory", "r0", "r1");
	return retval;
}
/*
 * Atomically exchange the 16-bit value at *m with val; return the old
 * value zero-extended to unsigned long.
 *
 * Same gUSA rollback scheme as xchg_u32; the critical sequence is three
 * instructions (6 bytes), hence LOGIN value -6.  mov.w sign-extends on
 * load, so extu.w is needed to return the value as unsigned.
 */
static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
{
	unsigned long retval;
	__asm__ __volatile__ (
		" .align 2 \n\t"                /* mova needs a 4-byte-aligned target */
		" mova 1f, r0 \n\t"             /* r0 = end point (rollback restart addr) */
		" mov r15, r1 \n\t"             /* r1 = saved sp */
		" mov #-6, r15 \n\t"            /* LOGIN: 6 bytes (3 insns) remain before 1f */
		" mov.w @%1, %0 \n\t"           /* load old value (sign-extended) */
		" extu.w %0, %0 \n\t"           /* extend as unsigned */
		" mov.w %2, @%1 \n\t"           /* store new value */
		"1: mov r1, r15 \n\t"           /* LOGOUT: restore sp, end atomic region */
		: "=&r" (retval),               /* early-clobber output */
		"+r" (m),
		"+r" (val) /* inhibit r15 overloading */
		:
		: "memory" , "r0", "r1");
	return retval;
}
/*
 * Atomically exchange the 8-bit value at *m with val; return the old
 * value zero-extended to unsigned long.
 *
 * Same gUSA rollback scheme as xchg_u32; three instructions (6 bytes) in
 * the critical sequence, hence LOGIN value -6.  mov.b sign-extends on
 * load, so extu.b is needed to return the value as unsigned.
 */
static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	unsigned long retval;
	__asm__ __volatile__ (
		" .align 2 \n\t"                /* mova needs a 4-byte-aligned target */
		" mova 1f, r0 \n\t"             /* r0 = end point (rollback restart addr) */
		" mov r15, r1 \n\t"             /* r1 = saved sp */
		" mov #-6, r15 \n\t"            /* LOGIN: 6 bytes (3 insns) remain before 1f */
		" mov.b @%1, %0 \n\t"           /* load old value (sign-extended) */
		" extu.b %0, %0 \n\t"           /* extend as unsigned */
		" mov.b %2, @%1 \n\t"           /* store new value */
		"1: mov r1, r15 \n\t"           /* LOGOUT: restore sp, end atomic region */
		: "=&r" (retval),               /* early-clobber output */
		"+r" (m),
		"+r" (val) /* inhibit r15 overloading */
		:
		: "memory" , "r0", "r1");
	return retval;
}
/*
 * Atomic 32-bit compare-and-exchange: if *m == old, store new into *m.
 * Always returns the value loaded from *m; the caller detects success by
 * comparing the return value against old.
 *
 * Same gUSA rollback scheme as the xchg variants; the critical sequence
 * is four instructions (8 bytes), hence LOGIN value -8.  cmp/eq writes
 * the T bit, so "t" is in the clobber list.
 */
static inline unsigned long __cmpxchg_u32(volatile int *m, unsigned long old,
					  unsigned long new)
{
	unsigned long retval;
	__asm__ __volatile__ (
		" .align 2 \n\t"                /* mova needs a 4-byte-aligned target */
		" mova 1f, r0 \n\t"             /* r0 = end point (rollback restart addr) */
		" nop \n\t"                     /* pad — presumably keeps 1f 4-byte aligned; TODO confirm vs xchg_u16/u8 which omit it */
		" mov r15, r1 \n\t"             /* r1 = saved sp */
		" mov #-8, r15 \n\t"            /* LOGIN: 8 bytes (4 insns) remain before 1f */
		" mov.l @%3, %0 \n\t"           /* load old value */
		" cmp/eq %0, %1 \n\t"           /* T = (*m == old) */
		" bf 1f \n\t"                   /* if not equal */
		" mov.l %2, @%3 \n\t"           /* store new value */
		"1: mov r1, r15 \n\t"           /* LOGOUT: restore sp, end atomic region */
		: "=&r" (retval),               /* early-clobber output */
		"+r" (old), "+r" (new) /* old or new can be r15 */
		: "r" (m)
		: "memory" , "r0", "r1", "t");  /* "t": cmp/eq clobbers the T flag */
	return retval;
}
  81. #endif /* __ASM_SH_CMPXCHG_GRB_H */