/*
 * bitops-grb.h
 */
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_BITOPS_GRB_H
#define __ASM_SH_BITOPS_GRB_H
/*
 * set_bit - atomically set bit @nr in the bitmap at @addr.
 *
 * NOTE(review): this looks like the SH "gUSA"-style restartable atomic
 * sequence: r0 holds the end address of the region, r1 saves the stack
 * pointer, and a negative r15 flags the critical section and its size
 * (-6 bytes here = the three 16-bit load/or/store insns), so an
 * interrupt taken inside the region can roll execution back and the
 * load-modify-store completes atomically -- confirm against the
 * SH-2A/GRB kernel entry code.
 */
static inline void set_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* index of the 32-bit word holding bit nr */
	mask = 1 << (nr & 0x1f);	/* bit position within that word */
	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-6, r15 \n\t" /* LOGIN: r15 = size of critical region */
		" mov.l @%1, %0 \n\t" /* load old value */
		" or %2, %0 \n\t" /* set the mask bit */
		" mov.l %0, @%1 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp, leave region */
		: "=&r" (tmp),
		"+r" (a)
		: "r" (mask)
		: "memory" , "r0", "r1");
}
/*
 * clear_bit - atomically clear bit @nr in the bitmap at @addr.
 *
 * Same restartable-sequence shape as set_bit (NOTE(review): presumed
 * gUSA-style -- r0 = end point, r1 = saved sp, negative r15 = region
 * size; -6 bytes covers the three load/and/store insns -- confirm
 * against the SH-2A/GRB kernel entry code), but ANDs with the
 * complemented mask to drop the bit.
 */
static inline void clear_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* index of the 32-bit word holding bit nr */
	mask = ~(1 << (nr & 0x1f));	/* all bits set except bit nr */
	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-6, r15 \n\t" /* LOGIN: r15 = size of critical region */
		" mov.l @%1, %0 \n\t" /* load old value */
		" and %2, %0 \n\t" /* clear the bit via inverted mask */
		" mov.l %0, @%1 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp, leave region */
		: "=&r" (tmp),
		"+r" (a)
		: "r" (mask)
		: "memory" , "r0", "r1");
}
/*
 * change_bit - atomically toggle bit @nr in the bitmap at @addr.
 *
 * Same restartable-sequence shape as set_bit (NOTE(review): presumed
 * gUSA-style -- r0 = end point, r1 = saved sp, negative r15 = region
 * size; -6 bytes covers the three load/xor/store insns -- confirm
 * against the SH-2A/GRB kernel entry code), but XORs with the mask to
 * flip the bit.
 */
static inline void change_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* index of the 32-bit word holding bit nr */
	mask = 1 << (nr & 0x1f);	/* bit position within that word */
	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-6, r15 \n\t" /* LOGIN: r15 = size of critical region */
		" mov.l @%1, %0 \n\t" /* load old value */
		" xor %2, %0 \n\t" /* toggle the mask bit */
		" mov.l %0, @%1 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp, leave region */
		: "=&r" (tmp),
		"+r" (a)
		: "r" (mask)
		: "memory" , "r0", "r1");
}
/*
 * test_and_set_bit - atomically set bit @nr at @addr and return its
 * previous value (0 or 1).
 *
 * NOTE(review): presumed gUSA-style restartable region; -14 bytes
 * covers the seven 16-bit insns between LOGIN and LOGOUT -- confirm
 * against the SH-2A/GRB kernel entry code.
 *
 * The return value is built branch-free: tst sets T=1 when
 * (old & mask) == 0, then negc computes 0 - (-1) - T = 1 - T, i.e.
 * retval = !T = ((old & mask) != 0).  Hence the "t" flag clobber.
 */
static inline int test_and_set_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* index of the 32-bit word holding bit nr */
	mask = 1 << (nr & 0x1f);	/* bit position within that word */
	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-14, r15 \n\t" /* LOGIN: r15 = size of critical region */
		" mov.l @%2, %0 \n\t" /* load old value */
		" mov %0, %1 \n\t"
		" tst %1, %3 \n\t" /* T = ((*a & mask) == 0) */
		" mov #-1, %1 \n\t" /* retval = -1 */
		" negc %1, %1 \n\t" /* retval = (mask & *a) != 0 */
		" or %3, %0 \n\t"
		" mov.l %0, @%2 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp, leave region */
		: "=&r" (tmp),
		"=&r" (retval),
		"+r" (a)
		: "r" (mask)
		: "memory" , "r0", "r1" ,"t");
	return retval;
}
/*
 * test_and_clear_bit - atomically clear bit @nr at @addr and return its
 * previous value (0 or 1).
 *
 * NOTE(review): presumed gUSA-style restartable region; -14 bytes
 * covers the seven 16-bit insns between LOGIN and LOGOUT -- confirm
 * against the SH-2A/GRB kernel entry code.
 *
 * A precomputed ~mask is passed in so the region stays exactly seven
 * insns: tst/negc build retval branch-free (retval = !T, see
 * test_and_set_bit), then AND with not_mask drops the bit.
 */
static inline int test_and_clear_bit(int nr, volatile void * addr)
{
	int mask, retval,not_mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* index of the 32-bit word holding bit nr */
	mask = 1 << (nr & 0x1f);	/* bit position within that word */
	not_mask = ~mask;		/* inverted mask, computed outside the region */
	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-14, r15 \n\t" /* LOGIN: r15 = size of critical region */
		" mov.l @%2, %0 \n\t" /* load old value */
		" mov %0, %1 \n\t" /* %1 = *a */
		" tst %1, %3 \n\t" /* T = ((*a & mask) == 0) */
		" mov #-1, %1 \n\t" /* retval = -1 */
		" negc %1, %1 \n\t" /* retval = (mask & *a) != 0 */
		" and %4, %0 \n\t"
		" mov.l %0, @%2 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp, leave region */
		: "=&r" (tmp),
		"=&r" (retval),
		"+r" (a)
		: "r" (mask),
		"r" (not_mask)
		: "memory" , "r0", "r1", "t");
	return retval;
}
/*
 * test_and_change_bit - atomically toggle bit @nr at @addr and return
 * its previous value (0 or 1).
 *
 * NOTE(review): presumed gUSA-style restartable region; -14 bytes
 * covers the seven 16-bit insns between LOGIN and LOGOUT -- confirm
 * against the SH-2A/GRB kernel entry code.
 *
 * tst/negc build retval branch-free (retval = !T, see
 * test_and_set_bit), then XOR with the mask flips the bit.
 */
static inline int test_and_change_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* index of the 32-bit word holding bit nr */
	mask = 1 << (nr & 0x1f);	/* bit position within that word */
	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-14, r15 \n\t" /* LOGIN: r15 = size of critical region */
		" mov.l @%2, %0 \n\t" /* load old value */
		" mov %0, %1 \n\t" /* %1 = *a */
		" tst %1, %3 \n\t" /* T = ((*a & mask) == 0) */
		" mov #-1, %1 \n\t" /* retval = -1 */
		" negc %1, %1 \n\t" /* retval = (mask & *a) != 0 */
		" xor %3, %0 \n\t"
		" mov.l %0, @%2 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp, leave region */
		: "=&r" (tmp),
		"=&r" (retval),
		"+r" (a)
		: "r" (mask)
		: "memory" , "r0", "r1", "t");
	return retval;
}
#include <asm-generic/bitops/non-atomic.h>
#endif /* __ASM_SH_BITOPS_GRB_H */