/* arch/sh bitops-llsc.h — SH-4A LL/SC-based atomic bit operations. */
  1. /* SPDX-License-Identifier: GPL-2.0 */
  2. #ifndef __ASM_SH_BITOPS_LLSC_H
  3. #define __ASM_SH_BITOPS_LLSC_H
/*
 * set_bit - atomically set bit @nr in the bitmap at @addr.
 *
 * Implemented with the SH-4A load-linked/store-conditional pair:
 * movli.l reads the word, the bit is OR-ed in, and movco.l stores it
 * back only if no other CPU touched the word in between.  On failure
 * the T flag is clear and "bf 1b" retries the whole sequence.
 */
static inline void set_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	/* Select the 32-bit word holding bit @nr, then the bit within it. */
	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%1, %0 ! set_bit \n\t"
		"or %2, %0 \n\t"
		"movco.l %0, @%1 \n\t"
		"bf 1b \n\t"
		: "=&z" (tmp)			/* "z" = r0; movli.l/movco.l require r0 */
		: "r" (a), "r" (mask)
		: "t", "memory"		/* clobbers T flag; orders around the RMW */
	);
}
/*
 * clear_bit - atomically clear bit @nr in the bitmap at @addr.
 *
 * Same movli.l/movco.l retry loop as set_bit.  The inversion is done
 * in C: the asm receives ~mask as input operand %2 and simply ANDs it
 * into the loaded word.
 */
static inline void clear_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	/* Select the 32-bit word holding bit @nr, then the bit within it. */
	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%1, %0 ! clear_bit \n\t"
		"and %2, %0 \n\t"
		"movco.l %0, @%1 \n\t"
		"bf 1b \n\t"
		: "=&z" (tmp)			/* "z" = r0; movli.l/movco.l require r0 */
		: "r" (a), "r" (~mask)	/* pre-inverted mask, AND clears the bit */
		: "t", "memory"
	);
}
/*
 * change_bit - atomically toggle bit @nr in the bitmap at @addr.
 *
 * Same movli.l/movco.l retry loop as set_bit, with XOR as the
 * modifying operation.
 */
static inline void change_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	/* Select the 32-bit word holding bit @nr, then the bit within it. */
	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%1, %0 ! change_bit \n\t"
		"xor %2, %0 \n\t"
		"movco.l %0, @%1 \n\t"
		"bf 1b \n\t"
		: "=&z" (tmp)			/* "z" = r0; movli.l/movco.l require r0 */
		: "r" (a), "r" (mask)
		: "t", "memory"
	);
}
  58. static inline int test_and_set_bit(int nr, volatile void *addr)
  59. {
  60. int mask, retval;
  61. volatile unsigned int *a = addr;
  62. unsigned long tmp;
  63. a += nr >> 5;
  64. mask = 1 << (nr & 0x1f);
  65. __asm__ __volatile__ (
  66. "1: \n\t"
  67. "movli.l @%2, %0 ! test_and_set_bit \n\t"
  68. "mov %0, %1 \n\t"
  69. "or %3, %0 \n\t"
  70. "movco.l %0, @%2 \n\t"
  71. "bf 1b \n\t"
  72. "and %3, %1 \n\t"
  73. : "=&z" (tmp), "=&r" (retval)
  74. : "r" (a), "r" (mask)
  75. : "t", "memory"
  76. );
  77. return retval != 0;
  78. }
/*
 * test_and_clear_bit - atomically clear bit @nr and return its old value.
 *
 * LL/SC loop: the pre-update word is stashed in @retval before the bit
 * is cleared (AND with ~mask, operand %4) and conditionally stored.
 * "and %3, %1" then isolates the old bit, and "synco" provides the full
 * memory barrier required of value-returning atomic RMW operations.
 *
 * Returns non-zero if the bit was set, 0 otherwise.
 */
static inline int test_and_clear_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	/* Select the 32-bit word holding bit @nr, then the bit within it. */
	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%2, %0 ! test_and_clear_bit \n\t"
		"mov %0, %1 \n\t"			/* snapshot old word */
		"and %4, %0 \n\t"			/* %4 = ~mask: clear the bit */
		"movco.l %0, @%2 \n\t"
		"bf 1b \n\t"
		"and %3, %1 \n\t"			/* old value of the bit */
		"synco \n\t"
		: "=&z" (tmp), "=&r" (retval)	/* "z" = r0, required by LL/SC */
		: "r" (a), "r" (mask), "r" (~mask)
		: "t", "memory"
	);

	return retval != 0;
}
/*
 * test_and_change_bit - atomically toggle bit @nr and return its old value.
 *
 * LL/SC loop: the pre-update word is stashed in @retval before the bit
 * is XOR-toggled and conditionally stored.  "and %3, %1" isolates the
 * old bit, and "synco" provides the full memory barrier required of
 * value-returning atomic RMW operations.
 *
 * Returns non-zero if the bit was set, 0 otherwise.
 */
static inline int test_and_change_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	/* Select the 32-bit word holding bit @nr, then the bit within it. */
	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%2, %0 ! test_and_change_bit \n\t"
		"mov %0, %1 \n\t"			/* snapshot old word */
		"xor %3, %0 \n\t"			/* toggle the bit */
		"movco.l %0, @%2 \n\t"
		"bf 1b \n\t"
		"and %3, %1 \n\t"			/* old value of the bit */
		"synco \n\t"
		: "=&z" (tmp), "=&r" (retval)	/* "z" = r0, required by LL/SC */
		: "r" (a), "r" (mask)
		: "t", "memory"
	);

	return retval != 0;
}
  123. #include <asm-generic/bitops/non-atomic.h>
  124. #endif /* __ASM_SH_BITOPS_LLSC_H */