/* futex.h */
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 2006 Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef _ASM_FUTEX_H
#define _ASM_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/asm-eva.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/errno.h>
#include <asm/war.h>
/*
 * __futex_atomic_op(insn, ret, oldval, uaddr, oparg) - LL/SC-based
 * read-modify-write of the user futex word at @uaddr.
 *
 * The LL loads the old word into @oldval (%1); @insn must compute the new
 * value into $1 ($at) from %1 (old value) and %z5 (@oparg); the SC then
 * stores $1 back, looping to label 1 until the SC succeeds.  @ret (%0) is
 * preset to 0 through the "0" (0) input constraint; if the LL (label 1) or
 * SC (label 2) faults, the __ex_table entries redirect to the .fixup stub at
 * label 4, which sets @ret to -EFAULT and jumps back to label 3.
 * __WEAK_LLSC_MB provides the barrier after a successful SC.
 *
 * Three paths:
 *  - cpu_has_llsc && R10000_LLSC_WAR: plain ll/sc with the branch-likely
 *    (beqzl) retry required by the R10000 errata workaround.
 *  - cpu_has_llsc: EVA-aware user_ll()/user_sc() with a plain beqz retry;
 *    loongson_llsc_mb() is emitted first (Loongson-3 LL/SC errata
 *    workaround).
 *  - otherwise: no LL/SC support, @ret = -ENOSYS.
 *
 * ".set noat" is in force because the macro uses $1/$at explicitly; ".insn"
 * marks label 3 as code so the jump target assembles correctly.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)		\
{									\
	if (cpu_has_llsc && R10000_LLSC_WAR) {				\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	arch=r4000			\n"	\
		"1:	ll	%1, %4	# __futex_atomic_op	\n"	\
		"	.set	mips0				\n"	\
		"	" insn	"				\n"	\
		"	.set	arch=r4000			\n"	\
		"2:	sc	$1, %2				\n"	\
		"	beqzl	$1, 1b				\n"	\
		__WEAK_LLSC_MB						\
		"3:						\n"	\
		"	.insn					\n"	\
		"	.set	pop				\n"	\
		"	.set	mips0				\n"	\
		"	.section .fixup,\"ax\"			\n"	\
		"4:	li	%0, %6				\n"	\
		"	j	3b				\n"	\
		"	.previous				\n"	\
		"	.section __ex_table,\"a\"		\n"	\
		"	"__UA_ADDR "\t1b, 4b			\n"	\
		"	"__UA_ADDR "\t2b, 4b			\n"	\
		"	.previous				\n"	\
		: "=r" (ret), "=&r" (oldval),				\
		  "=" GCC_OFF_SMALL_ASM() (*uaddr)			\
		: "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg),	\
		  "i" (-EFAULT)						\
		: "memory");						\
	} else if (cpu_has_llsc) {					\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"1:	"user_ll("%1", "%4")" # __futex_atomic_op\n"	\
		"	.set	mips0				\n"	\
		"	" insn	"				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"2:	"user_sc("$1", "%2")"			\n"	\
		"	beqz	$1, 1b				\n"	\
		__WEAK_LLSC_MB						\
		"3:						\n"	\
		"	.insn					\n"	\
		"	.set	pop				\n"	\
		"	.set	mips0				\n"	\
		"	.section .fixup,\"ax\"			\n"	\
		"4:	li	%0, %6				\n"	\
		"	j	3b				\n"	\
		"	.previous				\n"	\
		"	.section __ex_table,\"a\"		\n"	\
		"	"__UA_ADDR "\t1b, 4b			\n"	\
		"	"__UA_ADDR "\t2b, 4b			\n"	\
		"	.previous				\n"	\
		: "=r" (ret), "=&r" (oldval),				\
		  "=" GCC_OFF_SMALL_ASM() (*uaddr)			\
		: "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg),	\
		  "i" (-EFAULT)						\
		: "memory");						\
	} else								\
		ret = -ENOSYS;						\
}
  82. static inline int
  83. arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
  84. {
  85. int oldval = 0, ret;
  86. pagefault_disable();
  87. switch (op) {
  88. case FUTEX_OP_SET:
  89. __futex_atomic_op("move $1, %z5", ret, oldval, uaddr, oparg);
  90. break;
  91. case FUTEX_OP_ADD:
  92. __futex_atomic_op("addu $1, %1, %z5",
  93. ret, oldval, uaddr, oparg);
  94. break;
  95. case FUTEX_OP_OR:
  96. __futex_atomic_op("or $1, %1, %z5",
  97. ret, oldval, uaddr, oparg);
  98. break;
  99. case FUTEX_OP_ANDN:
  100. __futex_atomic_op("and $1, %1, %z5",
  101. ret, oldval, uaddr, ~oparg);
  102. break;
  103. case FUTEX_OP_XOR:
  104. __futex_atomic_op("xor $1, %1, %z5",
  105. ret, oldval, uaddr, oparg);
  106. break;
  107. default:
  108. ret = -ENOSYS;
  109. }
  110. pagefault_enable();
  111. if (!ret)
  112. *oval = oldval;
  113. return ret;
  114. }
/*
 * futex_atomic_cmpxchg_inatomic() - compare-and-exchange the user futex word.
 *
 * Atomically: load *uaddr; if it equals @oldval, store @newval, retrying the
 * LL/SC until the SC succeeds.  The value read (%1) is written back through
 * @uval in either case, so the caller can distinguish a compare mismatch.
 *
 * Returns 0 on success (including a compare mismatch), -EFAULT if @uaddr is
 * not writable user memory or the access faults, -ENOSYS on CPUs without
 * LL/SC.
 *
 * Operand map: %0 = ret ("+r", stays 0 unless the fixup stub stores -EFAULT),
 * %1 = val, %2/%3 = *uaddr (out/in), %z4 = oldval, %z5 = newval, %6 = -EFAULT.
 * Faults at the LL (label 1) or SC (label 2) are routed via __ex_table to the
 * .fixup stub at label 4.  "bne %1, %z4, 3f" exits early on mismatch without
 * storing; __WEAK_LLSC_MB orders the exit path.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		/* R10000 errata workaround: branch-likely (beqzl) retry. */
		__asm__ __volatile__(
		"# futex_atomic_cmpxchg_inatomic			\n"
		"	.set	push					\n"
		"	.set	noat					\n"
		"	.set	arch=r4000				\n"
		"1:	ll	%1, %3					\n"
		"	bne	%1, %z4, 3f				\n"
		"	.set	mips0					\n"
		"	move	$1, %z5					\n"
		"	.set	arch=r4000				\n"
		"2:	sc	$1, %2					\n"
		"	beqzl	$1, 1b					\n"
		__WEAK_LLSC_MB
		"3:							\n"
		"	.insn						\n"
		"	.set	pop					\n"
		"	.section .fixup,\"ax\"				\n"
		"4:	li	%0, %6					\n"
		"	j	3b					\n"
		"	.previous					\n"
		"	.section __ex_table,\"a\"			\n"
		"	"__UA_ADDR "\t1b, 4b				\n"
		"	"__UA_ADDR "\t2b, 4b				\n"
		"	.previous					\n"
		: "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
		: GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
		  "i" (-EFAULT)
		: "memory");
	} else if (cpu_has_llsc) {
		/*
		 * EVA-aware user_ll/user_sc path; loongson_llsc_mb() before
		 * and after is the Loongson-3 LL/SC errata workaround.
		 */
		loongson_llsc_mb();
		__asm__ __volatile__(
		"# futex_atomic_cmpxchg_inatomic			\n"
		"	.set	push					\n"
		"	.set	noat					\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
		"1:	"user_ll("%1", "%3")"				\n"
		"	bne	%1, %z4, 3f				\n"
		"	.set	mips0					\n"
		"	move	$1, %z5					\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
		"2:	"user_sc("$1", "%2")"				\n"
		"	beqz	$1, 1b					\n"
		__WEAK_LLSC_MB
		"3:							\n"
		"	.insn						\n"
		"	.set	pop					\n"
		"	.section .fixup,\"ax\"				\n"
		"4:	li	%0, %6					\n"
		"	j	3b					\n"
		"	.previous					\n"
		"	.section __ex_table,\"a\"			\n"
		"	"__UA_ADDR "\t1b, 4b				\n"
		"	"__UA_ADDR "\t2b, 4b				\n"
		"	.previous					\n"
		: "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
		: GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
		  "i" (-EFAULT)
		: "memory");
		loongson_llsc_mb();
	} else
		return -ENOSYS;

	*uval = val;
	return ret;
}
#endif /* __KERNEL__ */
#endif /* _ASM_FUTEX_H */