//===-- tsan_interface_atomic.h ---------------------------------*- C++ -*-===//
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
// Public interface header for TSan atomics.
//===----------------------------------------------------------------------===//
#ifndef TSAN_INTERFACE_ATOMIC_H
#define TSAN_INTERFACE_ATOMIC_H

#ifdef __cplusplus
extern "C" {
#endif

typedef char __tsan_atomic8;
typedef short __tsan_atomic16;  // NOLINT
typedef int __tsan_atomic32;
typedef long __tsan_atomic64;  // NOLINT
#if defined(__SIZEOF_INT128__) \
    || (__clang_major__ * 100 + __clang_minor__ >= 302)
__extension__ typedef __int128 __tsan_atomic128;
# define __TSAN_HAS_INT128 1
#else
# define __TSAN_HAS_INT128 0
#endif

// Part of ABI, do not change.
// http://llvm.org/viewvc/llvm-project/libcxx/trunk/include/atomic?view=markup
typedef enum {
  __tsan_memory_order_relaxed,
  __tsan_memory_order_consume,
  __tsan_memory_order_acquire,
  __tsan_memory_order_release,
  __tsan_memory_order_acq_rel,
  __tsan_memory_order_seq_cst
} __tsan_memory_order;

__tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8 *a,
    __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16 *a,
    __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32 *a,
    __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64 *a,
    __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128 *a,
    __tsan_memory_order mo);
#endif

void __tsan_atomic8_store(volatile __tsan_atomic8 *a, __tsan_atomic8 v,
    __tsan_memory_order mo);
void __tsan_atomic16_store(volatile __tsan_atomic16 *a, __tsan_atomic16 v,
    __tsan_memory_order mo);
void __tsan_atomic32_store(volatile __tsan_atomic32 *a, __tsan_atomic32 v,
    __tsan_memory_order mo);
void __tsan_atomic64_store(volatile __tsan_atomic64 *a, __tsan_atomic64 v,
    __tsan_memory_order mo);
#if __TSAN_HAS_INT128
void __tsan_atomic128_store(volatile __tsan_atomic128 *a, __tsan_atomic128 v,
    __tsan_memory_order mo);
#endif
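
// Illustrative use (not part of the interface): publishing a flag with
// release/acquire ordering through these entry points; the variable names
// below are examples only.
//
//   __tsan_atomic32 ready = 0;
//   // producer:
//   __tsan_atomic32_store(&ready, 1, __tsan_memory_order_release);
//   // consumer:
//   while (__tsan_atomic32_load(&ready, __tsan_memory_order_acquire) == 0) {
//   }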

__tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_exchange(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_add(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_sub(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_sub(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_sub(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_sub(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_sub(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_and(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_or(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_xor(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

__tsan_atomic8 __tsan_atomic8_fetch_nand(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_nand(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_nand(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_nand(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_nand(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif
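
// Illustrative use (not part of the interface): each fetch_* call applies the
// operation atomically and returns the value the object held beforehand, in
// the spirit of the C11 atomic_fetch_* functions. For example, a relaxed
// counter increment:
//
//   __tsan_atomic64 counter = 0;
//   __tsan_atomic64 old =
//       __tsan_atomic64_fetch_add(&counter, 1, __tsan_memory_order_relaxed);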

int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8 *a,
    __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16 *a,
    __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32 *a,
    __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64 *a,
    __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
int __tsan_atomic128_compare_exchange_weak(volatile __tsan_atomic128 *a,
    __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#endif

int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8 *a,
    __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16 *a,
    __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32 *a,
    __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64 *a,
    __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
int __tsan_atomic128_compare_exchange_strong(volatile __tsan_atomic128 *a,
    __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#endif
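
// Illustrative use (not part of the interface), assuming these calls follow
// the C11 compare-exchange convention: on failure the current value of *a is
// written back into *c and 0 is returned. Given some __tsan_atomic32 x, an
// increment loop might look like:
//
//   __tsan_atomic32 expected =
//       __tsan_atomic32_load(&x, __tsan_memory_order_relaxed);
//   while (!__tsan_atomic32_compare_exchange_weak(&x, &expected, expected + 1,
//       __tsan_memory_order_acq_rel, __tsan_memory_order_relaxed)) {
//     // expected now holds the value observed in x; retry with it.
//   }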

__tsan_atomic8 __tsan_atomic8_compare_exchange_val(
    volatile __tsan_atomic8 *a, __tsan_atomic8 c, __tsan_atomic8 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic16 __tsan_atomic16_compare_exchange_val(
    volatile __tsan_atomic16 *a, __tsan_atomic16 c, __tsan_atomic16 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic32 __tsan_atomic32_compare_exchange_val(
    volatile __tsan_atomic32 *a, __tsan_atomic32 c, __tsan_atomic32 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic64 __tsan_atomic64_compare_exchange_val(
    volatile __tsan_atomic64 *a, __tsan_atomic64 c, __tsan_atomic64 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_compare_exchange_val(
    volatile __tsan_atomic128 *a, __tsan_atomic128 c, __tsan_atomic128 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
#endif
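
// Illustrative use (not part of the interface): the *_compare_exchange_val
// variants take the expected value by value and, presumably following the
// __sync_val_compare_and_swap convention, return the value previously stored;
// success can be detected by comparing the return value with the expected one.
//
//   __tsan_atomic32 prev = __tsan_atomic32_compare_exchange_val(&x, 0, 1,
//       __tsan_memory_order_acq_rel, __tsan_memory_order_relaxed);
//   if (prev == 0) {
//     // the swap from 0 to 1 succeeded
//   }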

void __tsan_atomic_thread_fence(__tsan_memory_order mo);
void __tsan_atomic_signal_fence(__tsan_memory_order mo);
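
// Illustrative use (not part of the interface): an explicit fence paired with
// relaxed accesses, in the spirit of C11 atomic_thread_fence; data and ready
// are example variables.
//
//   __tsan_atomic32_store(&data, 42, __tsan_memory_order_relaxed);
//   __tsan_atomic_thread_fence(__tsan_memory_order_release);
//   __tsan_atomic32_store(&ready, 1, __tsan_memory_order_relaxed);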

#ifdef __cplusplus
}  // extern "C"
#endif

#endif  // TSAN_INTERFACE_ATOMIC_H