locks.h

// locks.h - Thread synchronization primitives.  Sparc implementation.

/* Copyright (C) 2002, 2007  Free Software Foundation

   This file is part of libgcj.

   This software is copyrighted work licensed under the terms of the
   Libgcj License.  Please consult the file "LIBGCJ_LICENSE" for
   details.  */

#ifndef __SYSDEP_LOCKS_H__
#define __SYSDEP_LOCKS_H__

typedef size_t obj_addr_t;  /* Integer type big enough for an object address.  */

#ifdef __arch64__
/* Sparc64 implementation, use the cas instruction.  */
inline static bool
compare_and_swap(volatile obj_addr_t *addr,
                 obj_addr_t old,
                 obj_addr_t new_val)
{
  /* casx writes the previous contents of *addr back into new_val;
     the trailing membar orders the CAS store before later loads and stores.  */
  __asm__ __volatile__("casx [%2], %3, %0\n\t"
                       "membar #StoreLoad | #StoreStore"
                       : "=&r" (new_val)
                       : "0" (new_val), "r" (addr), "r" (old)
                       : "memory");
  return (new_val == old) ? true : false;
}

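/* In C terms, the casx sequence above behaves like the following,
   performed as a single atomic step (sketch only, not extra API):

       obj_addr_t seen = *addr;
       if (seen == old)
         *addr = new_val;
       new_val = seen;
       return new_val == old;
*/
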
inline static void
release_set(volatile obj_addr_t *addr, obj_addr_t new_val)
{
  /* Make sure all prior loads and stores complete before the
     releasing store of new_val.  */
  __asm__ __volatile__("membar #StoreStore | #LoadStore" : : : "memory");
  *(addr) = new_val;
}

inline static bool
compare_and_swap_release(volatile obj_addr_t *addr, obj_addr_t old,
                         obj_addr_t new_val)
{
  return compare_and_swap(addr, old, new_val);
}

inline static void
read_barrier()
{
  __asm__ __volatile__("membar #LoadLoad | #LoadStore" : : : "memory");
}

inline static void
write_barrier()
{
  __asm__ __volatile__("membar #StoreLoad | #StoreStore" : : : "memory");
}

#else
/* Sparc32 implementation, use a spinlock.  */
static unsigned char __cas_lock = 0;

inline static void
__cas_start_atomic(void)
{
  unsigned int tmp;

  /* Acquire the global spinlock: ldstub atomically reads the lock
     byte and sets it to 0xff; a zero result means the lock was free
     and is now ours.  Otherwise spin on plain loads (ldub) until the
     byte reads zero again, then retry the ldstub.  */
  __asm__ __volatile__(
    "1: ldstub [%1], %0\n"
    "   orcc %0, 0x0, %%g0\n"
    "   be 3f\n"
    "    nop\n"
    "2: ldub [%1], %0\n"
    "   orcc %0, 0x0, %%g0\n"
    "   bne 2b\n"
    "    nop\n"
    "   ba,a 1b\n"
    "3:" : "=&r" (tmp)
         : "r" (&__cas_lock)
         : "memory", "cc");
}

inline static void
__cas_end_atomic(void)
{
  /* Release the spinlock by storing zero (%g0) back into the lock byte.  */
  __asm__ __volatile__(
    "stb %%g0, [%0]"
    : /* no outputs */
    : "r" (&__cas_lock)
    : "memory");
}

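/* A rough C equivalent of the two helpers above, assuming a
   hypothetical atomic_test_and_set() with ldstub semantics
   (atomically read the lock byte and store 0xff into it):

       void __cas_start_atomic (void)
       {
         while (atomic_test_and_set (&__cas_lock) != 0)
           while (__cas_lock != 0)
             ;
       }

       void __cas_end_atomic (void)
       {
         __cas_lock = 0;
       }

   Spinning on the plain load keeps the retry path from issuing a
   bus-locking ldstub on every iteration.  */
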
inline static bool
compare_and_swap(volatile obj_addr_t *addr,
                 obj_addr_t old,
                 obj_addr_t new_val)
{
  bool ret;

  __cas_start_atomic ();
  if (*addr != old)
    {
      ret = false;
    }
  else
    {
      *addr = new_val;
      ret = true;
    }
  __cas_end_atomic ();
  return ret;
}

inline static void
release_set(volatile obj_addr_t *addr, obj_addr_t new_val)
{
  /* Technically stbar would be needed here but no sparc32
     system actually requires it.  Also the stbar would mean
     this code would not work on sparcv7 chips.  */
  __asm__ __volatile__("" : : : "memory");
  *(addr) = new_val;
}

inline static bool
compare_and_swap_release(volatile obj_addr_t *addr, obj_addr_t old,
                         obj_addr_t new_val)
{
  return compare_and_swap(addr, old, new_val);
}

inline static void
read_barrier()
{
  __asm__ __volatile__ ("" : : : "memory");
}

inline static void
write_barrier()
{
  __asm__ __volatile__ ("" : : : "memory");
}
#endif /* __arch64__ */

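/* Usage sketch (illustrative only, not part of the libgcj API): the
   primitives above are enough to build a simple spin lock.  The
   example_* helpers below are hypothetical and compiled out.  */
#if 0
inline static void
example_spin_lock (volatile obj_addr_t *lock_word)
{
  /* Spin until the word is atomically changed from 0 (free) to 1 (held).  */
  while (! compare_and_swap (lock_word, 0, 1))
    ;
}

inline static void
example_spin_unlock (volatile obj_addr_t *lock_word)
{
  /* release_set orders prior loads and stores before the releasing store.  */
  release_set (lock_word, 0);
}
#endif
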
#endif /* ! __SYSDEP_LOCKS_H__ */