locks.h (2.3 KB)
// locks.h - Thread synchronization primitives. PowerPC implementation.

/* Copyright (C) 2002  Free Software Foundation

   This file is part of libgcj.

   This software is copyrighted work licensed under the terms of the
   Libgcj License.  Please consult the file "LIBGCJ_LICENSE" for
   details.  */

#ifndef __SYSDEP_LOCKS_H__
#define __SYSDEP_LOCKS_H__

/* Select the load-reserve / store-conditional mnemonics for the
   pointer width: doubleword (ldarx/stdcx.) on 64-bit, word
   (lwarx/stwcx.) on 32-bit.  The macros are spliced into the inline
   asm templates below.  */
#ifdef __powerpc64__
#define _LARX "ldarx "
#define _STCX "stdcx. "
#else
#define _LARX "lwarx "
#ifdef __PPC405__
/* NOTE(review): the extra sync before stwcx. looks like a PPC405
   erratum workaround -- confirm against the chip errata sheet.  */
#define _STCX "sync; stwcx. "
#else
#define _STCX "stwcx. "
#endif
#endif

typedef size_t obj_addr_t;	/* Integer type big enough for object   */
				/* address.				*/
  22. inline static bool
  23. compare_and_swap (volatile obj_addr_t *addr, obj_addr_t old,
  24. obj_addr_t new_val)
  25. {
  26. int ret;
  27. __asm__ __volatile__ (
  28. "0: " _LARX "%0,0,%1 ;"
  29. " xor. %0,%3,%0;"
  30. " bne 1f;"
  31. " " _STCX "%2,0,%1;"
  32. " bne- 0b;"
  33. "1: "
  34. : "=&r" (ret)
  35. : "r" (addr), "r" (new_val), "r" (old)
  36. : "cr0", "memory");
  37. /* This version of __compare_and_swap is to be used when acquiring
  38. a lock, so we don't need to worry about whether other memory
  39. operations have completed, but we do need to be sure that any loads
  40. after this point really occur after we have acquired the lock. */
  41. __asm__ __volatile__ ("isync" : : : "memory");
  42. return ret == 0;
  43. }
// Set *addr to new_val with release semantics: all memory operations
// issued before this store are made visible to other processors before
// the store itself.  Used to publish a lock release.
inline static void
release_set (volatile obj_addr_t *addr, obj_addr_t new_val)
{
  /* The sync must precede the store; the order of these two
     statements is what provides the release barrier.  */
  __asm__ __volatile__ ("sync" : : : "memory");
  *addr = new_val;
}
  50. inline static bool
  51. compare_and_swap_release (volatile obj_addr_t *addr, obj_addr_t old,
  52. obj_addr_t new_val)
  53. {
  54. int ret;
  55. __asm__ __volatile__ ("sync" : : : "memory");
  56. __asm__ __volatile__ (
  57. "0: " _LARX "%0,0,%1 ;"
  58. " xor. %0,%3,%0;"
  59. " bne 1f;"
  60. " " _STCX "%2,0,%1;"
  61. " bne- 0b;"
  62. "1: "
  63. : "=&r" (ret)
  64. : "r" (addr), "r" (new_val), "r" (old)
  65. : "cr0", "memory");
  66. return ret == 0;
  67. }
// Ensure that subsequent instructions do not execute on stale
// data that was loaded from memory before the barrier.
// NOTE(review): isync discards prefetched instructions; on PowerPC it
// acts as an acquire fence only in combination with a preceding
// dependent conditional branch (as in compare_and_swap above) --
// confirm this call site pattern holds for all users.
inline static void
read_barrier ()
{
  __asm__ __volatile__ ("isync" : : : "memory");
}
// Ensure that prior stores to memory are completed with respect to other
// processors.  sync is a full barrier, which is sufficient (if stronger
// than strictly required for a store-store barrier).
inline static void
write_barrier ()
{
  __asm__ __volatile__ ("sync" : : : "memory");
}
  82. #endif