#ifndef _ASM_IA64_INTEL_INTRIN_H
#define _ASM_IA64_INTEL_INTRIN_H
/*
 * Intel Compiler Intrinsics
 *
 * Copyright (C) 2002,2003 Jun Nakajima <jun.nakajima@intel.com>
 * Copyright (C) 2002,2003 Suresh Siddha <suresh.b.siddha@intel.com>
 * Copyright (C) 2005,2006 Hongjiu Lu <hongjiu.lu@intel.com>
 *
 */
#include <ia64intrin.h>

#define ia64_barrier()		__memory_barrier()

#define ia64_stop()	/* Nothing: As of now stop bit is generated for each
			 * intrinsic
			 */
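
/* Register access, hints, and fixed-point/byte-permutation operations */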
#define ia64_native_getreg	__getReg
#define ia64_native_setreg	__setReg

#define ia64_hint		__hint
#define ia64_hint_pause		__hint_pause

#define ia64_mux1_brcst		_m64_mux1_brcst
#define ia64_mux1_mix		_m64_mux1_mix
#define ia64_mux1_shuf		_m64_mux1_shuf
#define ia64_mux1_alt		_m64_mux1_alt
#define ia64_mux1_rev		_m64_mux1_rev

#define ia64_mux1(x,v)		_m_to_int64(_m64_mux1(_m_from_int64(x), (v)))
#define ia64_popcnt		_m64_popcnt
#define ia64_getf_exp		__getf_exp
#define ia64_shrp		_m64_shrp
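
/* Address translation, ALAT invalidation, PSR user/system mask, cache flush */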
#define ia64_tpa		__tpa
#define ia64_invala		__invala
#define ia64_invala_gr		__invala_gr
#define ia64_invala_fr		__invala_fr
#define ia64_nop		__nop
#define ia64_sum		__sum
#define ia64_native_ssm		__ssm
#define ia64_rum		__rum
#define ia64_native_rsm		__rsm
#define ia64_native_fc		__fc
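
/* Floating-point register loads, stores, spill and fill */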
#define ia64_ldfs		__ldfs
#define ia64_ldfd		__ldfd
#define ia64_ldfe		__ldfe
#define ia64_ldf8		__ldf8
#define ia64_ldf_fill		__ldf_fill

#define ia64_stfs		__stfs
#define ia64_stfd		__stfd
#define ia64_stfe		__stfe
#define ia64_stf8		__stf8
#define ia64_stf_spill		__stf_spill
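
/* Memory fences */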
#define ia64_mf			__mf
#define ia64_mfa		__mfa
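
/* Atomic fetch-and-add with acquire/release ordering */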
#define ia64_fetchadd4_acq	__fetchadd4_acq
#define ia64_fetchadd4_rel	__fetchadd4_rel
#define ia64_fetchadd8_acq	__fetchadd8_acq
#define ia64_fetchadd8_rel	__fetchadd8_rel
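
/*
 * Atomic exchange and compare-and-exchange; the _acq/_rel cmpxchg variants
 * select acquire or release memory-ordering semantics.
 */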
#define ia64_xchg1		_InterlockedExchange8
#define ia64_xchg2		_InterlockedExchange16
#define ia64_xchg4		_InterlockedExchange
#define ia64_xchg8		_InterlockedExchange64

#define ia64_cmpxchg1_rel	_InterlockedCompareExchange8_rel
#define ia64_cmpxchg1_acq	_InterlockedCompareExchange8_acq
#define ia64_cmpxchg2_rel	_InterlockedCompareExchange16_rel
#define ia64_cmpxchg2_acq	_InterlockedCompareExchange16_acq
#define ia64_cmpxchg4_rel	_InterlockedCompareExchange_rel
#define ia64_cmpxchg4_acq	_InterlockedCompareExchange_acq
#define ia64_cmpxchg8_rel	_InterlockedCompareExchange64_rel
#define ia64_cmpxchg8_acq	_InterlockedCompareExchange64_acq
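
/*
 * Indexed register files: data/instruction breakpoint, protection key,
 * performance monitor, and region registers, plus CPUID.
 */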
#define __ia64_set_dbr(index, val)	\
		__setIndReg(_IA64_REG_INDR_DBR, index, val)
#define ia64_set_ibr(index, val)	\
		__setIndReg(_IA64_REG_INDR_IBR, index, val)
#define ia64_set_pkr(index, val)	\
		__setIndReg(_IA64_REG_INDR_PKR, index, val)
#define ia64_set_pmc(index, val)	\
		__setIndReg(_IA64_REG_INDR_PMC, index, val)
#define ia64_set_pmd(index, val)	\
		__setIndReg(_IA64_REG_INDR_PMD, index, val)
#define ia64_native_set_rr(index, val)	\
		__setIndReg(_IA64_REG_INDR_RR, index, val)

#define ia64_native_get_cpuid(index)	\
		__getIndReg(_IA64_REG_INDR_CPUID, index)
#define __ia64_get_dbr(index)		__getIndReg(_IA64_REG_INDR_DBR, index)
#define ia64_get_ibr(index)		__getIndReg(_IA64_REG_INDR_IBR, index)
#define ia64_get_pkr(index)		__getIndReg(_IA64_REG_INDR_PKR, index)
#define ia64_get_pmc(index)		__getIndReg(_IA64_REG_INDR_PMC, index)
#define ia64_native_get_pmd(index)	__getIndReg(_IA64_REG_INDR_PMD, index)
#define ia64_native_get_rr(index)	__getIndReg(_IA64_REG_INDR_RR, index)
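
/* Data and instruction serialization */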
#define ia64_srlz_d		__dsrlz
#define ia64_srlz_i		__isrlz

#define ia64_dv_serialize_data()
#define ia64_dv_serialize_instruction()
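
/* Ordered stores (release) and loads (acquire) */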
#define ia64_st1_rel		__st1_rel
#define ia64_st2_rel		__st2_rel
#define ia64_st4_rel		__st4_rel
#define ia64_st8_rel		__st8_rel

/* FIXME: need st4.rel.nta intrinsic */
#define ia64_st4_rel_nta	__st4_rel

#define ia64_ld1_acq		__ld1_acq
#define ia64_ld2_acq		__ld2_acq
#define ia64_ld4_acq		__ld4_acq
#define ia64_ld8_acq		__ld8_acq
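
/* Instruction stream sync and TLB insert/purge operations */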
#define ia64_sync_i		__synci
#define ia64_native_thash	__thash
#define ia64_native_ttag	__ttag
#define ia64_itcd		__itcd
#define ia64_itci		__itci
#define ia64_itrd		__itrd
#define ia64_itri		__itri
#define ia64_ptce		__ptce
#define ia64_ptcl		__ptcl
#define ia64_native_ptcg	__ptcg
#define ia64_native_ptcga	__ptcga
#define ia64_ptri		__ptri
#define ia64_ptrd		__ptrd
#define ia64_dep_mi		_m64_dep_mi

/* Values for lfhint in __lfetch and __lfetch_fault */

#define ia64_lfhint_none	__lfhint_none
#define ia64_lfhint_nt1		__lfhint_nt1
#define ia64_lfhint_nt2		__lfhint_nt2
#define ia64_lfhint_nta		__lfhint_nta

#define ia64_lfetch		__lfetch
#define ia64_lfetch_excl	__lfetch_excl
#define ia64_lfetch_fault	__lfetch_fault
#define ia64_lfetch_fault_excl	__lfetch_fault_excl
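
/*
 * Restore local interrupt state: if the saved PSR.I bit was set, re-enable
 * interrupts (ssm) and serialize; otherwise keep them disabled (rsm).
 */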
#define ia64_native_intrin_local_irq_restore(x)	\
do {							\
	if ((x) != 0) {					\
		ia64_native_ssm(IA64_PSR_I);		\
		ia64_srlz_d();				\
	} else {					\
		ia64_native_rsm(IA64_PSR_I);		\
	}						\
} while (0)

#define __builtin_trap()	__break(0);

#endif /* _ASM_IA64_INTEL_INTRIN_H */