/* arch/sparc64/kernel/ktlb.S: Kernel mapping TLB miss handling.
 *
 * Copyright (C) 1995, 1997, 2005, 2008 David S. Miller <davem@davemloft.net>
 * Copyright (C) 1996 Eddie C. Dost (ecd@brainaid.de)
 * Copyright (C) 1996 Miguel de Icaza (miguel@nuclecu.unam.mx)
 * Copyright (C) 1996,98,99 Jakub Jelinek (jj@sunsite.mff.cuni.cz)
 */

#include <asm/head.h>
#include <asm/asi.h>
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/tsb.h>

	.text
	.align	32
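
/* Instruction TLB miss on a kernel address: fetch the missing virtual
 * address from the I-MMU Tag Access register.  The TSB tag target is
 * already in %g6.
 */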
kvmap_itlb:
	/* g6: TAG TARGET */
	mov	TLB_TAG_ACCESS, %g4
	ldxa	[%g4] ASI_IMMU, %g4

	/* sun4v_itlb_miss branches here with the missing virtual
	 * address already loaded into %g4
	 */
kvmap_itlb_4v:
	/* Catch kernel NULL pointer calls. */
	sethi	%hi(PAGE_SIZE), %g5
	cmp	%g4, %g5
	blu,pn	%xcc, kvmap_itlb_longpath
	nop

	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_itlb_load)
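
/* The kernel TSB probe missed: classify the faulting address.  Below
 * LOW_OBP_ADDRESS it is treated as a vmalloc/module mapping, between
 * LOW_OBP_ADDRESS and 4GB as an OBP (OpenBoot PROM) mapping, and
 * anything above 4GB falls through to the page table walk as well.
 */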
kvmap_itlb_tsb_miss:
	sethi	%hi(LOW_OBP_ADDRESS), %g5
	cmp	%g4, %g5
	blu,pn	%xcc, kvmap_itlb_vmalloc_addr
	mov	0x1, %g5
	sllx	%g5, 32, %g5
	cmp	%g4, %g5
	blu,pn	%xcc, kvmap_itlb_obp
	nop
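
/* vmalloc/module text: walk the kernel page tables, then cache the
 * resulting PTE in the kernel TSB before loading the TLB.
 */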
kvmap_itlb_vmalloc_addr:
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)
	TSB_WRITE(%g1, %g5, %g6)

	/* fallthrough to TLB load */

kvmap_itlb_load:

661:	stxa	%g5, [%g0] ASI_ITLB_DATA_IN
	retry
	.section	.sun4v_2insn_patch, "ax"
	.word	661b
	nop
	nop
	.previous

	/* For sun4v the ASI_ITLB_DATA_IN store and the retry
	 * instruction get nop'd out and we get here to branch
	 * to the sun4v tlb load code.  The registers are set up
	 * as follows:
	 *
	 * %g4: vaddr
	 * %g5: PTE
	 * %g6: TAG
	 *
	 * The sun4v TLB load wants the PTE in %g3 so we fix that
	 * up here.
	 */
	ba,pt	%xcc, sun4v_itlb_load
	mov	%g5, %g3
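
/* Slow path: switch global register sets (sun4u toggles PSTATE_AG/PSTATE_MG,
 * sun4v raises the global level via SET_GL(1)), then hand off to
 * sparc64_realfault_common with the faulting PC in %g5 and FAULT_CODE_ITLB
 * in %g4.
 */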
kvmap_itlb_longpath:

661:	rdpr	%pstate, %g5
	wrpr	%g5, PSTATE_AG | PSTATE_MG, %pstate
	.section	.sun4v_2insn_patch, "ax"
	.word	661b
	SET_GL(1)
	nop
	.previous

	rdpr	%tpc, %g5
	ba,pt	%xcc, sparc64_realfault_common
	mov	FAULT_CODE_ITLB, %g4
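
/* OBP (OpenBoot PROM) mappings: translate through the recorded firmware
 * translations, cache the result in the kernel TSB, then load the TLB.
 */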
kvmap_itlb_obp:
	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_itlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)
	TSB_WRITE(%g1, %g5, %g6)

	ba,pt	%xcc, kvmap_itlb_load
	nop

kvmap_dtlb_obp:
	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_dtlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)
	TSB_WRITE(%g1, %g5, %g6)

	ba,pt	%xcc, kvmap_dtlb_load
	nop
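
/* Early-boot handling of linear-mapping addresses: before the kernel page
 * tables fully map the linear region, the PTE is formed by XOR'ing the
 * vaddr with the precomputed kern_linear_pte_xor value and is inserted
 * into the kernel TSB via kvmap_dtlb_tsb4m_load.
 */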
kvmap_linear_early:
	sethi	%hi(kern_linear_pte_xor), %g7
	ldx	[%g7 + %lo(kern_linear_pte_xor)], %g2
	ba,pt	%xcc, kvmap_dtlb_tsb4m_load
	xor	%g2, %g4, %g5

	.align	32
kvmap_dtlb_tsb4m_load:
	TSB_LOCK_TAG(%g1, %g2, %g7)
	TSB_WRITE(%g1, %g5, %g6)
	ba,pt	%xcc, kvmap_dtlb_load
	nop
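
/* Data TLB miss on a kernel address.  Linear-mapping addresses have the
 * sign bit set, so brgez routes any non-negative vaddr to the non-linear
 * handler below.
 */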
kvmap_dtlb:
	/* %g6: TAG TARGET */
	mov	TLB_TAG_ACCESS, %g4
	ldxa	[%g4] ASI_DMMU, %g4

	/* sun4v_dtlb_miss branches here with the missing virtual
	 * address already loaded into %g4
	 */
kvmap_dtlb_4v:
	brgez,pn	%g4, kvmap_dtlb_nonlinear
	nop

#ifdef CONFIG_DEBUG_PAGEALLOC
	/* Index through the base page size TSB even for linear
	 * mappings when using page allocation debugging.
	 */
	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#else
	/* Correct TAG_TARGET is already in %g6, check 4mb TSB. */
	KERN_TSB4M_LOOKUP_TL1(%g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#endif
	/* Linear mapping TSB lookup failed.  Fallthrough to kernel
	 * page table based lookup.
	 */
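	/* This branch is the early-boot default; it is expected to be
	 * nop'd out by the boot code once the kernel page tables fully
	 * map the linear region, so that later misses fall through to
	 * the page table walk below.
	 */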
	.globl	kvmap_linear_patch
kvmap_linear_patch:
	ba,a,pt	%xcc, kvmap_linear_early

kvmap_dtlb_vmalloc_addr:
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)
	TSB_WRITE(%g1, %g5, %g6)

	/* fallthrough to TLB load */

kvmap_dtlb_load:

661:	stxa	%g5, [%g0] ASI_DTLB_DATA_IN	! Reload TLB
	retry
	.section	.sun4v_2insn_patch, "ax"
	.word	661b
	nop
	nop
	.previous

	/* For sun4v the ASI_DTLB_DATA_IN store and the retry
	 * instruction get nop'd out and we get here to branch
	 * to the sun4v tlb load code.  The registers are set up
	 * as follows:
	 *
	 * %g4: vaddr
	 * %g5: PTE
	 * %g6: TAG
	 *
	 * The sun4v TLB load wants the PTE in %g3 so we fix that
	 * up here.
	 */
	ba,pt	%xcc, sun4v_dtlb_load
	mov	%g5, %g3
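
/* vmemmap addresses are resolved with a kernel page table walk and the
 * result is loaded straight into the TLB, bypassing the TSB.
 */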
#ifdef CONFIG_SPARSEMEM_VMEMMAP
kvmap_vmemmap:
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)
	ba,a,pt	%xcc, kvmap_dtlb_load
#endif

kvmap_dtlb_nonlinear:
	/* Catch kernel NULL pointer derefs. */
	sethi	%hi(PAGE_SIZE), %g5
	cmp	%g4, %g5
	bleu,pn	%xcc, kvmap_dtlb_longpath
	nop

#ifdef CONFIG_SPARSEMEM_VMEMMAP
	/* Do not use the TSB for vmemmap. */
	sethi	%hi(VMEMMAP_BASE), %g5
	ldx	[%g5 + %lo(VMEMMAP_BASE)], %g5
	cmp	%g4, %g5
	bgeu,pn	%xcc, kvmap_vmemmap
	nop
#endif

	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
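
/* TSB miss on a non-linear address: make sure it lies inside the
 * [MODULES_VADDR, VMALLOC_END) window before doing any further work.
 */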
kvmap_dtlb_tsbmiss:
	sethi	%hi(MODULES_VADDR), %g5
	cmp	%g4, %g5
	blu,pn	%xcc, kvmap_dtlb_longpath
	sethi	%hi(VMALLOC_END), %g5
	ldx	[%g5 + %lo(VMALLOC_END)], %g5
	cmp	%g4, %g5
	bgeu,pn	%xcc, kvmap_dtlb_longpath
	nop
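
/* Distinguish OBP firmware mappings (LOW_OBP_ADDRESS up to 4GB) from
 * ordinary vmalloc/module addresses; everything else goes through the
 * kernel page table walk.
 */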
kvmap_check_obp:
	sethi	%hi(LOW_OBP_ADDRESS), %g5
	cmp	%g4, %g5
	blu,pn	%xcc, kvmap_dtlb_vmalloc_addr
	mov	0x1, %g5
	sllx	%g5, 32, %g5
	cmp	%g4, %g5
	blu,pn	%xcc, kvmap_dtlb_obp
	nop
	ba,pt	%xcc, kvmap_dtlb_vmalloc_addr
	nop
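
/* Slow path: switch global register sets as in kvmap_itlb_longpath,
 * recover the fault address (TLB Tag Access on sun4u, the hypervisor
 * fault status area on sun4v), then go to sparc64_realfault_common at
 * TL=1 or to winfix_trampoline for nested (TL>1) misses.
 */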
kvmap_dtlb_longpath:

661:	rdpr	%pstate, %g5
	wrpr	%g5, PSTATE_AG | PSTATE_MG, %pstate
	.section	.sun4v_2insn_patch, "ax"
	.word	661b
	SET_GL(1)
	ldxa	[%g0] ASI_SCRATCHPAD, %g5
	.previous

	rdpr	%tl, %g3
	cmp	%g3, 1

661:	mov	TLB_TAG_ACCESS, %g4
	ldxa	[%g4] ASI_DMMU, %g5
	.section	.sun4v_2insn_patch, "ax"
	.word	661b
	ldx	[%g5 + HV_FAULT_D_ADDR_OFFSET], %g5
	nop
	.previous

	be,pt	%xcc, sparc64_realfault_common
	mov	FAULT_CODE_DTLB, %g4
	ba,pt	%xcc, winfix_trampoline
	nop