
/*
 * linux/arch/h8300/kernel/entry.S
 *
 * Yoshinori Sato <ysato@users.sourceforge.jp>
 * David McCullough <davidm@snapgear.com>
 */

/*
 * entry.S
 * exception/interrupt gateways and the system call entry
 */
#include <linux/sys.h>
#include <asm/unistd.h>
#include <asm/setup.h>
#include <asm/segment.h>
#include <asm/linkage.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/errno.h>
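
/*
 * CPU-specific helpers.  The H8/300H lacks multi-bit shifts and
 * block-transfer instructions, so SHLL2/SHLR2 and SAVEREGS/
 * RESTOREREGS are open-coded one step at a time; the H8S variants
 * use shll.l #2 and stm.l/ldm.l instead.  The H8/300H has no EXR
 * register, so its SAVEEXR/RESTOREEXR are empty.
 */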
#if defined(CONFIG_CPU_H8300H)
#define USERRET 8
INTERRUPTS = 64
	.h8300h
	.macro SHLL2 reg
	shll.l	\reg
	shll.l	\reg
	.endm
	.macro SHLR2 reg
	shlr.l	\reg
	shlr.l	\reg
	.endm
	.macro SAVEREGS
	mov.l	er0,@-sp
	mov.l	er1,@-sp
	mov.l	er2,@-sp
	mov.l	er3,@-sp
	.endm
	.macro RESTOREREGS
	mov.l	@sp+,er3
	mov.l	@sp+,er2
	.endm
	.macro SAVEEXR
	.endm
	.macro RESTOREEXR
	.endm
#endif
#if defined(CONFIG_CPU_H8S)
#define USERRET 10
#define USEREXR 8
INTERRUPTS = 128
	.h8300s
	.macro SHLL2 reg
	shll.l	#2,\reg
	.endm
	.macro SHLR2 reg
	shlr.l	#2,\reg
	.endm
	.macro SAVEREGS
	stm.l	er0-er3,@-sp
	.endm
	.macro RESTOREREGS
	ldm.l	@sp+,er2-er3
	.endm
	.macro SAVEEXR
	mov.w	@(USEREXR:16,er0),r1
	mov.w	r1,@(LEXR-LER3:16,sp)		/* copy EXR */
	.endm
	.macro RESTOREEXR
	mov.w	@(LEXR-LER1:16,sp),r1		/* restore EXR */
	mov.b	r1l,r1h
	mov.w	r1,@(USEREXR:16,er0)
	.endm
#endif

/* CPU context save/restore macros. */
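/*
 * SAVE_ALL builds a struct pt_regs frame on the kernel stack; the
 * L* offsets come from asm-offsets.  Bit 4 of CCR serves as the
 * kernel/user mode flag: on entry from user mode the stack is
 * switched from _sw_usp to _sw_ksp and the hardware return frame is
 * copied over from the user stack.  RESTORE_ALL undoes all of this
 * and returns with rte.
 */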
	.macro SAVE_ALL
	mov.l	er0,@-sp
	stc	ccr,r0l				/* check kernel mode */
	btst	#4,r0l
	bne	5f
	/* user mode */
	mov.l	sp,@_sw_usp
	mov.l	@sp,er0				/* restore saved er0 */
	orc	#0x10,ccr			/* switch kernel stack */
	mov.l	@_sw_ksp,sp
	sub.l	#(LRET-LORIG),sp		/* allocate LORIG - LRET */
	SAVEREGS
	mov.l	@_sw_usp,er0
	mov.l	@(USERRET:16,er0),er1		/* copy the RET addr */
	mov.l	er1,@(LRET-LER3:16,sp)
	SAVEEXR
	mov.l	@(LORIG-LER3:16,sp),er0
	mov.l	er0,@(LER0-LER3:16,sp)		/* copy ER0 */
	mov.w	e1,r1				/* e1 high byte = ccr */
	and	#0xef,r1h			/* mask the mode flag */
	bra	6f
5:
	/* kernel mode */
	mov.l	@sp,er0				/* restore saved er0 */
	subs	#2,sp				/* set dummy ccr */
	subs	#4,sp				/* set dummy sp */
	SAVEREGS
	mov.w	@(LRET-LER3:16,sp),r1		/* copy old ccr */
6:
	mov.b	r1h,r1l
	mov.b	#0,r1h
	mov.w	r1,@(LCCR-LER3:16,sp)		/* set ccr */
	mov.l	@_sw_usp,er2
	mov.l	er2,@(LSP-LER3:16,sp)		/* set usp */
	mov.l	er6,@-sp			/* syscall arg #6 */
	mov.l	er5,@-sp			/* syscall arg #5 */
	mov.l	er4,@-sp			/* syscall arg #4 */
	.endm					/* r1 = ccr */
	.macro RESTORE_ALL
	mov.l	@sp+,er4
	mov.l	@sp+,er5
	mov.l	@sp+,er6
	RESTOREREGS
	mov.w	@(LCCR-LER1:16,sp),r0		/* check kernel mode */
	btst	#4,r0l
	bne	7f
	/* user mode: disable interrupts, rebuild the user frame */
	orc	#0xc0,ccr
	mov.l	@(LSP-LER1:16,sp),er0
	mov.l	@(LER0-LER1:16,sp),er1		/* restore ER0 */
	mov.l	er1,@er0
	RESTOREEXR
	mov.w	@(LCCR-LER1:16,sp),r1		/* rebuild CCR:PC return word */
	mov.b	r1l,r1h
	mov.b	@(LRET+1-LER1:16,sp),r1l
	mov.w	r1,e1
	mov.w	@(LRET+2-LER1:16,sp),r1
	mov.l	er1,@(USERRET:16,er0)		/* restore the RET addr */
	mov.l	@sp+,er1
	add.l	#(LRET-LER1),sp			/* remove LORIG - LRET */
	mov.l	sp,@_sw_ksp
	andc	#0xef,ccr			/* switch to user mode */
	mov.l	er0,sp
	bra	8f
7:
	mov.l	@sp+,er1
	add.l	#10,sp
8:
	mov.l	@sp+,er0
	adds	#4,sp				/* remove the sw created LVEC */
	rte
	.endm

	.globl _system_call
	.globl ret_from_exception
	.globl ret_from_fork
	.globl ret_from_kernel_thread
	.globl ret_from_interrupt
	.globl _interrupt_redirect_table
	.globl _sw_ksp,_sw_usp
	.globl _resume
	.globl _interrupt_entry
	.globl _trace_break
	.globl _nmi
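
/*
 * Interrupt redirect table.  Each hardware vector points at one
 * 4-byte slot here; the jsr in a slot pushes the address of the
 * following slot, which _interrupt_entry turns back into a vector
 * number ((offset / 4) - 1).  On ROM kernels the table sits at a
 * fixed link-time address; on RAM kernels _interrupt_redirect_table
 * is a pointer in .bss holding the table's runtime address.
 */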
#if defined(CONFIG_ROMKERNEL)
	.section .int_redirect,"ax"
_interrupt_redirect_table:
#if defined(CONFIG_CPU_H8300H)
	.rept	7
	.long	0
	.endr
#endif
#if defined(CONFIG_CPU_H8S)
	.rept	5
	.long	0
	.endr
	jmp	@_trace_break
	.long	0
#endif
	jsr	@_interrupt_entry		/* NMI */
	jmp	@_system_call			/* TRAPA #0 (System call) */
	.long	0
	.long	0
	jmp	@_trace_break			/* TRAPA #3 (breakpoint) */
	.rept	INTERRUPTS-12
	jsr	@_interrupt_entry
	.endr
#endif
#if defined(CONFIG_RAMKERNEL)
	.globl _interrupt_redirect_table
	.section .bss
_interrupt_redirect_table:
	.space	4
#endif

	.section .text
	.align	2
_interrupt_entry:
	SAVE_ALL
	/* r1l is saved ccr */
	mov.l	sp,er0
	add.l	#LVEC,er0
	btst	#4,r1l
	bne	1f
	/* user LVEC */
	mov.l	@_sw_usp,er0
	adds	#4,er0
1:
	mov.l	@er0,er0			/* LVEC address */
#if defined(CONFIG_ROMKERNEL)
	sub.l	#_interrupt_redirect_table,er0
#endif
#if defined(CONFIG_RAMKERNEL)
	mov.l	@_interrupt_redirect_table,er1
	sub.l	er1,er0
#endif
	SHLR2	er0
	dec.l	#1,er0				/* er0 = vector number */
	mov.l	sp,er1
	subs	#4,er1				/* adjust ret_pc */
#if defined(CONFIG_CPU_H8S)
	orc	#7,exr
#endif
	jsr	@do_IRQ
	jmp	@ret_from_interrupt
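
/*
 * System call entry (TRAPA #0).  The syscall number arrives in er0;
 * arguments are in er1..er6 (er4-er6 live in the pt_regs frame built
 * by SAVE_ALL).  The handler is fetched from _sys_call_table and the
 * first three arguments are reloaded from the saved frame.
 */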
_system_call:
	subs	#4,sp				/* dummy LVEC */
	SAVE_ALL
	/* er0: syscall nr */
	andc	#0xbf,ccr
	mov.l	er0,er4
	/* save top of frame */
	mov.l	sp,er0
	jsr	@set_esp0
	mov.l	sp,er2
	and.w	#0xe000,r2			/* er2 <- current thread info */
	mov.l	@(TI_FLAGS:16,er2),er2
	and.w	#_TIF_WORK_SYSCALL_MASK,r2
	beq	1f
	mov.l	sp,er0
	jsr	@do_syscall_trace_enter
1:
	cmp.l	#__NR_syscalls,er4
	bcc	badsys
	SHLL2	er4
	mov.l	#_sys_call_table,er0
	add.l	er4,er0
	mov.l	@er0,er4
	beq	ret_from_exception:16
	mov.l	@(LER1:16,sp),er0		/* arg #1 */
	mov.l	@(LER2:16,sp),er1		/* arg #2 */
	mov.l	@(LER3:16,sp),er2		/* arg #3 */
	jsr	@er4
	mov.l	er0,@(LER0:16,sp)		/* save the return value */
	mov.l	sp,er2
	and.w	#0xe000,r2			/* er2 <- current thread info */
	mov.l	@(TI_FLAGS:16,er2),er2
	and.w	#_TIF_WORK_SYSCALL_MASK,r2
	beq	2f
	mov.l	sp,er0
	jsr	@do_syscall_trace_leave
2:
	orc	#0xc0,ccr			/* disable interrupts */
	bra	resume_userspace
badsys:
	mov.l	#-ENOSYS,er0
	mov.l	er0,@(LER0:16,sp)
	bra	resume_userspace

#if !defined(CONFIG_PREEMPT)
#define resume_kernel restore_all
#endif
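
/*
 * Common return path.  A return to kernel mode goes through
 * resume_kernel, which is simply restore_all unless CONFIG_PREEMPT
 * is set; a return to user mode first checks TI_FLAGS for pending
 * work (reschedule, signal delivery) in the work_pending loop.
 */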
ret_from_exception:
#if defined(CONFIG_PREEMPT)
	orc	#0xc0,ccr
#endif
ret_from_interrupt:
	mov.b	@(LCCR+1:16,sp),r0l
	btst	#4,r0l
	bne	resume_kernel:16		/* return from kernel */
resume_userspace:
	andc	#0xbf,ccr
	mov.l	sp,er4
	and.w	#0xe000,r4			/* er4 <- current thread info */
	mov.l	@(TI_FLAGS:16,er4),er1
	and.l	#_TIF_WORK_MASK,er1
	beq	restore_all:8
work_pending:
	btst	#TIF_NEED_RESCHED,r1l
	bne	work_resched:8
	/* work notifysig */
	mov.l	sp,er0
	subs	#4,er0				/* er0: pt_regs */
	jsr	@do_notify_resume
	bra	resume_userspace:8
work_resched:
	mov.l	sp,er0
	jsr	@set_esp0
	jsr	@schedule
	bra	resume_userspace:8
restore_all:
	RESTORE_ALL				/* Does RTE */
#if defined(CONFIG_PREEMPT)
resume_kernel:
	mov.l	@(TI_PRE_COUNT:16,er4),er0
	bne	restore_all:8
need_resched:
	mov.l	@(TI_FLAGS:16,er4),er0
	btst	#TIF_NEED_RESCHED,r0l
	beq	restore_all:8
	mov.b	@(LCCR+1:16,sp),r0l		/* Interrupt Enabled? */
	bmi	restore_all:8
	mov.l	sp,er0
	jsr	@set_esp0
	jsr	@preempt_schedule_irq
	bra	need_resched:8
#endif
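
/*
 * Both fork and kernel-thread children resume here after a context
 * switch: prev is handed over in er2 for schedule_tail().  A kernel
 * thread then calls its function (saved er5) with its argument
 * (saved er4) before falling into the normal exit path.
 */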
ret_from_fork:
	mov.l	er2,er0
	jsr	@schedule_tail
	jmp	@ret_from_exception
ret_from_kernel_thread:
	mov.l	er2,er0
	jsr	@schedule_tail
	mov.l	@(LER4:16,sp),er0
	mov.l	@(LER5:16,sp),er1
	jsr	@er1
	jmp	@ret_from_exception
_resume:
	/*
	 * Beware - when entering resume, prev (the current task) is
	 * in er0 and next (the new task) is in er1, so don't change
	 * these registers until their contents are no longer needed.
	 */
	/* save ccr */
	sub.w	r3,r3
	stc	ccr,r3l
	mov.w	r3,@(THREAD_CCR+2:16,er0)
	/* disable interrupts */
	orc	#0xc0,ccr
	mov.l	@_sw_usp,er3
	mov.l	er3,@(THREAD_USP:16,er0)
	mov.l	sp,@(THREAD_KSP:16,er0)
	/* Skip address space switching if they are the same. */
	/* FIXME: what did we hack out of here, this does nothing! */
	mov.l	@(THREAD_USP:16,er1),er0
	mov.l	er0,@_sw_usp
	mov.l	@(THREAD_KSP:16,er1),sp
	/* restore status register */
	mov.w	@(THREAD_CCR+2:16,er1),r3
	ldc	r3l,ccr
	rts
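
/*
 * Breakpoint trap (TRAPA #3).  The word preceding the saved user PC
 * is compared against 0x5730, the trapa #3 opcode; if it does not
 * match, the PC is wound back by one word before trace_trap() is
 * called.
 */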
_trace_break:
	subs	#4,sp
	SAVE_ALL
	sub.l	er1,er1
	dec.l	#1,er1
	mov.l	er1,@(LORIG,sp)			/* orig_er0 = -1 */
	mov.l	sp,er0
	jsr	@set_esp0
	mov.l	@_sw_usp,er0
	mov.l	@er0,er1
	mov.w	@(-2:16,er1),r2
	cmp.w	#0x5730,r2			/* trapa #3 opcode? */
	beq	1f
	subs	#2,er1
	mov.l	er1,@er0
1:
	and.w	#0xff,e1
	mov.l	er1,er0
	jsr	@trace_trap
	jmp	@ret_from_exception
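
/*
 * NMI entry.  Fake the software-pushed LVEC so the frame looks as if
 * it came through the NMI slot of the redirect table (a jsr in slot
 * 7 would push table + 8*4), then join the common interrupt path.
 */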
_nmi:
	subs	#4,sp
	mov.l	er0,@-sp
	mov.l	@_interrupt_redirect_table,er0
	add.l	#8*4,er0
	mov.l	er0,@(4,sp)
	mov.l	@sp+,er0
	jmp	@_interrupt_entry

	.section .bss
_sw_ksp:
	.space	4
_sw_usp:
	.space	4
	.end