/* lkmc/aarch64.h */
  1. #ifndef LKMC_AARCH64_H
  2. #define LKMC_AARCH64_H
  3. #include <lkmc/arm_aarch64.h>
/*
 * Assert that 64-bit register `reg` equals `const`, an ldr source operand
 * (typically an `=value` literal). Reports the call-site line on failure:
 * __LINE__ expands at the macro invocation, not here.
 * Clobbers x0, x1, w2 and x30 (via bl).
 * NOTE(review): the parameter is named `const`; that is a C keyword, which
 * works at the preprocessing level but a rename would be cleaner.
 */
#define LKMC_ASSERT_EQ(reg, const) \
mov x0, reg; \
ldr x1, const; \
ldr w2, =__LINE__; \
bl lkmc_assert_eq_64; \
;
/*
 * Assert that two 64-bit registers hold equal values.
 * reg2 is staged through the stack first so the macro works even when
 * reg1 or reg2 is x0 or x1 (reg2 is saved before x0 is overwritten, and
 * x1 is only loaded after reg1 has been copied to x0).
 * Clobbers x0, x1, w2 and x30 (via bl).
 */
#define LKMC_ASSERT_EQ_REG(reg1, reg2) \
str reg2, [sp, -16]!; \
mov x0, reg1; \
ldr x1, [sp], 16; \
ldr w2, =__LINE__; \
bl lkmc_assert_eq_64; \
;
/*
 * 32-bit variant of LKMC_ASSERT_EQ_REG: assert two w registers are equal.
 * The stack slot is still 16 bytes to keep sp 16-byte aligned as AArch64
 * requires; only the low 32 bits are stored/reloaded.
 * Clobbers w0, w1, w2 and x30 (via bl).
 */
#define LKMC_ASSERT_EQ_REG_32(reg1, reg2) \
str reg2, [sp, -16]!; \
mov w0, reg1; \
ldr w1, [sp], 16; \
ldr w2, =__LINE__; \
bl lkmc_assert_eq_32; \
;
/*
 * Unconditionally fail the test, reporting the call-site line number.
 * Clobbers w0 and x30 (via bl).
 */
#define LKMC_ASSERT_FAIL \
ldr w0, =__LINE__; \
bl lkmc_assert_fail; \
;
/*
 * Assert that `size` bytes at label1 and label2 compare equal.
 * `size` is an ldr source operand (typically an `=value` literal).
 * adr limits label1/label2 to +-1 MiB of PC.
 * Note: __LINE__ goes in x3 here (not w2 as in the other asserts),
 * matching lkmc_assert_memcmp's 4-argument signature.
 * Clobbers x0-x3 and x30 (via bl).
 */
#define LKMC_ASSERT_MEMCMP(label1, label2, size) \
adr x0, label1; \
adr x1, label2; \
ldr x2, size; \
ldr x3, =__LINE__; \
bl lkmc_assert_memcmp; \
;
/* https://cirosantilli.com/linux-kernel-module-cheat#arm-calling-convention */
/*
 * Tear down the frame built by LKMC_PROLOGUE: restore the callee-saved
 * registers x19-x28 plus x29 (FP) and x30 (LR), release the 0x60-byte
 * frame, and return 0 (success) from main. The offsets are the exact
 * mirror of LKMC_PROLOGUE's stores and must stay in sync with it.
 */
#define LKMC_EPILOGUE \
ldp x19, x20, [sp, 0x50]; \
ldp x21, x22, [sp, 0x40]; \
ldp x23, x24, [sp, 0x30]; \
ldp x25, x26, [sp, 0x20]; \
ldp x27, x28, [sp, 0x10]; \
ldp x29, x30, [sp]; \
add sp, sp, 0x60; \
mov x0, 0; \
ret; \
;
/* https://cirosantilli.com/linux-kernel-module-cheat#arm-calling-convention */
/*
 * Emit the entry point of an assembly example: define the global `main`
 * symbol in .text, allocate a 0x60-byte stack frame, and save all
 * callee-saved registers (x19-x28, x29/FP, x30/LR) so the example body
 * may clobber them freely; LKMC_EPILOGUE undoes this.
 * `main_after_prologue` labels the first instruction of the body
 * (presumably a breakpoint/test-harness anchor -- it is not referenced
 * anywhere in this header).
 */
#define LKMC_PROLOGUE \
.text; \
.global main; \
main: \
sub sp, sp, 0x60; \
stp x29, x30, [sp]; \
stp x27, x28, [sp, 0x10]; \
stp x25, x26, [sp, 0x20]; \
stp x23, x24, [sp, 0x30]; \
stp x21, x22, [sp, 0x40]; \
stp x19, x20, [sp, 0x50]; \
main_after_prologue: \
;
/** Fields of system registers. */
/* Counter: https://cirosantilli.com/linux-kernel-module-cheat#arm-timer */
/* CNTV_CTL_EL0 (virtual timer control register) bits. */
#define LKMC_SYSREG_CNTV_CTL_ENABLE (1 << 0) /* Timer enable. */
#define LKMC_SYSREG_CNTV_CTL_IMASK (1 << 1) /* Timer interrupt mask. */
#define LKMC_SYSREG_CNTV_CTL_ISTATUS (1 << 2) /* Timer condition met (read-only). */
/* DAIF. */
/* DAIF exception mask bits: 1 = that exception class is masked. */
#define LKMC_SYSREG_BITS_DAIF_FIQ (1<<0)
#define LKMC_SYSREG_BITS_DAIF_IRQ (1<<1)
#define LKMC_SYSREG_BITS_DAIF_ABT (1<<2) /* SError (asynchronous abort). */
#define LKMC_SYSREG_BITS_DAIF_DBG (1<<3) /* Debug exceptions. */
/* LKMC_VECTOR_*
*
* https://cirosantilli.com/linux-kernel-module-cheat#armv8-exception-vector-table-format
*
* Exception type IDs stored into the trap frame's exc_type: the low
* nibble encodes the kind (1 sync, 2 IRQ, 3 FIQ, 4 SError) and the high
* nibble the source (0 = current EL with SP0, 1 = current EL with SPx,
* 2 = lower EL AArch64, 3 = lower EL AArch32), mirroring the layout of
* the 16 vector table entries.
*/
#define LKMC_VECTOR_SYNC_SP0 (0x1)
#define LKMC_VECTOR_IRQ_SP0 (0x2)
#define LKMC_VECTOR_FIQ_SP0 (0x3)
#define LKMC_VECTOR_SERR_SP0 (0x4)
#define LKMC_VECTOR_SYNC_SPX (0x11)
#define LKMC_VECTOR_IRQ_SPX (0x12)
#define LKMC_VECTOR_FIQ_SPX (0x13)
#define LKMC_VECTOR_SERR_SPX (0x14)
#define LKMC_VECTOR_SYNC_AARCH64 (0x21)
#define LKMC_VECTOR_IRQ_AARCH64 (0x22)
#define LKMC_VECTOR_FIQ_AARCH64 (0x23)
#define LKMC_VECTOR_SERR_AARCH64 (0x24)
#define LKMC_VECTOR_SYNC_AARCH32 (0x31)
#define LKMC_VECTOR_IRQ_AARCH32 (0x32)
#define LKMC_VECTOR_FIQ_AARCH32 (0x33)
#define LKMC_VECTOR_SERR_AARCH32 (0x34)
/* Trap frame size and field offsets, for use from assembly. These must
 * stay in sync with the LkmcVectorExceptionFrame struct below and with
 * the push sequence in LKMC_VECTOR_BUILD_TRAPFRAME. */
#define LKMC_VECTOR_EXC_FRAME_SIZE (288) /* sizeof(lkmc_vector_exception_frame) */
#define LKMC_VECTOR_EXC_EXC_TYPE_OFFSET (0) /* offsetof(lkmc_vector_exception_frame, exc_type) */
#define LKMC_VECTOR_EXC_EXC_ESR_OFFSET (8) /* offsetof(lkmc_vector_exception_frame, exc_esr) */
#define LKMC_VECTOR_EXC_EXC_SP_OFFSET (16) /* offsetof(lkmc_vector_exception_frame, exc_sp) */
#define LKMC_VECTOR_EXC_EXC_ELR_OFFSET (24) /* offsetof(lkmc_vector_exception_frame, exc_elr) */
#define LKMC_VECTOR_EXC_EXC_SPSR_OFFSET (32) /* offsetof(lkmc_vector_exception_frame, exc_spsr) */
/* ESR_ELx.EC exception class values for SVC taken from AArch32 / AArch64. */
#define LKMC_ESR_EC_SVC_AARCH32 (0x11)
#define LKMC_ESR_EC_SVC_AARCH64 (0x15)
#define LKMC_VECTOR_FUNC_ALIGN .align 2 /* 2^2 = 4-byte align handler entry points. */
#define LKMC_VECTOR_SYMBOL_PREFIX lkmc_vector_ /* Namespace prefix for generated symbols. */
/* Push several registers on the stack to match LkmcVectorExceptionFrame. */
/*
 * Build an LkmcVectorExceptionFrame on the current stack, pushing from
 * the highest field (x29/x30) downwards so sp ends up pointing at
 * exc_type. Total pushed: 18 pairs * 16 = 288 bytes
 * (LKMC_VECTOR_EXC_FRAME_SIZE). x21/x22 are used as scratch only after
 * their own values have already been pushed. The exc_sp slot is filled
 * with XZR here and patched afterwards by LKMC_VECTOR_STORE_TRAPED_SP or
 * LKMC_VECTOR_STORE_NESTED_SP. LKMC_GLOBAL (from arm_aarch64.h,
 * presumably a global-label helper) names this spot
 * lkmc_vector_build_trapframe_<func_name>.
 */
#define LKMC_VECTOR_BUILD_TRAPFRAME(exc_type, func_name) \
LKMC_GLOBAL(LKMC_CONCAT(LKMC_CONCAT(LKMC_VECTOR_SYMBOL_PREFIX, build_trapframe_), func_name)) \
stp x29, x30, [sp, -16]!; \
stp x27, x28, [sp, -16]!; \
stp x25, x26, [sp, -16]!; \
stp x23, x24, [sp, -16]!; \
stp x21, x22, [sp, -16]!; \
stp x19, x20, [sp, -16]!; \
stp x17, x18, [sp, -16]!; \
stp x15, x16, [sp, -16]!; \
stp x13, x14, [sp, -16]!; \
stp x11, x12, [sp, -16]!; \
stp x9, x10, [sp, -16]!; \
stp x7, x8, [sp, -16]!; \
stp x5, x6, [sp, -16]!; \
stp x3, x4, [sp, -16]!; \
stp x1, x2, [sp, -16]!; \
mrs x21, spsr_el1; /* x21 is free to clobber: saved above. */ \
stp x21, x0, [sp, -16]!; /* exc_spsr, x0 */ \
mrs x21, elr_el1; \
stp xzr, x21, [sp, -16]!; /* exc_sp placeholder, exc_elr */ \
mov x21, exc_type; \
mrs x22, esr_el1; \
stp x21, x22, [sp, -16]! /* exc_type, exc_esr */
/* Record the trapped context's stack pointer (SP_EL0) into the frame's
 * exc_sp slot. Clobbers x21, which is already saved in the frame. */
#define LKMC_VECTOR_STORE_TRAPED_SP \
mrs x21, sp_el0; \
str x21, [sp, LKMC_VECTOR_EXC_EXC_SP_OFFSET]
/* Call the C trap handler with x0 = pointer to the frame just built
 * (the current sp, which points at exc_type). */
#define LKMC_VECTOR_CALL_TRAP_HANDLER \
mov x0, sp; \
bl lkmc_vector_trap_handler
/* For exceptions taken on the current stack (current EL with SPx): the
 * trapped SP is the value sp had before the 288-byte frame was pushed,
 * i.e. sp + LKMC_VECTOR_EXC_FRAME_SIZE. Clobbers x21 (saved in frame). */
#define LKMC_VECTOR_STORE_NESTED_SP \
mov x21, sp; \
add x21, x21, LKMC_VECTOR_EXC_FRAME_SIZE; \
str x21, [sp, LKMC_VECTOR_EXC_EXC_SP_OFFSET]
/* Write the frame's exc_sp (possibly modified by the handler) back into
 * SP_EL0 before the frame is popped. Clobbers x21, which is reloaded from
 * the frame by LKMC_VECTOR_RESTORE_TRAPFRAME afterwards. */
#define LKMC_VECTOR_RESTORE_TRAPED_SP \
ldr x21, [sp, LKMC_VECTOR_EXC_EXC_SP_OFFSET]; \
msr sp_el0, x21
/*
 * Pop the LkmcVectorExceptionFrame and return from the exception.
 * Mirror image of LKMC_VECTOR_BUILD_TRAPFRAME: skip exc_type/exc_esr
 * (not needed for the return), reload ELR_EL1 and SPSR_EL1 from the frame
 * (so the handler can redirect the return by editing the frame), restore
 * x0-x30 and eret. In the first ldp, x21 receives the exc_sp slot and is
 * discarded; the real x21/x22 values are reloaded further down.
 */
#define LKMC_VECTOR_RESTORE_TRAPFRAME \
add sp, sp, 16; /* skip exc_type, exc_esr */ \
ldp x21, x22, [sp], 16; /* x21 = exc_sp (ignored), x22 = exc_elr */ \
msr elr_el1, x22; \
ldp x21, x0, [sp], 16; /* x21 = exc_spsr */ \
msr spsr_el1, x21; \
ldp x1, x2, [sp], 16; \
ldp x3, x4, [sp], 16; \
ldp x5, x6, [sp], 16; \
ldp x7, x8, [sp], 16; \
ldp x9, x10, [sp], 16; \
ldp x11, x12, [sp], 16; \
ldp x13, x14, [sp], 16; \
ldp x15, x16, [sp], 16; \
ldp x17, x18, [sp], 16; \
ldp x19, x20, [sp], 16; \
ldp x21, x22, [sp], 16; \
ldp x23, x24, [sp], 16; \
ldp x25, x26, [sp], 16; \
ldp x27, x28, [sp], 16; \
ldp x29, x30, [sp], 16; \
eret
/* One vector table slot: the architecture gives each entry 0x80 bytes
 * (.align 7 = 2^7 = 128); only a branch to the real handler
 * (lkmc_vector_entry_<func_name>) is needed inside it. */
#define LKMC_VECTOR_ENTRY(func_name) \
.align 7; \
b LKMC_CONCAT(LKMC_CONCAT(LKMC_VECTOR_SYMBOL_PREFIX, entry_), func_name)
/* Define one exception handler body, lkmc_vector_entry_<func_name>, for
 * exceptions whose trapped context used SP_EL0 (current EL with SP0, or
 * lower EL): build the trap frame, record the trapped SP from SP_EL0,
 * call the C handler, write the (possibly updated) SP back to SP_EL0,
 * then restore the frame and eret. func_id becomes exc_type. */
#define LKMC_VECTOR_FUNC(func_name, func_id) \
LKMC_VECTOR_FUNC_ALIGN; \
LKMC_CONCAT(LKMC_CONCAT(LKMC_VECTOR_SYMBOL_PREFIX, entry_), func_name):; \
LKMC_VECTOR_BUILD_TRAPFRAME(func_id, func_name); \
LKMC_VECTOR_STORE_TRAPED_SP; \
LKMC_VECTOR_CALL_TRAP_HANDLER; \
LKMC_VECTOR_RESTORE_TRAPED_SP; \
LKMC_VECTOR_RESTORE_TRAPFRAME
/* Variant for current-EL-with-SPx exceptions, which arrive already on the
 * trapped stack: the trapped SP is reconstructed as sp + frame size
 * rather than read from SP_EL0, and needs no explicit restore -- sp
 * recovers naturally as the frame is popped. */
#define LKMC_VECTOR_FUNC_NESTED(func_name, func_id) \
LKMC_VECTOR_FUNC_ALIGN; \
LKMC_CONCAT(LKMC_CONCAT(LKMC_VECTOR_SYMBOL_PREFIX, entry_), func_name):; \
LKMC_VECTOR_BUILD_TRAPFRAME(func_id, func_name); \
LKMC_VECTOR_STORE_NESTED_SP; \
LKMC_VECTOR_CALL_TRAP_HANDLER; \
LKMC_VECTOR_RESTORE_TRAPFRAME
/* Define the actual vector table. */
/*
 * Emit the VBAR-compatible exception vector table: 16 slots of 128 bytes
 * (4 exception kinds x 4 sources) at 2048-byte alignment (.align 11),
 * each slot branching to its handler, followed by the handler bodies.
 * Current-EL-SPx entries use the nested variant because they already run
 * on the trapped stack. The table's address (lkmc_vector_table) is what
 * goes into VBAR_EL1 (see lkmc_sysreg_write_vbar_el1 below).
 */
#define LKMC_VECTOR_TABLE \
.align 11; \
LKMC_GLOBAL(lkmc_vector_table); \
; \
LKMC_VECTOR_ENTRY(curr_el_sp0_sync); \
LKMC_VECTOR_ENTRY(curr_el_sp0_irq); \
LKMC_VECTOR_ENTRY(curr_el_sp0_fiq); \
LKMC_VECTOR_ENTRY(curr_el_sp0_serror); \
; \
LKMC_VECTOR_ENTRY(curr_el_spx_sync); \
LKMC_VECTOR_ENTRY(curr_el_spx_irq); \
LKMC_VECTOR_ENTRY(curr_el_spx_fiq); \
LKMC_VECTOR_ENTRY(curr_el_spx_serror); \
; \
LKMC_VECTOR_ENTRY(lower_el_aarch64_sync); \
LKMC_VECTOR_ENTRY(lower_el_aarch64_irq); \
LKMC_VECTOR_ENTRY(lower_el_aarch64_fiq); \
LKMC_VECTOR_ENTRY(lower_el_aarch64_serror); \
; \
LKMC_VECTOR_ENTRY(lower_el_aarch32_sync); \
LKMC_VECTOR_ENTRY(lower_el_aarch32_irq); \
LKMC_VECTOR_ENTRY(lower_el_aarch32_fiq); \
LKMC_VECTOR_ENTRY(lower_el_aarch32_serror); \
; \
LKMC_VECTOR_FUNC(curr_el_sp0_sync, LKMC_VECTOR_SYNC_SP0); \
LKMC_VECTOR_FUNC(curr_el_sp0_irq, LKMC_VECTOR_IRQ_SP0); \
LKMC_VECTOR_FUNC(curr_el_sp0_fiq, LKMC_VECTOR_FIQ_SP0); \
LKMC_VECTOR_FUNC(curr_el_sp0_serror, LKMC_VECTOR_SERR_SP0); \
; \
LKMC_VECTOR_FUNC_NESTED(curr_el_spx_sync, LKMC_VECTOR_SYNC_SPX); \
LKMC_VECTOR_FUNC_NESTED(curr_el_spx_irq, LKMC_VECTOR_IRQ_SPX); \
LKMC_VECTOR_FUNC_NESTED(curr_el_spx_fiq, LKMC_VECTOR_FIQ_SPX); \
LKMC_VECTOR_FUNC_NESTED(curr_el_spx_serror, LKMC_VECTOR_SERR_SPX); \
; \
LKMC_VECTOR_FUNC(lower_el_aarch64_sync, LKMC_VECTOR_SYNC_AARCH64); \
LKMC_VECTOR_FUNC(lower_el_aarch64_irq, LKMC_VECTOR_IRQ_AARCH64); \
LKMC_VECTOR_FUNC(lower_el_aarch64_fiq, LKMC_VECTOR_FIQ_AARCH64); \
LKMC_VECTOR_FUNC(lower_el_aarch64_serror, LKMC_VECTOR_SERR_AARCH64); \
; \
LKMC_VECTOR_FUNC(lower_el_aarch32_sync, LKMC_VECTOR_SYNC_AARCH32); \
LKMC_VECTOR_FUNC(lower_el_aarch32_irq, LKMC_VECTOR_IRQ_AARCH32); \
LKMC_VECTOR_FUNC(lower_el_aarch32_fiq, LKMC_VECTOR_FIQ_AARCH32); \
LKMC_VECTOR_FUNC(lower_el_aarch32_serror, LKMC_VECTOR_SERR_AARCH32)
  223. /* aarch64 C definitions. */
  224. #if !defined(__ASSEMBLER__)
  225. #include <stdint.h>
/*
 * C view of the trap frame built by LKMC_VECTOR_BUILD_TRAPFRAME.
 * Field order and sizes are ABI with the assembly: they must match the
 * push sequence and the LKMC_VECTOR_EXC_*_OFFSET / FRAME_SIZE constants
 * above (36 * 8 = 288 bytes). Do not reorder.
 * The handler may modify fields (e.g. exc_elr, x0) to alter the state
 * restored by LKMC_VECTOR_RESTORE_TRAPFRAME.
 */
typedef struct {
uint64_t exc_type; /* One of LKMC_VECTOR_*: which vector fired. */
uint64_t exc_esr; /* ESR_EL1: exception syndrome. */
uint64_t exc_sp; /* Trapped context's stack pointer. */
uint64_t exc_elr; /* ELR_EL1: return address. */
uint64_t exc_spsr; /* SPSR_EL1: saved processor state. */
uint64_t x0; /* General purpose registers x0-x30 at trap time. */
uint64_t x1;
uint64_t x2;
uint64_t x3;
uint64_t x4;
uint64_t x5;
uint64_t x6;
uint64_t x7;
uint64_t x8;
uint64_t x9;
uint64_t x10;
uint64_t x11;
uint64_t x12;
uint64_t x13;
uint64_t x14;
uint64_t x15;
uint64_t x16;
uint64_t x17;
uint64_t x18;
uint64_t x19;
uint64_t x20;
uint64_t x21;
uint64_t x22;
uint64_t x23;
uint64_t x24;
uint64_t x25;
uint64_t x26;
uint64_t x27;
uint64_t x28;
uint64_t x29;
uint64_t x30;
} LkmcVectorExceptionFrame;
/* C-level trap handler invoked by LKMC_VECTOR_CALL_TRAP_HANDLER with a
 * pointer to the frame on the exception stack; it may edit the frame
 * before LKMC_VECTOR_RESTORE_TRAPFRAME replays it. Implemented elsewhere. */
void lkmc_vector_trap_handler(LkmcVectorExceptionFrame *exception);
/* Entry for CPUs other than CPU 0. NOTE(review): semantics inferred from
 * the name only -- confirm against the definition. */
void lkmc_cpu_not_0(uint64_t cpuid);
/* Sysreg read and write functions, e.g.:
*
* * lkmc_sysreg_read_daif(void);
* * lkmc_sysreg_write_daif(uint64_t);
* * lkmc_sysreg_print_daif(void);
*/
#define LKMC_SYSREG_SYMBOL_PREFIX lkmc_sysreg_
/* Declare the read/write/print prototype triple for one system register.
 * `nbits` (32 or 64) selects the uintN_t value type via token pasting.
 * The functions themselves are implemented elsewhere (presumably in
 * assembly, since mrs/msr cannot be expressed in portable C). */
#define LKMC_SYSREG_READ_WRITE(nbits, name) \
LKMC_CONCAT(LKMC_CONCAT(uint, nbits), _t) LKMC_CONCAT(LKMC_CONCAT(LKMC_SYSREG_SYMBOL_PREFIX, read_), name)(void); \
void LKMC_CONCAT(LKMC_CONCAT(LKMC_SYSREG_SYMBOL_PREFIX, write_), name)(LKMC_CONCAT(LKMC_CONCAT(uint, nbits), _t) name); \
void LKMC_CONCAT(LKMC_CONCAT(LKMC_SYSREG_SYMBOL_PREFIX, print_), name)(void);
/* X-macro list of the system registers exposed by this header, expanded
 * once below to declare the prototypes. Only LKMC_SYSREG_READ_WRITE is
 * #undef'd afterwards; LKMC_SYSREG_OPS stays defined, presumably so an
 * implementation file can re-expand it with its own generator macro. */
#define LKMC_SYSREG_OPS \
LKMC_SYSREG_READ_WRITE(32, cntv_ctl_el0) \
LKMC_SYSREG_READ_WRITE(64, cntfrq_el0) \
LKMC_SYSREG_READ_WRITE(64, cntv_cval_el0) \
LKMC_SYSREG_READ_WRITE(64, cntv_tval_el0) \
LKMC_SYSREG_READ_WRITE(64, cntvct_el0) \
LKMC_SYSREG_READ_WRITE(64, daif) \
LKMC_SYSREG_READ_WRITE(64, mpidr_el1) \
LKMC_SYSREG_READ_WRITE(64, sp_el1) \
LKMC_SYSREG_READ_WRITE(32, spsel) \
LKMC_SYSREG_READ_WRITE(64, vbar_el1)
LKMC_SYSREG_OPS
#undef LKMC_SYSREG_READ_WRITE
  290. /* Determine what is the ID of the currently running CPU. */
  291. uint64_t lkmc_aarch64_cpu_id();
/* PSCI CPU_ON wrapper: power on and start another CPU. Implemented
 * elsewhere. NOTE(review): parameter meanings inferred from PSCI naming
 * (target_cpu = MPIDR affinity, entry_point_address = where the CPU
 * begins execution, context_id = value handed to the entry point) --
 * confirm against the definition and the Arm PSCI specification. */
void lkmc_aarch64_psci_cpu_on(
uint64_t target_cpu,
uint64_t entry_point_address,
uint64_t context_id
);
  297. #endif
  298. #endif