relocate_kernel.S

/*
 * kexec for arm64
 *
 * Copyright (C) Linaro.
 * Copyright (C) Huawei Futurewei Technologies.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/kexec.h>
#include <linux/linkage.h>

#include <asm/assembler.h>
#include <asm/kexec.h>
#include <asm/page.h>
#include <asm/sysreg.h>

/*
 * arm64_relocate_new_kernel - Put a 2nd stage image in place and boot it.
 *
 * The memory that the old kernel occupies may be overwritten when copying the
 * new image to its final location. To ensure that the
 * arm64_relocate_new_kernel routine which does that copy is not overwritten,
 * all code and data needed by arm64_relocate_new_kernel must be between the
 * symbols arm64_relocate_new_kernel and .Lcopy_end. The machine_kexec()
 * routine will copy arm64_relocate_new_kernel to the kexec control_code_page,
 * a special page which has been set up to be preserved during the copy
 * operation.
 */
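/*
 * The image to be relocated is described by a list of kimage entries whose
 * head is passed in x0. Each 64-bit entry is a page-aligned physical address
 * with IND_* flags in its low bits: IND_INDIRECTION entries point at the next
 * page of list entries, IND_DESTINATION entries set the copy destination,
 * IND_SOURCE entries name a page to copy, and IND_DONE terminates the list.
 * Roughly, in C-like pseudocode (a sketch of the loop below, not the actual
 * kernel code):
 *
 *	entry = kimage_head;
 *	while (!(entry & IND_DONE)) {
 *		addr = entry & PAGE_MASK;
 *		if (entry & IND_SOURCE) {
 *			copy_page(dest, addr);
 *			dest += PAGE_SIZE;
 *		} else if (entry & IND_INDIRECTION) {
 *			ptr = addr;
 *		} else if (entry & IND_DESTINATION) {
 *			dest = addr;
 *		}
 *		entry = *ptr++;
 *	}
 */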
ENTRY(arm64_relocate_new_kernel)

	/* Setup the list loop variables. */
	mov	x17, x1				/* x17 = kimage_start */
	mov	x16, x0				/* x16 = kimage_head */
	raw_dcache_line_size x15, x0		/* x15 = dcache line size */
	mov	x14, xzr			/* x14 = entry ptr */
	mov	x13, xzr			/* x13 = copy dest */
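
	/*
	 * If this code was entered at EL2, also turn off the EL2 MMU,
	 * caches and alignment checks (the SCTLR_ELx_FLAGS bits) so the
	 * page copies below go straight to memory.
	 */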
	/* Clear the sctlr_el2 flags. */
	mrs	x0, CurrentEL
	cmp	x0, #CurrentEL_EL2
	b.ne	1f
	mrs	x0, sctlr_el2
	ldr	x1, =SCTLR_ELx_FLAGS
	bic	x0, x0, x1
	pre_disable_mmu_workaround
	msr	sctlr_el2, x0
	isb
1:

	/* Check if the new image needs relocation. */
	tbnz	x16, IND_DONE_BIT, .Ldone
.Lloop:
	and	x12, x16, PAGE_MASK		/* x12 = addr */

	/* Test the entry flags. */
.Ltest_source:
	tbz	x16, IND_SOURCE_BIT, .Ltest_indirection
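
	/*
	 * Drop any stale cache lines for the destination page so that
	 * nothing dirty can later be written back on top of the data
	 * copied below with the caches off.
	 */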
	/* Invalidate dest page to PoC. */
	mov	x0, x13
	add	x20, x0, #PAGE_SIZE
	sub	x1, x15, #1
	bic	x0, x0, x1
2:	dc	ivac, x0
	add	x0, x0, x15
	cmp	x0, x20
	b.lo	2b
	dsb	sy
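
	/*
	 * copy_page (from asm/assembler.h) advances its dest/src registers
	 * and clobbers the scratch registers, so work on copies in x20/x21
	 * and keep x13/x12 intact for the loop bookkeeping.
	 */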
	mov	x20, x13
	mov	x21, x12
	copy_page x20, x21, x0, x1, x2, x3, x4, x5, x6, x7

	/* dest += PAGE_SIZE */
	add	x13, x13, PAGE_SIZE
	b	.Lnext

.Ltest_indirection:
	tbz	x16, IND_INDIRECTION_BIT, .Ltest_destination

	/* ptr = addr */
	mov	x14, x12
	b	.Lnext

.Ltest_destination:
	tbz	x16, IND_DESTINATION_BIT, .Lnext

	/* dest = addr */
	mov	x13, x12

.Lnext:
	/* entry = *ptr++ */
	ldr	x16, [x14], #8

	/* while (!(entry & DONE)) */
	tbz	x16, IND_DONE_BIT, .Lloop

.Ldone:
	/* wait for writes from copy_page to finish */
	dsb	nsh
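	/*
	 * Invalidate the whole I-cache so that no stale instructions from
	 * the old kernel are fetched once execution continues in the new
	 * image.
	 */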
	ic	iallu
	dsb	nsh
	isb
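
	/*
	 * x0-x3 are simply cleared here; with the userspace kexec tools the
	 * entry point is normally the purgatory code, which is responsible
	 * for setting up whatever register state (e.g. the dtb pointer in
	 * x0) the next kernel expects.
	 */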
	/* Start new image. */
	mov	x0, xzr
	mov	x1, xzr
	mov	x2, xzr
	mov	x3, xzr
	br	x17

ENDPROC(arm64_relocate_new_kernel)
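
/*
 * .ltorg flushes the literal pool here (it holds the SCTLR_ELx_FLAGS
 * constant loaded with ldr =... above), before .Lcopy_end, so the pool
 * is copied to the control page together with the code that uses it.
 */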
.ltorg

.align 3	/* To keep the 64-bit values below naturally aligned. */

.Lcopy_end:
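
/*
 * Advancing the location counter to KEXEC_CONTROL_PAGE_SIZE pads this
 * object out to a full control page and doubles as a build-time check:
 * .org cannot move backwards, so assembly fails if the code above does
 * not fit in a single control page.
 */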
.org	KEXEC_CONTROL_PAGE_SIZE

/*
 * arm64_relocate_new_kernel_size - Number of bytes to copy to the
 * control_code_page.
 */
.globl arm64_relocate_new_kernel_size
arm64_relocate_new_kernel_size:
	.quad	.Lcopy_end - arm64_relocate_new_kernel