vgic-v3-switch.S

/*
 * Copyright (C) 2012,2013 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/linkage.h>
#include <linux/irqchip/arm-gic-v3.h>

#include <asm/assembler.h>
#include <asm/memory.h>
#include <asm/asm-offsets.h>
#include <asm/kvm.h>
#include <asm/kvm_asm.h>
#include <asm/kvm_arm.h>

	.text
	.pushsection	.hyp.text, "ax"
/*
 * We store LRs in reverse order to let the CPU deal with streaming
 * access. Use this macro to make it look saner...
 */
#define LR_OFFSET(n)	(VGIC_V3_CPU_LR + (15 - n) * 8)
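
// For example, LR_OFFSET(15) = VGIC_V3_CPU_LR + 0 and
// LR_OFFSET(0) = VGIC_V3_CPU_LR + 120: LR15 is accessed first, at the
// lowest address, so the save/restore tables below touch memory in
// ascending order.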

/*
 * Save the VGIC CPU state into memory
 * x0: Register pointing to VCPU struct
 * Do not corrupt x1!!!
 */
.macro	save_vgic_v3_state
	// Compute the address of struct vgic_cpu
	add	x3, x0, #VCPU_VGIC_CPU

	// Make sure stores to the GIC via the memory mapped interface
	// are now visible to the system register interface
	dsb	st
	// Save all interesting registers
	mrs_s	x4, ICH_HCR_EL2
	mrs_s	x5, ICH_VMCR_EL2
	mrs_s	x6, ICH_MISR_EL2
	mrs_s	x7, ICH_EISR_EL2
	mrs_s	x8, ICH_ELSR_EL2

	str	w4, [x3, #VGIC_V3_CPU_HCR]
	str	w5, [x3, #VGIC_V3_CPU_VMCR]
	str	w6, [x3, #VGIC_V3_CPU_MISR]
	str	w7, [x3, #VGIC_V3_CPU_EISR]
	str	w8, [x3, #VGIC_V3_CPU_ELRSR]
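
	// Writing zero clears ICH_HCR_EL2.En, disabling the virtual CPU
	// interface while the host runs; the copy saved above is written
	// back by restore_vgic_v3_state on the way into the guest.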
	msr_s	ICH_HCR_EL2, xzr

	mrs_s	x21, ICH_VTR_EL2
	mvn	w22, w21
	ubfiz	w23, w22, 2, 4	// w23 = (15 - ListRegs) * 4
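
	// ICH_VTR_EL2.ListRegs (bits [4:0]) holds the number of implemented
	// List Registers minus one. Each mrs_s below assembles to a single
	// 4-byte instruction, so branching (15 - ListRegs) * 4 bytes into
	// the table skips the accesses to unimplemented LRs: with 4 LRs,
	// for instance, we land directly on the ICH_LR3_EL2 access.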
	adr	x24, 1f
	add	x24, x24, x23
	br	x24

1:
	mrs_s	x20, ICH_LR15_EL2
	mrs_s	x19, ICH_LR14_EL2
	mrs_s	x18, ICH_LR13_EL2
	mrs_s	x17, ICH_LR12_EL2
	mrs_s	x16, ICH_LR11_EL2
	mrs_s	x15, ICH_LR10_EL2
	mrs_s	x14, ICH_LR9_EL2
	mrs_s	x13, ICH_LR8_EL2
	mrs_s	x12, ICH_LR7_EL2
	mrs_s	x11, ICH_LR6_EL2
	mrs_s	x10, ICH_LR5_EL2
	mrs_s	x9, ICH_LR4_EL2
	mrs_s	x8, ICH_LR3_EL2
	mrs_s	x7, ICH_LR2_EL2
	mrs_s	x6, ICH_LR1_EL2
	mrs_s	x5, ICH_LR0_EL2

	adr	x24, 1f
	add	x24, x24, x23
	br	x24

1:
	str	x20, [x3, #LR_OFFSET(15)]
	str	x19, [x3, #LR_OFFSET(14)]
	str	x18, [x3, #LR_OFFSET(13)]
	str	x17, [x3, #LR_OFFSET(12)]
	str	x16, [x3, #LR_OFFSET(11)]
	str	x15, [x3, #LR_OFFSET(10)]
	str	x14, [x3, #LR_OFFSET(9)]
	str	x13, [x3, #LR_OFFSET(8)]
	str	x12, [x3, #LR_OFFSET(7)]
	str	x11, [x3, #LR_OFFSET(6)]
	str	x10, [x3, #LR_OFFSET(5)]
	str	x9, [x3, #LR_OFFSET(4)]
	str	x8, [x3, #LR_OFFSET(3)]
	str	x7, [x3, #LR_OFFSET(2)]
	str	x6, [x3, #LR_OFFSET(1)]
	str	x5, [x3, #LR_OFFSET(0)]
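
	// ICH_VTR_EL2.PRIbits (bits [31:29]) holds the number of priority
	// bits minus one: 5 bits of priority imply one active-priority
	// register per group, 6 bits two, and 7 bits four. Only save the
	// registers that actually exist.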
	tbnz	w21, #29, 6f	// 6 bits
	tbz	w21, #30, 5f	// 5 bits
				// 7 bits
	mrs_s	x20, ICH_AP0R3_EL2
	str	w20, [x3, #(VGIC_V3_CPU_AP0R + 3*4)]
	mrs_s	x19, ICH_AP0R2_EL2
	str	w19, [x3, #(VGIC_V3_CPU_AP0R + 2*4)]
6:	mrs_s	x18, ICH_AP0R1_EL2
	str	w18, [x3, #(VGIC_V3_CPU_AP0R + 1*4)]
5:	mrs_s	x17, ICH_AP0R0_EL2
	str	w17, [x3, #VGIC_V3_CPU_AP0R]

	tbnz	w21, #29, 6f	// 6 bits
	tbz	w21, #30, 5f	// 5 bits
				// 7 bits
	mrs_s	x20, ICH_AP1R3_EL2
	str	w20, [x3, #(VGIC_V3_CPU_AP1R + 3*4)]
	mrs_s	x19, ICH_AP1R2_EL2
	str	w19, [x3, #(VGIC_V3_CPU_AP1R + 2*4)]
6:	mrs_s	x18, ICH_AP1R1_EL2
	str	w18, [x3, #(VGIC_V3_CPU_AP1R + 1*4)]
5:	mrs_s	x17, ICH_AP1R0_EL2
	str	w17, [x3, #VGIC_V3_CPU_AP1R]

	// Restore SRE_EL1 access and re-enable SRE at EL1.
	mrs_s	x5, ICC_SRE_EL2
	orr	x5, x5, #ICC_SRE_EL2_ENABLE
	msr_s	ICC_SRE_EL2, x5
	isb
	mov	x5, #1
	msr_s	ICC_SRE_EL1, x5
.endm

/*
 * Restore the VGIC CPU state from memory
 * x0: Register pointing to VCPU struct
 */
.macro	restore_vgic_v3_state
	// Compute the address of struct vgic_cpu
	add	x3, x0, #VCPU_VGIC_CPU

	// Restore all interesting registers
	ldr	w4, [x3, #VGIC_V3_CPU_HCR]
	ldr	w5, [x3, #VGIC_V3_CPU_VMCR]
	ldr	w25, [x3, #VGIC_V3_CPU_SRE]

	msr_s	ICC_SRE_EL1, x25

	// make sure SRE is valid before writing the other registers
	isb

	msr_s	ICH_HCR_EL2, x4
	msr_s	ICH_VMCR_EL2, x5
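
	// Restore the active priority registers, using the same PRIbits
	// test as on the save path: group 1 first, then group 0.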
	mrs_s	x21, ICH_VTR_EL2

	tbnz	w21, #29, 6f	// 6 bits
	tbz	w21, #30, 5f	// 5 bits
				// 7 bits
	ldr	w20, [x3, #(VGIC_V3_CPU_AP1R + 3*4)]
	msr_s	ICH_AP1R3_EL2, x20
	ldr	w19, [x3, #(VGIC_V3_CPU_AP1R + 2*4)]
	msr_s	ICH_AP1R2_EL2, x19
6:	ldr	w18, [x3, #(VGIC_V3_CPU_AP1R + 1*4)]
	msr_s	ICH_AP1R1_EL2, x18
5:	ldr	w17, [x3, #VGIC_V3_CPU_AP1R]
	msr_s	ICH_AP1R0_EL2, x17

	tbnz	w21, #29, 6f	// 6 bits
	tbz	w21, #30, 5f	// 5 bits
				// 7 bits
	ldr	w20, [x3, #(VGIC_V3_CPU_AP0R + 3*4)]
	msr_s	ICH_AP0R3_EL2, x20
	ldr	w19, [x3, #(VGIC_V3_CPU_AP0R + 2*4)]
	msr_s	ICH_AP0R2_EL2, x19
6:	ldr	w18, [x3, #(VGIC_V3_CPU_AP0R + 1*4)]
	msr_s	ICH_AP0R1_EL2, x18
5:	ldr	w17, [x3, #VGIC_V3_CPU_AP0R]
	msr_s	ICH_AP0R0_EL2, x17
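
	// Same computed-branch trick as in save_vgic_v3_state: skip the
	// loads and system register writes for unimplemented List Registers.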
	and	w22, w21, #0xf
	mvn	w22, w22
	ubfiz	w23, w22, 2, 4	// w23 = (15 - ListRegs) * 4
	adr	x24, 1f
	add	x24, x24, x23
	br	x24

1:
	ldr	x20, [x3, #LR_OFFSET(15)]
	ldr	x19, [x3, #LR_OFFSET(14)]
	ldr	x18, [x3, #LR_OFFSET(13)]
	ldr	x17, [x3, #LR_OFFSET(12)]
	ldr	x16, [x3, #LR_OFFSET(11)]
	ldr	x15, [x3, #LR_OFFSET(10)]
	ldr	x14, [x3, #LR_OFFSET(9)]
	ldr	x13, [x3, #LR_OFFSET(8)]
	ldr	x12, [x3, #LR_OFFSET(7)]
	ldr	x11, [x3, #LR_OFFSET(6)]
	ldr	x10, [x3, #LR_OFFSET(5)]
	ldr	x9, [x3, #LR_OFFSET(4)]
	ldr	x8, [x3, #LR_OFFSET(3)]
	ldr	x7, [x3, #LR_OFFSET(2)]
	ldr	x6, [x3, #LR_OFFSET(1)]
	ldr	x5, [x3, #LR_OFFSET(0)]

	adr	x24, 1f
	add	x24, x24, x23
	br	x24

1:
	msr_s	ICH_LR15_EL2, x20
	msr_s	ICH_LR14_EL2, x19
	msr_s	ICH_LR13_EL2, x18
	msr_s	ICH_LR12_EL2, x17
	msr_s	ICH_LR11_EL2, x16
	msr_s	ICH_LR10_EL2, x15
	msr_s	ICH_LR9_EL2, x14
	msr_s	ICH_LR8_EL2, x13
	msr_s	ICH_LR7_EL2, x12
	msr_s	ICH_LR6_EL2, x11
	msr_s	ICH_LR5_EL2, x10
	msr_s	ICH_LR4_EL2, x9
	msr_s	ICH_LR3_EL2, x8
	msr_s	ICH_LR2_EL2, x7
	msr_s	ICH_LR1_EL2, x6
	msr_s	ICH_LR0_EL2, x5

	// Ensure that the above will have reached the
	// (re)distributors. This ensures the guest will read
	// the correct values from the memory-mapped interface.
	isb
	dsb	sy

	// Prevent the guest from touching the GIC system registers
	// if SRE isn't enabled for GICv3 emulation
	cbnz	x25, 1f
	mrs_s	x5, ICC_SRE_EL2
	and	x5, x5, #~ICC_SRE_EL2_ENABLE
	msr_s	ICC_SRE_EL2, x5
1:
.endm

ENTRY(__save_vgic_v3_state)
	save_vgic_v3_state
	ret
ENDPROC(__save_vgic_v3_state)

ENTRY(__restore_vgic_v3_state)
	restore_vgic_v3_state
	ret
ENDPROC(__restore_vgic_v3_state)
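
// Return the raw ICH_VTR_EL2 value, from which the host can discover
// the number of implemented List Registers and priority bits.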
ENTRY(__vgic_v3_get_ich_vtr_el2)
	mrs_s	x0, ICH_VTR_EL2
	ret
ENDPROC(__vgic_v3_get_ich_vtr_el2)

	.popsection