KVM/arm64 fixes for 5.11, take #3

- Avoid clobbering extra registers on initialisation
 -----BEGIN PGP SIGNATURE-----
 
 iQJDBAABCgAtFiEEn9UcU+C1Yxj9lZw9I9DQutE9ekMFAmAS8woPHG1hekBrZXJu
 ZWwub3JnAAoJECPQ0LrRPXpDlA8QAMViqFlguoOr01uesh1BC+Mdj+yBnxPneAVi
 7CskUNTryqTnnx+AoVJp25BZzdOz1E+bExj2KSrjn5HF3jOiML8tWJDXIjtw/VHT
 ibSZ37PB5GX755T4JciNRJIlMA8VvFYdzvaDOB9Ue1HHJLtzOnuL3jM1y1gtx6l8
 I/zQpzqrQ+4J4xA41x9FtwJLqSS68Pnf9v+ZBBjH+Quv54uyhcaWK0UvWwitHsGY
 QC5ihf/98u39/3kOSDxFiTzR0uMPhA9w6Qj/6Sr/ycMRCxsNgf9r1rC8axIE2WlR
 L4SaD2A793bhumwlXkaDxTE1YS0CNb00fGAaG//VTK8dBpejEYbUjm8sVwyhLMNG
 wlTWXoN3B1bWhfElhD06Q7fVk5muTTI7E7IMpkP5CffBDn+l3knYq33cVps5VZzV
 /Jph3q+OfQtgLr0AYOCy+I5PXJjFJZq3HH/LhQoWHMibDjuAfX/AYWVxuRpbiozI
 HG2+VodSV2VOgf7ng3A5Q7HWeqpdiF9Yqu+ZoACO5hso6YxlniO4CAf21ABf1qUF
 FJOZrB8YUP8AjPDvBYgjKXlt272ogUC5FF0ZLhU6yoMS4uPAjme52bVDKFPeagmp
 1PopPzGy2z3lkpXoMH4iOosIE76oa0D4E62udt4uAKTYjmA/kxdGbJu3IRVxOYv2
 deaZYoi2
 =LLd9
 -----END PGP SIGNATURE-----

Merge tag 'kvmarm-fixes-5.11-3' of git://git.kernel.org/pub/scm/linux/kernel/git/kvmarm/kvmarm into HEAD

KVM/arm64 fixes for 5.11, take #3

- Avoid clobbering extra registers on initialisation
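
The reason the register budget matters: the host reaches __kvm_hyp_init through the SMCCC 1.1 HVC helper, and arm_smccc_1_1_hvc() only adds write constraints for x0..x3 in its inline assembly, so the EL2 init path has to leave every other register untouched. Below is a minimal sketch of that host-side call, loosely modelled on the 5.11-era init path; the wrapper name is made up for illustration and the setup of the init parameters is elided.

#include <linux/arm-smccc.h>
#include <linux/bug.h>
#include <linux/mm.h>
#include <asm/kvm_asm.h>

/*
 * Hypothetical wrapper around the per-CPU hyp init hypercall.
 * arm_smccc_1_1_hvc() marks only x0..x3 as written in its inline asm,
 * so __do_hyp_init at EL2 must preserve everything above x3.
 */
static void init_hyp_on_this_cpu(struct kvm_nvhe_init_params *params)
{
	struct arm_smccc_res res;

	/* x0 = SMCCC function ID, x1 = PA of the init parameters */
	arm_smccc_1_1_hvc(KVM_HOST_SMCCC_FUNC(__kvm_hyp_init),
			  virt_to_phys(params), &res);
	WARN_ON(res.a0 != SMCCC_RET_SUCCESS);
}

With ___kvm_hyp_init restricted to x0..x2, __do_hyp_init can stash lr in x3 and still stay within the x0..x3 set the caller expects to be clobbered, which is exactly what the diff below does.
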
Paolo Bonzini 2021-01-28 13:02:49 -05:00
commit 074489b77a

arch/arm64/kvm/hyp/nvhe/hyp-init.S

@@ -47,6 +47,8 @@ __invalid:
 	b	.
 
 	/*
+	 * Only uses x0..x3 so as to not clobber callee-saved SMCCC registers.
+	 *
 	 * x0: SMCCC function ID
 	 * x1: struct kvm_nvhe_init_params PA
 	 */
@@ -70,9 +72,9 @@ __do_hyp_init:
 	eret
 
 1:	mov	x0, x1
-	mov	x4, lr
-	bl	___kvm_hyp_init
-	mov	lr, x4
+	mov	x3, lr
+	bl	___kvm_hyp_init			// Clobbers x0..x2
+	mov	lr, x3
 
 	/* Hello, World! */
 	mov	x0, #SMCCC_RET_SUCCESS
@@ -82,8 +84,8 @@ SYM_CODE_END(__kvm_hyp_init)
 /*
  * Initialize the hypervisor in EL2.
  *
- * Only uses x0..x3 so as to not clobber callee-saved SMCCC registers
- * and leave x4 for the caller.
+ * Only uses x0..x2 so as to not clobber callee-saved SMCCC registers
+ * and leave x3 for the caller.
  *
  * x0: struct kvm_nvhe_init_params PA
  */
@@ -112,9 +114,9 @@ alternative_else_nop_endif
 	/*
 	 * Set the PS bits in TCR_EL2.
 	 */
-	ldr	x1, [x0, #NVHE_INIT_TCR_EL2]
-	tcr_compute_pa_size x1, #TCR_EL2_PS_SHIFT, x2, x3
-	msr	tcr_el2, x1
+	ldr	x0, [x0, #NVHE_INIT_TCR_EL2]
+	tcr_compute_pa_size x0, #TCR_EL2_PS_SHIFT, x1, x2
+	msr	tcr_el2, x0
 
 	isb
 
@@ -193,7 +195,7 @@ SYM_CODE_START_LOCAL(__kvm_hyp_init_cpu)
 
 	/* Enable MMU, set vectors and stack. */
 	mov	x0, x28
-	bl	___kvm_hyp_init			// Clobbers x0..x3
+	bl	___kvm_hyp_init			// Clobbers x0..x2
 
 	/* Leave idmap. */
 	mov	x0, x29