Commit 217aaff5 authored by Sean Christopherson, committed by Paolo Bonzini

KVM: VMX: Invert the ordering of saving guest/host scratch reg at VM-Enter

Switching the ordering allows for an out-of-line path for VM-Fail
that elides saving guest state but still shares the register clearing
with the VM-Exit path.
Signed-off-by: Sean Christopherson <sean.j.christopherson@intel.com>
Signed-off-by: Paolo Bonzini <pbonzini@redhat.com>
parent c9afc58c
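
For orientation, a sketch (not part of the commit) of the stack layouts once guest RCX has been saved after VM-Exit, assuming the surrounding __vmx_vcpu_run() code matches the hunks below; "ws" stands for the word size:

/*
 * Stack layout after saving guest RCX (stack grows down):
 *
 *   old:                               new:
 *     RSP       -> vcpu_vmx pointer      RSP       -> guest RCX (pushed)
 *     RSP +  ws -> guest RCX slot        RSP +  ws -> vcpu_vmx pointer
 *     RSP + 2ws -> saved RBP             RSP + 2ws -> saved RBP
 *
 * Old: a slot is reserved up front with SUB, guest RCX is stored into
 * it, and the vcpu_vmx pointer is POPped into RCX.  New: guest RCX is
 * simply pushed on top, the vcpu_vmx pointer is reloaded with a MOV,
 * and its slot is discarded at the end with an ADD so that the "pop"
 * does not dirty any of the just-cleared GPRs.
 */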
@@ -6382,7 +6382,6 @@ static void __vmx_vcpu_run(struct kvm_vcpu *vcpu, struct vcpu_vmx *vmx)
 	asm(
 		/* Store host registers */
 		"push %%" _ASM_BP " \n\t"
-		"sub $%c[wordsize], %%" _ASM_SP "\n\t" /* placeholder for guest RCX */
 		"push %%" _ASM_ARG1 " \n\t"
 
 		/* Adjust RSP to account for the CALL to vmx_vmenter(). */
@@ -6418,11 +6417,11 @@ static void __vmx_vcpu_run(struct kvm_vcpu *vcpu, struct vcpu_vmx *vmx)
 		/* Enter guest mode */
 		"call vmx_vmenter\n\t"
 
-		/* Save guest's RCX to the stack placeholder (see above) */
-		"mov %%" _ASM_CX ", %c[wordsize](%%" _ASM_SP ") \n\t"
+		/* Temporarily save guest's RCX. */
+		"push %%" _ASM_CX " \n\t"
 
-		/* Load host's RCX, i.e. the vmx_vcpu pointer */
-		"pop %%" _ASM_CX " \n\t"
+		/* Reload the vcpu_vmx pointer to RCX. */
+		"mov %c[wordsize](%%" _ASM_SP "), %%" _ASM_CX " \n\t"
 
 		/* Set vmx->fail based on EFLAGS.{CF,ZF} */
 		"setbe %c[fail](%%" _ASM_CX ")\n\t"
@@ -6469,6 +6468,9 @@ static void __vmx_vcpu_run(struct kvm_vcpu *vcpu, struct vcpu_vmx *vmx)
 		"xor %%esi, %%esi \n\t"
 		"xor %%edi, %%edi \n\t"
 		"xor %%ebp, %%ebp \n\t"
+
+		/* "POP" the vcpu_vmx pointer. */
+		"add $%c[wordsize], %%" _ASM_SP " \n\t"
 		"pop %%" _ASM_BP " \n\t"
 	      : ASM_CALL_CONSTRAINT, "=b"((int){0}),
 #ifdef CONFIG_X86_64
...
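
The same save/reload/discard pattern can be exercised in a standalone userspace program. The sketch below is a toy model under stated assumptions (x86-64 GNU C): struct ctx, the 0x1234 immediate, and the surrounding scaffolding are invented stand-ins for the kernel's vcpu_vmx bookkeeping, not kernel code.

/* toy model of the patched sequence; NOT kernel code */
#include <stdio.h>

struct ctx {
	unsigned long guest_rcx;	/* stand-in for the guest register save area */
};

int main(void)
{
	struct ctx c = { 0 };

	asm volatile(
		"push %%rcx \n\t"          /* save the ctx pointer, a la push _ASM_ARG1 */
		"mov $0x1234, %%rcx \n\t"  /* pretend VM-Exit left a guest value in RCX */
		"push %%rcx \n\t"          /* temporarily save guest's RCX */
		"mov 8(%%rsp), %%rcx \n\t" /* reload the ctx pointer to RCX */
		"pop %%rax \n\t"           /* consume the saved guest value */
		"mov %%rax, (%%rcx) \n\t"  /* store it via the ctx pointer */
		"add $8, %%rsp \n\t"       /* "POP" the ctx pointer without touching a GPR */
		:
		: "c"(&c)
		: "rax", "memory", "cc");

	printf("guest_rcx = 0x%lx\n", c.guest_rcx);	/* prints guest_rcx = 0x1234 */
	return 0;
}

Dropping the pointer slot with ADD rather than an actual POP matters in the real code: by that point every general purpose register has been cleared or restored, and a POP would reload one of them with a host pointer from the stack.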