|
@@ -52,11 +52,23 @@
 	 * there must not be any returns or indirect branches between this code
 	 * and vmentry.
 	 */
-	movl SVM_spec_ctrl(%_ASM_DI), %eax
-	cmp PER_CPU_VAR(x86_spec_ctrl_current), %eax
+#ifdef CONFIG_X86_64
+	mov SVM_spec_ctrl(%rdi), %rdx
+	cmp PER_CPU_VAR(x86_spec_ctrl_current), %rdx
+	je 801b
+	movl %edx, %eax
+	shr $32, %rdx
+#else
+	mov SVM_spec_ctrl(%edi), %eax
+	mov PER_CPU_VAR(x86_spec_ctrl_current), %ecx
+	xor %eax, %ecx
+	mov SVM_spec_ctrl + 4(%edi), %edx
+	mov PER_CPU_VAR(x86_spec_ctrl_current + 4), %esi
+	xor %edx, %esi
+	or %esi, %ecx
 	je 801b
+#endif
 	mov $MSR_IA32_SPEC_CTRL, %ecx
-	xor %edx, %edx
 	wrmsr
 	jmp 801b
 .endm
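The 64-bit path above compares the full MSR image in one go and then splits it for wrmsr, while the 32-bit path has no 64-bit registers and must compare the two halves with an xor/xor/or sequence. Here is a minimal C model of that control flow; the variables standing in for SVM_spec_ctrl and x86_spec_ctrl_current and the wrmsr stub are ours, not kernel code (MSR_IA32_SPEC_CTRL is architecturally MSR index 0x48):

	#include <stdint.h>
	#include <stdio.h>

	/* Stand-ins for SVM_spec_ctrl(%_ASM_DI) and PER_CPU_VAR(x86_spec_ctrl_current). */
	static uint64_t guest_spec_ctrl = 0x0000000400000001ULL;
	static uint64_t host_spec_ctrl  = 0x0000000000000001ULL;

	#define MSR_IA32_SPEC_CTRL 0x48	/* architectural MSR index */

	/* Stub standing in for the wrmsr instruction: ECX selects the MSR,
	 * EDX:EAX carries the 64-bit value. */
	static void wrmsr_stub(uint32_t ecx, uint32_t eax, uint32_t edx)
	{
		printf("wrmsr ecx=%#x edx:eax=%#010x:%#010x\n", ecx, edx, eax);
	}

	static void restore_guest_spec_ctrl(void)
	{
	#ifdef __x86_64__
		/* 64-bit path: one full-width compare, then split for wrmsr
		 * (movl %edx, %eax keeps the low half; shr $32, %rdx the high). */
		uint64_t v = guest_spec_ctrl;

		if (v == host_spec_ctrl)
			return;					/* je 801b */
		wrmsr_stub(MSR_IA32_SPEC_CTRL, (uint32_t)v, (uint32_t)(v >> 32));
	#else
		/* 32-bit path: XOR each half against its counterpart and OR the
		 * differences together; the result is zero only if both match. */
		uint32_t lo  = (uint32_t)guest_spec_ctrl;
		uint32_t hi  = (uint32_t)(guest_spec_ctrl >> 32);
		uint32_t dlo = lo ^ (uint32_t)host_spec_ctrl;		/* xor %eax, %ecx */
		uint32_t dhi = hi ^ (uint32_t)(host_spec_ctrl >> 32);	/* xor %edx, %esi */

		if ((dlo | dhi) == 0)			/* or %esi, %ecx; je 801b */
			return;
		wrmsr_stub(MSR_IA32_SPEC_CTRL, lo, hi);
	#endif
	}

	int main(void)
	{
		restore_guest_spec_ctrl();	/* values differ, so the stub fires */
		return 0;
	}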
|
@@ -81,13 +93,25 @@
 	jnz 998f
 	rdmsr
 	movl %eax, SVM_spec_ctrl(%_ASM_DI)
+	movl %edx, SVM_spec_ctrl + 4(%_ASM_DI)
 998:
-
 	/* Now restore the host value of the MSR if different from the guest's. */
-	movl PER_CPU_VAR(x86_spec_ctrl_current), %eax
-	cmp SVM_spec_ctrl(%_ASM_DI), %eax
+#ifdef CONFIG_X86_64
+	mov PER_CPU_VAR(x86_spec_ctrl_current), %rdx
+	cmp SVM_spec_ctrl(%rdi), %rdx
 	je 901b
-	xor %edx, %edx
+	movl %edx, %eax
+	shr $32, %rdx
+#else
+	mov PER_CPU_VAR(x86_spec_ctrl_current), %eax
+	mov SVM_spec_ctrl(%edi), %esi
+	xor %eax, %esi
+	mov PER_CPU_VAR(x86_spec_ctrl_current + 4), %edx
+	mov SVM_spec_ctrl + 4(%edi), %edi
+	xor %edx, %edi
+	or %edi, %esi
+	je 901b
+#endif
 	wrmsr
 	jmp 901b
 .endm
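On the save side, rdmsr returns the MSR split across EDX:EAX, which is why this hunk adds a second movl to capture the high word next to the low one. A C sketch of the same save-then-conditionally-restore logic, again with hypothetical stand-ins (spec_ctrl_slot models the two 32-bit words at SVM_spec_ctrl and SVM_spec_ctrl + 4):

	#include <stdint.h>
	#include <stdio.h>

	#define MSR_IA32_SPEC_CTRL 0x48

	/* Hypothetical mirror of the save slot: the asm stores the rdmsr
	 * result as two 32-bit words at SVM_spec_ctrl and SVM_spec_ctrl + 4. */
	struct spec_ctrl_slot {
		uint32_t lo;	/* movl %eax, SVM_spec_ctrl(%_ASM_DI) */
		uint32_t hi;	/* movl %edx, SVM_spec_ctrl + 4(%_ASM_DI) */
	};

	/* Stub for rdmsr: the real instruction returns the value in EDX:EAX. */
	static void rdmsr_stub(uint32_t ecx, uint32_t *eax, uint32_t *edx)
	{
		(void)ecx;
		*eax = 0x00000005;	/* pretend guest value, low word */
		*edx = 0x00000001;	/* pretend guest value, high word */
	}

	static void save_guest_and_restore_host(struct spec_ctrl_slot *slot,
						uint64_t host_spec_ctrl)
	{
		uint32_t eax, edx;

		rdmsr_stub(MSR_IA32_SPEC_CTRL, &eax, &edx);
		slot->lo = eax;		/* both halves are now preserved ... */
		slot->hi = edx;		/* ... where the old code kept only EAX */

		/* Restore the host value only if it differs from the guest's. */
		uint64_t guest = ((uint64_t)slot->hi << 32) | slot->lo;
		if (guest == host_spec_ctrl)
			return;		/* je 901b */
		printf("wrmsr ecx=%#x edx:eax=%#010x:%#010x\n", MSR_IA32_SPEC_CTRL,
		       (uint32_t)(host_spec_ctrl >> 32), (uint32_t)host_spec_ctrl);
	}

	int main(void)
	{
		struct spec_ctrl_slot slot = { 0, 0 };

		save_guest_and_restore_host(&slot, 0x1ULL);
		return 0;
	}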
@@ -134,7 +158,7 @@ SYM_FUNC_START(__svm_vcpu_run)
 	mov %_ASM_ARG1, %_ASM_DI
 .endif
 
-	/* Clobbers RAX, RCX, RDX. */
+	/* Clobbers RAX, RCX, RDX (and ESI on 32-bit), consumes RDI (@svm). */
 	RESTORE_GUEST_SPEC_CTRL
 
 	/*
@@ -211,7 +235,10 @@ SYM_FUNC_START(__svm_vcpu_run)
 	/* IMPORTANT: Stuff the RSB immediately after VM-Exit, before RET! */
 	FILL_RETURN_BUFFER %_ASM_AX, RSB_CLEAR_LOOPS, X86_FEATURE_RSB_VMEXIT
 
-	/* Clobbers RAX, RCX, RDX. */
+	/*
+	 * Clobbers RAX, RCX, RDX (and ESI, EDI on 32-bit), consumes RDI (@svm)
+	 * and RSP (pointer to @spec_ctrl_intercepted).
+	 */
 	RESTORE_HOST_SPEC_CTRL
 
 	/*
@@ -331,7 +358,7 @@ SYM_FUNC_START(__svm_sev_es_vcpu_run)
 	mov %rdi, SEV_ES_RDI (%rdx)
 	mov %rsi, SEV_ES_RSI (%rdx)
 
-	/* Clobbers RAX, RCX, RDX (@hostsa). */
+	/* Clobbers RAX, RCX, and RDX (@hostsa), consumes RDI (@svm). */
 	RESTORE_GUEST_SPEC_CTRL
 
 	/* Get svm->current_vmcb->pa into RAX. */
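All of the hunks above lean on the same register contract: ECX selects the MSR and the 64-bit value travels as an EDX:EAX pair. For reference, that contract expressed as C inline-asm helpers; the names rdmsr64/wrmsr64 are ours, and since both instructions fault outside ring 0 this illustrates the convention rather than being user-space runnable code:

	#include <stdint.h>

	/* ECX selects the MSR; the 64-bit value is split across EDX:EAX. */
	static inline uint64_t rdmsr64(uint32_t msr)
	{
		uint32_t lo, hi;

		asm volatile("rdmsr" : "=a"(lo), "=d"(hi) : "c"(msr));
		return ((uint64_t)hi << 32) | lo;	/* recombine EDX:EAX */
	}

	static inline void wrmsr64(uint32_t msr, uint64_t val)
	{
		asm volatile("wrmsr"
			     : /* no outputs */
			     : "c"(msr), "a"((uint32_t)val),
			       "d"((uint32_t)(val >> 32)));
	}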
|