Searched refs:EFER_LME (Results 1 – 11 of 11) sorted by relevance
179	(run->s.regs.sregs.efer & EFER_LME), in race_sync_regs()
182	!!(run->s.regs.sregs.efer & EFER_LME)); in race_sync_regs()
44 #define KVM_MMU_EFER_ROLE_BITS (EFER_LME | EFER_NX)
115	u64 __read_mostly efer_reserved_bits = ~((u64)(EFER_SCE | EFER_LME | EFER_LMA));
1177	if ((vcpu->arch.efer & EFER_LME) && !is_paging(vcpu) && in kvm_set_cr0()
1188	if (!(vcpu->arch.efer & EFER_LME) && (cr0 & X86_CR0_PG) && in kvm_set_cr0()
1737	if (efer & (EFER_LME | EFER_LMA) && in __kvm_valid_efer()
1770	(vcpu->arch.efer & EFER_LME) != (efer & EFER_LME)) in set_efer()
12331	if ((sregs->efer & EFER_LME) && (sregs->cr0 & X86_CR0_PG)) { in kvm_is_valid_sregs()
32 #define EFER_LME (1<<_EFER_LME) macro
2280	return vmx->vcpu.arch.efer | (EFER_LMA | EFER_LME); in nested_vmx_calc_efer()
2282	return vmx->vcpu.arch.efer & ~(EFER_LMA | EFER_LME); in nested_vmx_calc_efer()
3217	CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LME))) in nested_vmx_check_host_state()
3357	ia32e != !!(vmcs12->guest_ia32_efer & EFER_LME)))) in nested_vmx_check_guest_state()
4784	vcpu->arch.efer |= (EFER_LMA | EFER_LME); in load_vmcs12_host_state()
4786	vcpu->arch.efer &= ~(EFER_LMA | EFER_LME); in load_vmcs12_host_state()
1170	ignore_bits |= EFER_LMA | EFER_LME; in update_transition_efer()
1184	guest_efer &= ~EFER_LME; in update_transition_efer()
3531	if (vcpu->arch.efer & EFER_LME) { in vmx_set_cr0()
6646	vcpu->arch.efer | (EFER_LMA | EFER_LME)); in dump_vmcs()
6649	vcpu->arch.efer & ~(EFER_LMA | EFER_LME)); in dump_vmcs()
696 vcpu->arch.efer = EFER_SCE | EFER_LME | EFER_LMA | EFER_NX; in tdx_vcpu_create()
214	efer &= ~EFER_LME; in svm_set_efer()
1726	if (vcpu->arch.efer & EFER_LME) { in svm_set_cr0()
1730	svm->vmcb->save.efer |= EFER_LMA | EFER_LME; in svm_set_cr0()
1736	svm->vmcb->save.efer &= ~(EFER_LMA | EFER_LME); in svm_set_cr0()
388 if ((save->efer & EFER_LME) && (save->cr0 & X86_CR0_PG)) { in __nested_vmcb_check_save()
665 sregs.efer |= (EFER_LME | EFER_LMA | EFER_NX); in vcpu_init_sregs()