Searched refs:EFER_LMA (Results 1 – 12 of 12) sorted by relevance
152 trampoline_header->efer = efer & ~EFER_LMA; in setup_real_mode()
796 if (efer & EFER_LMA) in emulator_recalc_and_set_mode() 804 if (efer & EFER_LMA) in emulator_recalc_and_set_mode() 813 if (efer & EFER_LMA) { in emulator_recalc_and_set_mode() 1519 if (!(efer & EFER_LMA)) in get_descriptor_ptr() 1693 if (efer & EFER_LMA) in __load_segment_descriptor() 2396 if (efer & EFER_LMA) { in em_syscall() 2404 if (efer & EFER_LMA) { in em_syscall() 2446 if ((ctxt->mode != X86EMUL_MODE_PROT64) && (efer & EFER_LMA) && in em_sysenter() 2462 if (efer & EFER_LMA) { in em_sysenter() 2471 ctxt->_eip = (efer & EFER_LMA) in em_sysenter() [all...]
222 return !!(vcpu->arch.efer & EFER_LMA); in is_long_mode()
116 u64 __read_mostly efer_reserved_bits = ~((u64)(EFER_SCE | EFER_LME | EFER_LMA)); 1709 if (efer & (EFER_LME | EFER_LMA) && in __kvm_valid_efer() 1746 efer &= ~EFER_LMA; in set_efer() 1747 efer |= vcpu->arch.efer & EFER_LMA; in set_efer() 11859 if (!(sregs->cr4 & X86_CR4_PAE) || !(sregs->efer & EFER_LMA)) in kvm_is_valid_sregs() 11868 if (sregs->efer & EFER_LMA || sregs->cs.l) in kvm_is_valid_sregs() 11976 !(sregs2->efer & EFER_LMA); in __set_sregs2()
2261 return vmx->vcpu.arch.efer | (EFER_LMA | EFER_LME); in nested_vmx_calc_efer() 2263 return vmx->vcpu.arch.efer & ~(EFER_LMA | EFER_LME); in nested_vmx_calc_efer() 2483 if (guest_efer & EFER_LMA) in prepare_vmcs02_early() 3024 !!(vcpu->arch.efer & EFER_LMA))) in nested_vmx_check_address_space_size() 3102 CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LMA)) || in nested_vmx_check_host_state() 3203 CC(ia32e != !!(vmcs12->guest_ia32_efer & EFER_LMA)) || in nested_vmx_check_guest_state() 4708 vcpu->arch.efer |= (EFER_LMA | EFER_LME); in load_vmcs12_host_state() 4710 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME); in load_vmcs12_host_state()
1065 ignore_bits |= EFER_LMA | EFER_LME; in update_transition_efer() 1067 if (guest_efer & EFER_LMA) in update_transition_efer() 1078 if (!(guest_efer & EFER_LMA)) in update_transition_efer() 3122 if (efer & EFER_LMA) in vmx_set_efer() 3127 if (KVM_BUG_ON(efer & EFER_LMA, vcpu->kvm)) in vmx_set_efer() 3151 vmx_set_efer(vcpu, vcpu->arch.efer | EFER_LMA); in enter_lmode() 3156 vmx_set_efer(vcpu, vcpu->arch.efer & ~EFER_LMA); in exit_lmode() 6242 vcpu->arch.efer | (EFER_LMA | EFER_LME)); in dump_vmcs() 6245 vcpu->arch.efer & ~(EFER_LMA | EFER_LME)); in dump_vmcs()
686 vcpu->arch.efer = EFER_SCE | EFER_LME | EFER_LMA | EFER_NX; in tdx_vcpu_create()
33 #define EFER_LMA (1<<_EFER_LMA) macro
219 if (!(efer & EFER_LMA)) in svm_set_efer() 1753 vcpu->arch.efer |= EFER_LMA; in svm_set_cr0() 1755 svm->vmcb->save.efer |= EFER_LMA | EFER_LME; in svm_set_cr0() 1759 vcpu->arch.efer &= ~EFER_LMA; in svm_set_cr0() 1761 svm->vmcb->save.efer &= ~(EFER_LMA | EFER_LME); in svm_set_cr0()
509 sregs.efer |= (EFER_LME | EFER_LMA | EFER_NX); in vcpu_init_sregs()
215 BUILD_MMU_ROLE_REGS_ACCESSOR(efer, lma, EFER_LMA);