Searched refs:walk_mmu (Results 1 – 9 of 9) sorted by relevance
161 return vcpu->arch.walk_mmu->pdptrs[index]; in kvm_pdptr_read()
166 vcpu->arch.walk_mmu->pdptrs[index] = value; in kvm_pdptr_write()
283 return vcpu->arch.walk_mmu == &vcpu->arch.nested_mmu; in mmu_is_nested()
995 vcpu->arch.walk_mmu; in kvm_inject_emulated_page_fault()
1061 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in load_pdptrs()
7843 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in kvm_mmu_gva_to_gpa_read()
7853 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in kvm_mmu_gva_to_gpa_write()
7865 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in kvm_mmu_gva_to_gpa_system()
7874 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in kvm_read_guest_virt_helper()
7907 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in kvm_fetch_guest_virt()
7966 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in kvm_write_guest_virt_helper()
8072 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in vcpu_mmio_gva_to_gpa()
8082 !permission_fault(vcpu, vcpu->arch.walk_mmu, in vcpu_mmio_gva_to_gpa()
[all …]
517 walker->fault.nested_page_fault = mmu != vcpu->arch.walk_mmu; in walk_addr_generic()
870 WARN_ON_ONCE((addr >> 32) && mmu == vcpu->arch.walk_mmu); in gva_to_gpa()
6565 kvm_mmu_invalidate_addr(vcpu, vcpu->arch.walk_mmu, gva, KVM_MMU_ROOTS_ALL); in kvm_mmu_invlpg()
6702 vcpu->arch.walk_mmu = &vcpu->arch.root_mmu; in kvm_mmu_create()
102 vcpu->arch.walk_mmu = &vcpu->arch.nested_mmu; in nested_svm_init_mmu_context()
108 vcpu->arch.walk_mmu = &vcpu->arch.root_mmu; in nested_svm_uninit_mmu_context()
864 struct kvm_mmu *walk_mmu; member
489 vcpu->arch.walk_mmu = &vcpu->arch.nested_mmu; in nested_ept_init_mmu_context()
495 vcpu->arch.walk_mmu = &vcpu->arch.root_mmu; in nested_ept_uninit_mmu_context()
3460 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in vmx_ept_load_pdptrs()
3475 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in ept_save_pdptrs()