Lines Matching defs:vmx
890 static int __find_msr_index(struct vcpu_vmx *vmx, u32 msr) in __find_msr_index()
926 static struct shared_msr_entry *find_msr_entry(struct vcpu_vmx *vmx, u32 msr) in find_msr_entry()
989 static inline void vpid_sync_vcpu_single(struct vcpu_vmx *vmx) in vpid_sync_vcpu_single()
1004 static inline void vpid_sync_context(struct vcpu_vmx *vmx) in vpid_sync_context()
1113 static void vmx_segment_cache_clear(struct vcpu_vmx *vmx) in vmx_segment_cache_clear()
1118 static bool vmx_segment_cache_test_set(struct vcpu_vmx *vmx, unsigned seg, in vmx_segment_cache_test_set()
1133 static u16 vmx_read_guest_seg_selector(struct vcpu_vmx *vmx, unsigned seg) in vmx_read_guest_seg_selector()
1142 static ulong vmx_read_guest_seg_base(struct vcpu_vmx *vmx, unsigned seg) in vmx_read_guest_seg_base()
1151 static u32 vmx_read_guest_seg_limit(struct vcpu_vmx *vmx, unsigned seg) in vmx_read_guest_seg_limit()
1160 static u32 vmx_read_guest_seg_ar(struct vcpu_vmx *vmx, unsigned seg) in vmx_read_guest_seg_ar()
1204 static void clear_atomic_switch_msr(struct vcpu_vmx *vmx, unsigned msr) in clear_atomic_switch_msr()
1250 static void add_atomic_switch_msr(struct vcpu_vmx *vmx, unsigned msr, in add_atomic_switch_msr()
1313 static bool update_transition_efer(struct vcpu_vmx *vmx, int efer_offset) in update_transition_efer()
1387 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_save_host_state() local
1435 static void __vmx_load_host_state(struct vcpu_vmx *vmx) in __vmx_load_host_state()
1465 static void vmx_load_host_state(struct vcpu_vmx *vmx) in vmx_load_host_state()
1478 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_vcpu_load() local
1692 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_queue_exception() local
1732 static void move_msr_up(struct vcpu_vmx *vmx, int from, int to) in move_msr_up()
1746 static void setup_msrs(struct vcpu_vmx *vmx) in setup_msrs()
2167 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_set_msr() local
2673 struct vcpu_vmx *vmx = to_vmx(vcpu); in enter_pmode() local
2750 struct vcpu_vmx *vmx = to_vmx(vcpu); in enter_rmode() local
2815 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_set_efer() local
2965 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_set_cr0() local
3066 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_get_segment() local
3167 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_set_segment() local
3570 static void allocate_vpid(struct vcpu_vmx *vmx) in allocate_vpid()
3586 static void free_vpid(struct vcpu_vmx *vmx) in free_vpid()
3664 static void set_cr4_guest_host_mask(struct vcpu_vmx *vmx) in set_cr4_guest_host_mask()
3675 static u32 vmx_exec_control(struct vcpu_vmx *vmx) in vmx_exec_control()
3692 static u32 vmx_secondary_exec_control(struct vcpu_vmx *vmx) in vmx_secondary_exec_control()
3724 static int vmx_vcpu_setup(struct vcpu_vmx *vmx) in vmx_vcpu_setup()
3820 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_vcpu_reset() local
3983 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_inject_irq() local
4011 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_inject_nmi() local
4062 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_set_nmi_mask() local
4192 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_exception() local
4670 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_task_switch() local
4872 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_invalid_guest_state() local
4939 static struct loaded_vmcs *nested_get_current_vmcs02(struct vcpu_vmx *vmx) in nested_get_current_vmcs02()
4975 static void nested_free_vmcs02(struct vcpu_vmx *vmx, gpa_t vmptr) in nested_free_vmcs02()
4993 static void nested_free_all_saved_vmcss(struct vcpu_vmx *vmx) in nested_free_all_saved_vmcss()
5019 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_vmon() local
5061 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_check_permission() local
5087 static void free_nested(struct vcpu_vmx *vmx) in free_nested()
5211 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_vmclear() local
5344 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_check_vmcs12() local
5475 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_vmptrld() local
5732 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_exit_handled() local
5840 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_handle_exit() local
5929 static void vmx_complete_atomic_exit(struct vcpu_vmx *vmx) in vmx_complete_atomic_exit()
5953 static void vmx_recover_nmi_blocking(struct vcpu_vmx *vmx) in vmx_recover_nmi_blocking()
5995 static void __vmx_complete_interrupts(struct vcpu_vmx *vmx, in __vmx_complete_interrupts()
6052 static void vmx_complete_interrupts(struct vcpu_vmx *vmx) in vmx_complete_interrupts()
6073 static void atomic_switch_perf_msrs(struct vcpu_vmx *vmx) in atomic_switch_perf_msrs()
6101 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_vcpu_run() local
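Nearly every local definition in the listing above (vmx_vcpu_load(), vmx_set_msr(), vmx_vcpu_run(), and so on) obtains its vmx pointer the same way: struct vcpu_vmx *vmx = to_vmx(vcpu). The helper recovers the VMX-specific container from the generic struct kvm_vcpu that KVM passes to its callbacks, using the kernel's container_of idiom. Below is a minimal user-space sketch of that pattern; the struct fields and the local container_of macro are simplified stand-ins for illustration, not the real kernel layout.

```c
#include <stddef.h>
#include <stdio.h>

/* Simplified stand-ins; the real definitions live in
 * include/linux/kvm_host.h and arch/x86/kvm/vmx.c. */
struct kvm_vcpu {
	int vcpu_id;
};

struct vcpu_vmx {
	struct kvm_vcpu vcpu;      /* generic vCPU state, embedded as a member */
	unsigned long exit_reason; /* illustrative VMX-only field */
};

/* Equivalent in effect to the kernel's container_of(): recover the
 * address of the enclosing structure from a pointer to one member. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* The pattern behind every "struct vcpu_vmx *vmx = to_vmx(vcpu);"
 * local in the listing. */
static inline struct vcpu_vmx *to_vmx(struct kvm_vcpu *vcpu)
{
	return container_of(vcpu, struct vcpu_vmx, vcpu);
}

int main(void)
{
	struct vcpu_vmx vmx_state = { .vcpu = { .vcpu_id = 0 } };
	struct kvm_vcpu *vcpu = &vmx_state.vcpu; /* what a KVM callback receives */

	/* Round-trip from the embedded member back to the container. */
	struct vcpu_vmx *vmx = to_vmx(vcpu);
	printf("recovered vcpu_vmx at %p (expected %p)\n",
	       (void *)vmx, (void *)&vmx_state);
	return 0;
}
```

The functions listed with vmx as a parameter (e.g. setup_msrs(), vmx_vcpu_setup()) skip the conversion because their callers have already resolved the container once and pass it down directly.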