Home
last modified time | relevance | path

Searched refs:vcpu_to_pmu (Results 1 – 11 of 11) sorted by relevance

/linux/arch/x86/kvm/svm/
pmu.c:78 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_check_rdpmc_early()
90 return amd_pmu_get_pmc(vcpu_to_pmu(vcpu), idx); in amd_rdpmc_ecx_to_pmc()
95 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_msr_idx_to_pmc()
106 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_is_valid_msr()
130 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_pmu_get_msr()
152 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_pmu_set_msr()
181 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_pmu_refresh()
219 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_pmu_init()
239 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_mediated_pmu_load()
253 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_mediated_pmu_put()
svm.c:737 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in svm_recalc_pmu_msr_intercepts()
4433 rdmsrq(MSR_AMD64_PERF_CNTR_GLOBAL_CTL, vcpu_to_pmu(vcpu)->global_ctrl); in svm_vcpu_run()
/linux/arch/x86/kvm/vmx/
pmu_intel.c:83 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_rdpmc_ecx_to_pmc()
176 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_is_valid_msr()
207 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_msr_idx_to_pmc()
227 vcpu_to_pmu(vcpu)->event_count--; in intel_pmu_release_guest_lbr_event()
234 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_pmu_create_guest_lbr_event()
315 __set_bit(INTEL_PMC_IDX_FIXED_VLBR, vcpu_to_pmu(vcpu)->pmc_in_use); in intel_pmu_handle_lbr_msrs_access()
319 clear_bit(INTEL_PMC_IDX_FIXED_VLBR, vcpu_to_pmu(vcpu)->pmc_in_use); in intel_pmu_handle_lbr_msrs_access()
330 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_pmu_get_msr()
373 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_pmu_set_msr()
492 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_pmu_refresh()
[all …]
nested.c:641 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in nested_vmx_merge_pmu_msr_bitmaps()
2831 kvm_pmu_has_perf_global_ctrl(vcpu_to_pmu(vcpu)) && in prepare_vmcs02()
3174 CC(!kvm_valid_perf_global_ctrl(vcpu_to_pmu(vcpu), in nested_vmx_check_host_state()
3332 CC(!kvm_valid_perf_global_ctrl(vcpu_to_pmu(vcpu), in nested_vmx_check_guest_state()
4848 kvm_pmu_has_perf_global_ctrl(vcpu_to_pmu(vcpu))) in load_vmcs12_host_state()
vmx.c:4292 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in vmx_recalc_pmu_msr_intercepts()
7420 struct kvm_pmu *pmu = vcpu_to_pmu(&vmx->vcpu); in atomic_switch_perf_msrs()
7444 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in vmx_refresh_guest_perf_global_control()
/linux/arch/riscv/kvm/
vcpu_pmu.c:209 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in pmu_fw_ctr_read_hi()
240 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in pmu_ctr_read()
282 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_riscv_pmu_overflow()
351 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_riscv_vcpu_pmu_incr_fw()
368 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_riscv_vcpu_pmu_read_hpm()
401 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_pmu_clear_snapshot_area()
412 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_riscv_vcpu_pmu_snapshot_set_shmem()
515 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_riscv_vcpu_pmu_num_ctrs()
525 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_riscv_vcpu_pmu_ctr_info()
542 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_riscv_vcpu_pmu_ctr_start()
[all …]
vcpu_sbi_pmu.c:21 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_sbi_ext_pmu_handler()
88 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_sbi_ext_pmu_probe()
/linux/arch/x86/kvm/
pmu.c:637 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_handle_event()
726 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_rdpmc()
751 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_need_any_pmc_intercept()
768 !kvm_pmu_has_perf_global_ctrl(vcpu_to_pmu(vcpu)); in kvm_need_perf_global_ctrl_intercept()
774 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_need_rdpmc_intercept()
803 return kvm_pmu_has_perf_global_ctrl(vcpu_to_pmu(vcpu)); in kvm_pmu_is_valid_msr()
813 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_mark_pmc_in_use()
822 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_get_msr()
848 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_set_msr()
917 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_reset()
[all …]
pmu.h:9 #define vcpu_to_pmu(vcpu) (&(vcpu)->arch.pmu) macro
76 return enable_mediated_pmu && vcpu_to_pmu(vcpu)->version; in kvm_vcpu_has_mediated_pmu()
239 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_is_fastpath_emulation_allowed()
x86.c:5165 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_arch_vcpu_load()
/linux/arch/riscv/include/asm/
kvm_vcpu_pmu.h:63 #define vcpu_to_pmu(vcpu) (&(vcpu)->arch.pmu_context) macro