Lines matching full:vcpu in include/kvm/arm_pmu.h (KVM arm64 PMU emulation header)

u64 kvm_pmu_get_counter_value(struct kvm_vcpu *vcpu, u64 select_idx);
void kvm_pmu_set_counter_value(struct kvm_vcpu *vcpu, u64 select_idx, u64 val);
u64 kvm_pmu_valid_counter_mask(struct kvm_vcpu *vcpu);
u64 kvm_pmu_get_pmceid(struct kvm_vcpu *vcpu, bool pmceid1);
void kvm_pmu_vcpu_init(struct kvm_vcpu *vcpu);
void kvm_pmu_vcpu_reset(struct kvm_vcpu *vcpu);
void kvm_pmu_vcpu_destroy(struct kvm_vcpu *vcpu);
void kvm_pmu_disable_counter_mask(struct kvm_vcpu *vcpu, u64 val);
void kvm_pmu_enable_counter_mask(struct kvm_vcpu *vcpu, u64 val);
void kvm_pmu_flush_hwstate(struct kvm_vcpu *vcpu);
void kvm_pmu_sync_hwstate(struct kvm_vcpu *vcpu);
bool kvm_pmu_should_notify_user(struct kvm_vcpu *vcpu);
void kvm_pmu_update_run(struct kvm_vcpu *vcpu);
void kvm_pmu_software_increment(struct kvm_vcpu *vcpu, u64 val);
void kvm_pmu_handle_pmcr(struct kvm_vcpu *vcpu, u64 val);
void kvm_pmu_set_counter_event_type(struct kvm_vcpu *vcpu, u64 data,
				    u64 select_idx);
void kvm_vcpu_reload_pmu(struct kvm_vcpu *vcpu);
int kvm_arm_pmu_v3_set_attr(struct kvm_vcpu *vcpu,
			    struct kvm_device_attr *attr);
int kvm_arm_pmu_v3_get_attr(struct kvm_vcpu *vcpu,
			    struct kvm_device_attr *attr);
int kvm_arm_pmu_v3_has_attr(struct kvm_vcpu *vcpu,
			    struct kvm_device_attr *attr);
int kvm_arm_pmu_v3_enable(struct kvm_vcpu *vcpu);
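The three *_attr handlers above back the vcpu device-attribute interface (group KVM_ARM_VCPU_PMU_V3_CTRL), which is how userspace wires up and initializes the emulated PMU. A minimal, hypothetical userspace sketch follows; the configure_vcpu_pmu() helper, the vcpu_fd parameter, and the PPI number 23 are assumptions for illustration, while the ioctl and attribute constants come from the KVM uapi:

#include <stdint.h>
#include <sys/ioctl.h>
#include <linux/kvm.h>

/* Hypothetical helper: vcpu_fd is a vcpu file descriptor from KVM_CREATE_VCPU. */
static int configure_vcpu_pmu(int vcpu_fd)
{
	int irq = 23;	/* example PPI chosen for the PMU overflow interrupt */
	struct kvm_device_attr attr = {
		.group = KVM_ARM_VCPU_PMU_V3_CTRL,
		.attr  = KVM_ARM_VCPU_PMU_V3_IRQ,
		.addr  = (uint64_t)(unsigned long)&irq,
	};

	/* Handled on the kernel side by kvm_arm_pmu_v3_set_attr(). */
	if (ioctl(vcpu_fd, KVM_SET_DEVICE_ATTR, &attr))
		return -1;

	attr.attr = KVM_ARM_VCPU_PMU_V3_INIT;
	attr.addr = 0;
	return ioctl(vcpu_fd, KVM_SET_DEVICE_ATTR, &attr);
}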
void kvm_vcpu_pmu_restore_guest(struct kvm_vcpu *vcpu);
void kvm_vcpu_pmu_restore_host(struct kvm_vcpu *vcpu);
#define kvm_vcpu_has_pmu(vcpu)				\
	(vcpu_has_feature(vcpu, KVM_ARM_VCPU_PMU_V3))
/*
 * Updates the vcpu's view of the pmu events for this cpu.
 * Must be called before every vcpu run after disabling interrupts, to ensure
 * that an interrupt cannot fire and update the structure.
 */
#define kvm_pmu_update_vcpu_events(vcpu)				\
	do {								\
		if (!has_vhe() && kvm_vcpu_has_pmu(vcpu))		\
			vcpu->arch.pmu.events = *kvm_get_pmu_events();	\
	} while (0)
#define kvm_pmu_is_3p5(vcpu) ({						\
	u64 val = IDREG(vcpu->kvm, SYS_ID_AA64DFR0_EL1);		\
	u8 pmuver = SYS_FIELD_GET(ID_AA64DFR0_EL1, PMUVer, val);	\
	pmuver >= ID_AA64DFR0_EL1_PMUVer_V3P5;				\
})
u64 kvm_vcpu_read_pmcr(struct kvm_vcpu *vcpu);
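kvm_pmu_update_vcpu_events uses the do { ... } while (0) idiom so the macro expands as a single statement even under an un-braced if, and kvm_pmu_is_3p5 uses a GNU C statement expression, ({ ... }), whose last expression becomes the macro's value. A small self-contained sketch of the two idioms; the names update_events, is_v3p5, and demo_vcpu are illustrative, not header symbols:

#include <stdio.h>

/* do/while(0): the body behaves as exactly one statement at the call site. */
#define update_events(v)				\
	do {						\
		if ((v)->active)			\
			(v)->events = 42;		\
	} while (0)

/* Statement expression: the last expression is the value of the macro. */
#define is_v3p5(v) ({				\
	int ver = (v)->pmuver;			\
	ver >= 5;				\
})

struct demo_vcpu { int active; int events; int pmuver; };

int main(void)
{
	struct demo_vcpu v = { .active = 1, .events = 0, .pmuver = 5 };

	if (v.active)
		update_events(&v);	/* expands safely as a single statement */

	printf("events=%d v3p5=%d\n", v.events, is_v3p5(&v));
	return 0;
}

Everything below this point is the other half of the header: when hardware PMU support (CONFIG_HW_PERF_EVENTS) is not built in, the same names resolve to empty static inline stubs and constant-false macros.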
static inline u64 kvm_pmu_get_counter_value(struct kvm_vcpu *vcpu,
					    u64 select_idx) { return 0; }
static inline void kvm_pmu_set_counter_value(struct kvm_vcpu *vcpu,
					     u64 select_idx, u64 val) {}
static inline u64 kvm_pmu_valid_counter_mask(struct kvm_vcpu *vcpu) { return 0; }
static inline void kvm_pmu_vcpu_init(struct kvm_vcpu *vcpu) {}
static inline void kvm_pmu_vcpu_reset(struct kvm_vcpu *vcpu) {}
static inline void kvm_pmu_vcpu_destroy(struct kvm_vcpu *vcpu) {}
static inline void kvm_pmu_disable_counter_mask(struct kvm_vcpu *vcpu, u64 val) {}
static inline void kvm_pmu_enable_counter_mask(struct kvm_vcpu *vcpu, u64 val) {}
static inline void kvm_pmu_flush_hwstate(struct kvm_vcpu *vcpu) {}
static inline void kvm_pmu_sync_hwstate(struct kvm_vcpu *vcpu) {}
static inline bool kvm_pmu_should_notify_user(struct kvm_vcpu *vcpu) { return false; }
static inline void kvm_pmu_update_run(struct kvm_vcpu *vcpu) {}
static inline void kvm_pmu_software_increment(struct kvm_vcpu *vcpu, u64 val) {}
static inline void kvm_pmu_handle_pmcr(struct kvm_vcpu *vcpu, u64 val) {}
static inline void kvm_pmu_set_counter_event_type(struct kvm_vcpu *vcpu,
						  u64 data, u64 select_idx) {}
static inline int kvm_arm_pmu_v3_set_attr(struct kvm_vcpu *vcpu,
					  struct kvm_device_attr *attr) { return -ENXIO; }
static inline int kvm_arm_pmu_v3_get_attr(struct kvm_vcpu *vcpu,
					  struct kvm_device_attr *attr) { return -ENXIO; }
static inline int kvm_arm_pmu_v3_has_attr(struct kvm_vcpu *vcpu,
					  struct kvm_device_attr *attr) { return -ENXIO; }
static inline int kvm_arm_pmu_v3_enable(struct kvm_vcpu *vcpu) { return 0; }
static inline u64 kvm_pmu_get_pmceid(struct kvm_vcpu *vcpu, bool pmceid1) { return 0; }
#define kvm_vcpu_has_pmu(vcpu)		({ false; })
#define kvm_pmu_is_3p5(vcpu)		({ false; })
static inline void kvm_pmu_update_vcpu_events(struct kvm_vcpu *vcpu) {}
static inline void kvm_vcpu_pmu_restore_guest(struct kvm_vcpu *vcpu) {}
static inline void kvm_vcpu_pmu_restore_host(struct kvm_vcpu *vcpu) {}
static inline void kvm_vcpu_reload_pmu(struct kvm_vcpu *vcpu) {}
static inline u64 kvm_vcpu_read_pmcr(struct kvm_vcpu *vcpu) { return 0; }
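This stub-versus-real split is a standard kernel header idiom: call sites are written once, and in the disabled configuration every call compiles to a no-op or a fixed error (-ENXIO for the device-attribute handlers). A self-contained sketch of the idiom; mydev_read and CONFIG_MYDEV are made-up names, not kernel symbols:

#include <stdio.h>

/* Build with -DCONFIG_MYDEV for the "real" version, without it for the stub. */
#ifdef CONFIG_MYDEV
int mydev_read(int idx);				/* real implementation below */
#else
static inline int mydev_read(int idx) { return 0; }	/* stub: compiles away */
#endif

#ifdef CONFIG_MYDEV
/* In the kernel this body would live in a .c file built only when enabled. */
int mydev_read(int idx) { return 100 + idx; }
#endif

int main(void)
{
	/* The call site is identical either way; only the build config changes. */
	printf("mydev_read(3) = %d\n", mydev_read(3));
	return 0;
}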