
Searched refs:pmu (Results 1 – 25 of 765) sorted by relevance


/linux/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/
base.c
32 struct nvkm_pmu *pmu = device->pmu; in nvkm_pmu_fan_controlled() local
37 if (pmu && pmu->func->code.size) in nvkm_pmu_fan_controlled()
48 nvkm_pmu_pgob(struct nvkm_pmu *pmu, bool enable) in nvkm_pmu_pgob() argument
50 if (pmu && pmu->func->pgob) in nvkm_pmu_pgob()
51 pmu->func->pgob(pmu, enable); in nvkm_pmu_pgob()
57 struct nvkm_pmu *pmu = container_of(work, typeof(*pmu), recv.work); in nvkm_pmu_recv() local
58 return pmu->func->recv(pmu); in nvkm_pmu_recv()
62 nvkm_pmu_send(struct nvkm_pmu *pmu, u32 reply[2], in nvkm_pmu_send() argument
65 if (!pmu || !pmu->func->send) in nvkm_pmu_send()
67 return pmu->func->send(pmu, reply, process, message, data0, data1); in nvkm_pmu_send()
[all …]
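
The base.c hits above show the pattern nvkm_pmu_recv() relies on: a work item embedded in struct nvkm_pmu, with container_of() recovering the outer object inside the handler. A minimal userspace sketch of that idiom (container_of is reimplemented here; in the kernel it comes from <linux/container_of.h>, and the struct names below are illustrative, not the driver's):

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct work { int pending; };

struct fake_pmu {
	int id;
	struct work recv_work;	/* embedded member, like pmu->recv.work */
};

static void recv_handler(struct work *w)
{
	/* recover the enclosing object from the member pointer alone */
	struct fake_pmu *pmu = container_of(w, struct fake_pmu, recv_work);

	printf("handler runs for pmu %d\n", pmu->id);
}

int main(void)
{
	struct fake_pmu pmu = { .id = 7 };

	recv_handler(&pmu.recv_work);	/* only the member pointer is passed */
	return 0;
}
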
gt215.c
30 gt215_pmu_send(struct nvkm_pmu *pmu, u32 reply[2], in gt215_pmu_send() argument
33 struct nvkm_subdev *subdev = &pmu->subdev; in gt215_pmu_send()
37 mutex_lock(&pmu->send.mutex); in gt215_pmu_send()
45 mutex_unlock(&pmu->send.mutex); in gt215_pmu_send()
54 pmu->recv.message = message; in gt215_pmu_send()
55 pmu->recv.process = process; in gt215_pmu_send()
65 pmu->send.base)); in gt215_pmu_send()
77 wait_event(pmu->recv.wait, (pmu->recv.process == 0)); in gt215_pmu_send()
78 reply[0] = pmu->recv.data[0]; in gt215_pmu_send()
79 reply[1] = pmu->recv.data[1]; in gt215_pmu_send()
[all …]
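
gt215_pmu_send() serializes callers on send.mutex, records which process/message a reply is expected for, and then sleeps in wait_event() until the receive path clears recv.process and fills recv.data[]. A compilable userspace approximation of that handshake, with a pthread condition variable standing in for wait_event() (build with cc -pthread; all names are illustrative):

#include <pthread.h>
#include <stdio.h>

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
static unsigned int recv_process;	/* nonzero while a reply is pending */
static unsigned int recv_data[2];

static void *irq_path(void *arg)
{
	pthread_mutex_lock(&lock);
	recv_data[0] = 0xcafe;		/* reply payload */
	recv_data[1] = 0xf00d;
	recv_process = 0;		/* mark the reply as delivered */
	pthread_cond_signal(&cond);
	pthread_mutex_unlock(&lock);
	return NULL;
}

int main(void)
{
	pthread_t irq;
	unsigned int reply[2];

	pthread_mutex_lock(&lock);
	recv_process = 1;		/* expect a reply for this message */
	pthread_create(&irq, NULL, irq_path, NULL);
	while (recv_process)		/* wait_event(pmu->recv.wait, ...) */
		pthread_cond_wait(&cond, &lock);
	reply[0] = recv_data[0];
	reply[1] = recv_data[1];
	pthread_mutex_unlock(&lock);
	pthread_join(irq, NULL);
	printf("reply: %#x %#x\n", reply[0], reply[1]);
	return 0;
}
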
gk20a.c
51 gk20a_pmu_dvfs_target(struct gk20a_pmu *pmu, int *state) in gk20a_pmu_dvfs_target() argument
53 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_target()
59 gk20a_pmu_dvfs_get_cur_state(struct gk20a_pmu *pmu, int *state) in gk20a_pmu_dvfs_get_cur_state() argument
61 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_get_cur_state()
67 gk20a_pmu_dvfs_get_target_state(struct gk20a_pmu *pmu, in gk20a_pmu_dvfs_get_target_state() argument
70 struct gk20a_pmu_dvfs_data *data = pmu->data; in gk20a_pmu_dvfs_get_target_state()
71 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_get_target_state()
86 nvkm_trace(&pmu->base.subdev, "cur level = %d, new level = %d\n", in gk20a_pmu_dvfs_get_target_state()
95 gk20a_pmu_dvfs_get_dev_status(struct gk20a_pmu *pmu, in gk20a_pmu_dvfs_get_dev_status() argument
98 struct nvkm_falcon *falcon = &pmu->base.falcon; in gk20a_pmu_dvfs_get_dev_status()
[all …]
gm20b.c
42 struct nvkm_pmu *pmu = container_of(falcon, typeof(*pmu), falcon); in gm20b_pmu_acr_bootstrap_falcon() local
52 ret = nvkm_falcon_cmdq_send(pmu->hpq, &cmd.cmd.hdr, in gm20b_pmu_acr_bootstrap_falcon()
54 &pmu->subdev, msecs_to_jiffies(1000)); in gm20b_pmu_acr_bootstrap_falcon()
129 struct nvkm_pmu *pmu = priv; in gm20b_pmu_acr_init_wpr_callback() local
130 struct nvkm_subdev *subdev = &pmu->subdev; in gm20b_pmu_acr_init_wpr_callback()
139 complete_all(&pmu->wpr_ready); in gm20b_pmu_acr_init_wpr_callback()
144 gm20b_pmu_acr_init_wpr(struct nvkm_pmu *pmu) in gm20b_pmu_acr_init_wpr() argument
154 return nvkm_falcon_cmdq_send(pmu->hpq, &cmd.cmd.hdr, in gm20b_pmu_acr_init_wpr()
155 gm20b_pmu_acr_init_wpr_callback, pmu, 0); in gm20b_pmu_acr_init_wpr()
159 gm20b_pmu_initmsg(struct nvkm_pmu *pmu) in gm20b_pmu_initmsg() argument
[all …]
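
gm20b_pmu_acr_init_wpr() queues a command together with a reply callback and a priv pointer; the callback (gm20b_pmu_acr_init_wpr_callback) later gets that pointer back and signals readiness via complete_all(). A sketch of the callback-with-priv shape only, with the command queue reduced to an inline call (hypothetical names, not the nvkm_falcon_cmdq API):

#include <stdio.h>
#include <stdbool.h>

struct fake_pmu { bool wpr_ready; };

typedef void (*msg_cb)(void *priv);

static void wpr_callback(void *priv)
{
	struct fake_pmu *pmu = priv;	/* the priv pointer comes back to us */

	pmu->wpr_ready = true;		/* complete_all(&pmu->wpr_ready) */
}

/* stand-in for nvkm_falcon_cmdq_send(): runs the reply callback inline */
static int cmdq_send(msg_cb cb, void *priv)
{
	cb(priv);
	return 0;
}

int main(void)
{
	struct fake_pmu pmu = { 0 };

	cmdq_send(wpr_callback, &pmu);
	printf("wpr_ready = %d\n", pmu.wpr_ready);
	return 0;
}
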
/linux/tools/perf/util/
pmus.c
113 struct perf_pmu *pmu, *tmp; in perf_pmus__destroy() local
115 list_for_each_entry_safe(pmu, tmp, &core_pmus, list) { in perf_pmus__destroy()
116 list_del(&pmu->list); in perf_pmus__destroy()
118 perf_pmu__delete(pmu); in perf_pmus__destroy()
120 list_for_each_entry_safe(pmu, tmp, &other_pmus, list) { in perf_pmus__destroy()
121 list_del(&pmu->list); in perf_pmus__destroy()
123 perf_pmu__delete(pmu); in perf_pmus__destroy()
130 struct perf_pmu *pmu; in pmu_find() local
132 list_for_each_entry(pmu, &core_pmus, list) { in pmu_find()
133 if (!strcmp(pmu->name, name) || in pmu_find()
[all …]
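
perf_pmus__destroy() walks core_pmus and other_pmus with list_for_each_entry_safe() precisely because it unlinks and frees entries mid-walk. The same rule on a plain singly linked list: read the next pointer before freeing the current node (minimal sketch, hypothetical types):

#include <stdlib.h>

struct node { struct node *next; };

static void destroy_all(struct node *head)
{
	struct node *n, *tmp;

	for (n = head; n; n = tmp) {
		tmp = n->next;	/* save next before n is freed */
		free(n);
	}
}

int main(void)
{
	struct node *head = calloc(1, sizeof(*head));

	head->next = calloc(1, sizeof(*head));
	destroy_all(head);
	return 0;
}
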
pmu.c
135 static int pmu_aliases_parse(struct perf_pmu *pmu);
178 static void perf_pmu_format__load(const struct perf_pmu *pmu, struct perf_pmu_format *format) in perf_pmu_format__load() argument
186 if (!perf_pmu__pathname_scnprintf(path, sizeof(path), pmu->name, "format")) in perf_pmu_format__load()
204 static int perf_pmu__format_parse(struct perf_pmu *pmu, int dirfd, bool eager_load) in perf_pmu__format_parse() argument
219 format = perf_pmu__new_format(&pmu->format, name); in perf_pmu__format_parse()
252 static int pmu_format(struct perf_pmu *pmu, int dirfd, const char *name, bool eager_load) in pmu_format() argument
261 if (perf_pmu__format_parse(pmu, fd, eager_load)) in pmu_format()
309 static int perf_pmu__parse_scale(struct perf_pmu *pmu, struct perf_pmu_alias *alias) in perf_pmu__parse_scale() argument
321 scnprintf(path + len, sizeof(path) - len, "%s/events/%s.scale", pmu->name, alias->name); in perf_pmu__parse_scale()
345 static int perf_pmu__parse_unit(struct perf_pmu *pmu, struct perf_pmu_alias *alias) in perf_pmu__parse_unit() argument
[all …]
pmu.h
112 void (*perf_event_attr_init_default)(const struct perf_pmu *pmu,
208 const struct perf_pmu *pmu; member
226 void pmu_add_sys_aliases(struct perf_pmu *pmu);
227 int perf_pmu__config(struct perf_pmu *pmu, struct perf_event_attr *attr,
231 int perf_pmu__config_terms(const struct perf_pmu *pmu,
236 __u64 perf_pmu__format_bits(struct perf_pmu *pmu, const char *name);
237 int perf_pmu__format_type(struct perf_pmu *pmu, const char *name);
238 int perf_pmu__check_alias(struct perf_pmu *pmu, struct parse_events_terms *head_terms,
241 int perf_pmu__find_event(struct perf_pmu *pmu, const char *event, void *state, pmu_event_callback c…
244 bool perf_pmu__has_format(const struct perf_pmu *pmu, const char *name);
[all …]
/linux/drivers/perf/
fsl_imx8_ddr_perf.c
52 #define to_ddr_pmu(p) container_of(p, struct ddr_pmu, pmu)
113 struct pmu pmu; member
130 struct ddr_pmu *pmu = dev_get_drvdata(dev); in ddr_perf_identifier_show() local
132 return sysfs_emit(page, "%s\n", pmu->devtype_data->identifier); in ddr_perf_identifier_show()
140 struct ddr_pmu *pmu = dev_get_drvdata(dev); in ddr_perf_identifier_attr_visible() local
142 if (!pmu->devtype_data->identifier) in ddr_perf_identifier_attr_visible()
167 static u32 ddr_perf_filter_cap_get(struct ddr_pmu *pmu, int cap) in ddr_perf_filter_cap_get() argument
169 u32 quirks = pmu->devtype_data->quirks; in ddr_perf_filter_cap_get()
190 struct ddr_pmu *pmu = dev_get_drvdata(dev); in ddr_perf_filter_cap_show() local
195 return sysfs_emit(buf, "%u\n", ddr_perf_filter_cap_get(pmu, cap)); in ddr_perf_filter_cap_show()
[all …]
fsl_imx9_ddr_perf.c
59 #define to_ddr_pmu(p) container_of(p, struct ddr_pmu, pmu)
84 struct pmu pmu; member
117 static inline bool axi_filter_v1(struct ddr_pmu *pmu) in axi_filter_v1() argument
119 return pmu->devtype_data->filter_ver == DDR_PERF_AXI_FILTER_V1; in axi_filter_v1()
122 static inline bool axi_filter_v2(struct ddr_pmu *pmu) in axi_filter_v2() argument
124 return pmu->devtype_data->filter_ver == DDR_PERF_AXI_FILTER_V2; in axi_filter_v2()
140 struct ddr_pmu *pmu = dev_get_drvdata(dev); in ddr_perf_identifier_show() local
142 return sysfs_emit(page, "%s\n", pmu->devtype_data->identifier); in ddr_perf_identifier_show()
160 struct ddr_pmu *pmu = dev_get_drvdata(dev); in ddr_perf_cpumask_show() local
162 return cpumap_print_to_pagebuf(true, buf, cpumask_of(pmu->cpu)); in ddr_perf_cpumask_show()
[all …]
marvell_cn10k_ddr_pmu.c
148 struct pmu pmu; member
161 void (*enable_read_freerun_counter)(struct cn10k_ddr_pmu *pmu,
163 void (*enable_write_freerun_counter)(struct cn10k_ddr_pmu *pmu,
165 void (*clear_read_freerun_counter)(struct cn10k_ddr_pmu *pmu);
166 void (*clear_write_freerun_counter)(struct cn10k_ddr_pmu *pmu);
167 void (*pmu_overflow_handler)(struct cn10k_ddr_pmu *pmu, int evt_idx);
170 #define to_cn10k_ddr_pmu(p) container_of(p, struct cn10k_ddr_pmu, pmu)
365 struct cn10k_ddr_pmu *pmu = dev_get_drvdata(dev); in cn10k_ddr_perf_cpumask_show() local
367 return cpumap_print_to_pagebuf(true, buf, cpumask_of(pmu->cpu)); in cn10k_ddr_perf_cpumask_show()
438 static int cn10k_ddr_perf_alloc_counter(struct cn10k_ddr_pmu *pmu, in cn10k_ddr_perf_alloc_counter() argument
[all …]
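
The cn10k driver hides hardware-revision differences behind a table of function pointers, as the enable_/clear_*_freerun_counter members above suggest. A stripped-down sketch of that ops-table dispatch (all names hypothetical):

#include <stdio.h>

struct fake_pmu;

struct fake_pmu_ops {
	void (*enable_read_counter)(struct fake_pmu *pmu);
	void (*clear_read_counter)(struct fake_pmu *pmu);
};

struct fake_pmu {
	const struct fake_pmu_ops *ops;	/* chosen once per SoC variant */
};

static void v1_enable(struct fake_pmu *pmu) { printf("v1 enable\n"); }
static void v1_clear(struct fake_pmu *pmu)  { printf("v1 clear\n"); }

static const struct fake_pmu_ops v1_ops = {
	.enable_read_counter = v1_enable,
	.clear_read_counter = v1_clear,
};

int main(void)
{
	struct fake_pmu pmu = { .ops = &v1_ops };

	pmu.ops->enable_read_counter(&pmu);	/* dispatches per variant */
	pmu.ops->clear_read_counter(&pmu);
	return 0;
}

The common code only ever calls through the ops pointer, so adding a new hardware revision means adding one table, not touching every call site.
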
arm_pmu_platform.c
25 static int probe_current_pmu(struct arm_pmu *pmu, in probe_current_pmu() argument
37 ret = info->init(pmu); in probe_current_pmu()
45 static int pmu_parse_percpu_irq(struct arm_pmu *pmu, int irq) in pmu_parse_percpu_irq() argument
48 struct pmu_hw_events __percpu *hw_events = pmu->hw_events; in pmu_parse_percpu_irq()
50 ret = irq_get_percpu_devid_partition(irq, &pmu->supported_cpus); in pmu_parse_percpu_irq()
54 for_each_cpu(cpu, &pmu->supported_cpus) in pmu_parse_percpu_irq()
95 static int pmu_parse_irqs(struct arm_pmu *pmu) in pmu_parse_irqs() argument
98 struct platform_device *pdev = pmu->plat_device; in pmu_parse_irqs()
99 struct pmu_hw_events __percpu *hw_events = pmu->hw_events; in pmu_parse_irqs()
112 pmu->pmu.capabilities |= PERF_PMU_CAP_NO_INTERRUPT; in pmu_parse_irqs()
[all …]
riscv_pmu_legacy.c
110 static void pmu_legacy_init(struct riscv_pmu *pmu) in pmu_legacy_init() argument
114 pmu->cmask = BIT(RISCV_PMU_LEGACY_CYCLE) | in pmu_legacy_init()
116 pmu->ctr_start = pmu_legacy_ctr_start; in pmu_legacy_init()
117 pmu->ctr_stop = NULL; in pmu_legacy_init()
118 pmu->event_map = pmu_legacy_event_map; in pmu_legacy_init()
119 pmu->ctr_get_idx = pmu_legacy_ctr_get_idx; in pmu_legacy_init()
120 pmu->ctr_get_width = pmu_legacy_ctr_get_width; in pmu_legacy_init()
121 pmu->ctr_clear_idx = NULL; in pmu_legacy_init()
122 pmu->ctr_read = pmu_legacy_read_ctr; in pmu_legacy_init()
123 pmu->event_mapped = pmu_legacy_event_mapped; in pmu_legacy_init()
[all …]
/linux/drivers/gpu/drm/i915/
i915_pmu.c
33 return container_of(event->pmu, struct i915_pmu, base); in event_to_pmu()
36 static struct drm_i915_private *pmu_to_i915(struct i915_pmu *pmu) in pmu_to_i915() argument
38 return container_of(pmu, struct drm_i915_private, pmu); in pmu_to_i915()
149 static bool pmu_needs_timer(struct i915_pmu *pmu) in pmu_needs_timer() argument
151 struct drm_i915_private *i915 = pmu_to_i915(pmu); in pmu_needs_timer()
159 enable = pmu->enable; in pmu_needs_timer()
201 static u64 read_sample(struct i915_pmu *pmu, unsigned int gt_id, int sample) in read_sample() argument
203 return pmu->sample[gt_id][sample].cur; in read_sample()
207 store_sample(struct i915_pmu *pmu, unsigned int gt_id, int sample, u64 val) in store_sample() argument
209 pmu->sample[gt_id][sample].cur = val; in store_sample()
[all …]
/linux/drivers/soc/dove/
pmu.c
50 struct pmu_data *pmu = rcdev_to_pmu(rc); in pmu_reset_reset() local
54 spin_lock_irqsave(&pmu->lock, flags); in pmu_reset_reset()
55 val = readl_relaxed(pmu->pmc_base + PMC_SW_RST); in pmu_reset_reset()
56 writel_relaxed(val & ~BIT(id), pmu->pmc_base + PMC_SW_RST); in pmu_reset_reset()
57 writel_relaxed(val | BIT(id), pmu->pmc_base + PMC_SW_RST); in pmu_reset_reset()
58 spin_unlock_irqrestore(&pmu->lock, flags); in pmu_reset_reset()
65 struct pmu_data *pmu = rcdev_to_pmu(rc); in pmu_reset_assert() local
69 spin_lock_irqsave(&pmu->lock, flags); in pmu_reset_assert()
70 val &= readl_relaxed(pmu->pmc_base + PMC_SW_RST); in pmu_reset_assert()
71 writel_relaxed(val, pmu->pmc_base + PMC_SW_RST); in pmu_reset_assert()
[all …]
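
pmu_reset_reset() pulses one bit of the shared PMC_SW_RST register under pmu->lock: read the register, write it back with the bit cleared, then write it with the bit set again. The same locked read-modify-write, sketched with an ordinary variable in place of the MMIO register and a mutex in place of the spinlock (illustrative only):

#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static uint32_t pmc_sw_rst = 0xff;	/* fake register, all lines deasserted */

static void reset_pulse(unsigned int id)
{
	uint32_t val;

	pthread_mutex_lock(&lock);
	val = pmc_sw_rst;			/* readl_relaxed() */
	pmc_sw_rst = val & ~(1u << id);		/* assert reset (active low) */
	pmc_sw_rst = val | (1u << id);		/* release it again */
	pthread_mutex_unlock(&lock);
}

int main(void)
{
	reset_pulse(3);
	printf("PMC_SW_RST = %#x\n", pmc_sw_rst);
	return 0;
}

The lock matters because other reset lines share the same register: without it, two concurrent pulses could overwrite each other's read-modify-write.
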
/linux/drivers/pmdomain/starfive/
jh71xx-pmu.c
64 struct jh71xx_pmu *pmu);
81 struct jh71xx_pmu *pmu; member
87 struct jh71xx_pmu *pmu = pmd->pmu; in jh71xx_pmu_get_state() local
92 *is_on = readl(pmu->base + pmu->match_data->pmu_status) & mask; in jh71xx_pmu_get_state()
99 struct jh71xx_pmu *pmu = pmd->pmu; in jh7110_pmu_set_state() local
107 spin_lock_irqsave(&pmu->lock, flags); in jh7110_pmu_set_state()
129 writel(mask, pmu->base + mode); in jh7110_pmu_set_state()
139 writel(JH71XX_PMU_SW_ENCOURAGE_ON, pmu->base + JH71XX_PMU_SW_ENCOURAGE); in jh7110_pmu_set_state()
140 writel(encourage_lo, pmu->base + JH71XX_PMU_SW_ENCOURAGE); in jh7110_pmu_set_state()
141 writel(encourage_hi, pmu->base + JH71XX_PMU_SW_ENCOURAGE); in jh7110_pmu_set_state()
[all …]
/linux/arch/x86/kvm/svm/
pmu.c
28 static struct kvm_pmc *amd_pmu_get_pmc(struct kvm_pmu *pmu, int pmc_idx) in amd_pmu_get_pmc() argument
30 unsigned int num_counters = pmu->nr_arch_gp_counters; in amd_pmu_get_pmc()
35 return &pmu->gp_counters[array_index_nospec(pmc_idx, num_counters)]; in amd_pmu_get_pmc()
38 static inline struct kvm_pmc *get_gp_pmc_amd(struct kvm_pmu *pmu, u32 msr, in get_gp_pmc_amd() argument
41 struct kvm_vcpu *vcpu = pmu_to_vcpu(pmu); in get_gp_pmc_amd()
44 if (!pmu->version) in get_gp_pmc_amd()
73 return amd_pmu_get_pmc(pmu, idx); in get_gp_pmc_amd()
78 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_check_rdpmc_early() local
80 if (idx >= pmu->nr_arch_gp_counters) in amd_check_rdpmc_early()
95 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_msr_idx_to_pmc() local
[all …]
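
amd_pmu_get_pmc() bounds-checks pmc_idx and then narrows it with array_index_nospec() so a mispredicted branch cannot be used to index past nr_arch_gp_counters speculatively. The sketch below shows only the shape of that check; the masked clamp is a simplified stand-in, not the hardened helper from <linux/nospec.h>:

#include <stdio.h>

#define NR_COUNTERS 6u

static unsigned long counters[NR_COUNTERS];

static unsigned long *get_counter(unsigned int idx)
{
	if (idx >= NR_COUNTERS)
		return NULL;
	/* kernel: idx = array_index_nospec(idx, NR_COUNTERS); */
	idx %= NR_COUNTERS;	/* stand-in: keep idx provably in range */
	return &counters[idx];
}

int main(void)
{
	printf("valid:   %p\n", (void *)get_counter(2));
	printf("invalid: %p\n", (void *)get_counter(9));
	return 0;
}
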
/linux/drivers/perf/amlogic/
meson_ddr_pmu_core.c
21 struct pmu pmu; member
35 #define to_ddr_pmu(p) container_of(p, struct ddr_pmu, pmu)
38 static void dmc_pmu_enable(struct ddr_pmu *pmu) in dmc_pmu_enable() argument
40 if (!pmu->pmu_enabled) in dmc_pmu_enable()
41 pmu->info.hw_info->enable(&pmu->info); in dmc_pmu_enable()
43 pmu->pmu_enabled = true; in dmc_pmu_enable()
46 static void dmc_pmu_disable(struct ddr_pmu *pmu) in dmc_pmu_disable() argument
48 if (pmu->pmu_enabled) in dmc_pmu_disable()
49 pmu->info.hw_info->disable(&pmu->info); in dmc_pmu_disable()
51 pmu->pmu_enabled = false; in dmc_pmu_disable()
[all …]
/linux/arch/x86/kvm/vmx/
pmu_intel.c
57 static void reprogram_fixed_counters(struct kvm_pmu *pmu, u64 data) in reprogram_fixed_counters() argument
60 u64 old_fixed_ctr_ctrl = pmu->fixed_ctr_ctrl; in reprogram_fixed_counters()
63 pmu->fixed_ctr_ctrl = data; in reprogram_fixed_counters()
64 for (i = 0; i < pmu->nr_arch_fixed_counters; i++) { in reprogram_fixed_counters()
71 pmc = get_fixed_pmc(pmu, MSR_CORE_PERF_FIXED_CTR0 + i); in reprogram_fixed_counters()
73 __set_bit(KVM_FIXED_PMC_BASE_IDX + i, pmu->pmc_in_use); in reprogram_fixed_counters()
82 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_rdpmc_ecx_to_pmc() local
97 if (WARN_ON_ONCE(!pmu->version)) in intel_rdpmc_ecx_to_pmc()
110 counters = pmu->fixed_counters; in intel_rdpmc_ecx_to_pmc()
111 num_counters = pmu->nr_arch_fixed_counters; in intel_rdpmc_ecx_to_pmc()
[all …]
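
reprogram_fixed_counters() compares old and new per-counter control fields packed into fixed_ctr_ctrl. Per the SDM, IA32_FIXED_CTR_CTRL gives each fixed counter a 4-bit control nibble; extracting those nibbles looks roughly like this (the MSR value is illustrative):

#include <stdio.h>
#include <stdint.h>

int main(void)
{
	uint64_t fixed_ctr_ctrl = 0x0b0;  /* example: only counter 1 programmed */
	int nr_fixed = 3;

	for (int i = 0; i < nr_fixed; i++) {
		/* 4 bits of control state per fixed counter */
		uint64_t ctrl = (fixed_ctr_ctrl >> (i * 4)) & 0xf;

		printf("fixed counter %d: ctrl = %#llx%s\n", i,
		       (unsigned long long)ctrl, ctrl ? " (in use)" : "");
	}
	return 0;
}
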
/linux/drivers/gpu/drm/xe/
xe_pmu.c
93 struct xe_device *xe = container_of(event->pmu, typeof(*xe), pmu.base); in event_to_gt()
101 struct xe_device *xe = container_of(event->pmu, typeof(*xe), pmu.base); in event_to_hwe()
135 struct xe_device *xe = container_of(event->pmu, typeof(*xe), pmu.base); in event_gt_forcewake()
160 static bool event_supported(struct xe_pmu *pmu, unsigned int gt_id, in event_supported() argument
163 struct xe_device *xe = container_of(pmu, typeof(*xe), pmu); in event_supported()
169 return id < sizeof(pmu->supported_events) * BITS_PER_BYTE && in event_supported()
170 pmu->supported_events & BIT_ULL(id); in event_supported()
175 struct xe_device *xe = container_of(event->pmu, typeof(*xe), pmu.base); in event_param_valid()
216 struct xe_device *xe = container_of(event->pmu, typeof(*xe), pmu.base); in xe_pmu_event_destroy()
234 struct xe_device *xe = container_of(event->pmu, typeof(*xe), pmu.base); in xe_pmu_event_init()
[all …]
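
event_supported() treats pmu->supported_events as a 64-bit capability mask: an event id is valid when it fits within the mask's width and its bit is set. The same check, standalone:

#include <stdio.h>
#include <stdint.h>

#define BIT_ULL(n) (1ULL << (n))

static uint64_t supported_events = BIT_ULL(0) | BIT_ULL(5);

static int event_supported(unsigned int id)
{
	/* width check first, so BIT_ULL(id) never shifts out of range */
	return id < sizeof(supported_events) * 8 &&
	       (supported_events & BIT_ULL(id)) != 0;
}

int main(void)
{
	printf("id 5:  %d\n", event_supported(5));
	printf("id 6:  %d\n", event_supported(6));
	printf("id 99: %d\n", event_supported(99));
	return 0;
}
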
/linux/tools/perf/arch/x86/util/
pmu.c
107 static int uncore_cha_snc(struct perf_pmu *pmu) in uncore_cha_snc() argument
124 if (sscanf(pmu->name, "uncore_cha_%u", &cha_num) != 1) { in uncore_cha_snc()
125 pr_warning("Unexpected: unable to compute CHA number '%s'\n", pmu->name); in uncore_cha_snc()
135 static int uncore_imc_snc(struct perf_pmu *pmu) in uncore_imc_snc() argument
160 if (sscanf(pmu->name, "uncore_imc_%u", &imc_num) != 1) { in uncore_imc_snc()
161 pr_warning("Unexpected: unable to compute IMC number '%s'\n", pmu->name); in uncore_imc_snc()
208 static void gnr_uncore_cha_imc_adjust_cpumask_for_snc(struct perf_pmu *pmu, bool cha) in gnr_uncore_cha_imc_adjust_cpumask_for_snc() argument
233 if (perf_cpu_map__cpu(pmu->cpus, 0).cpu != 0) { in gnr_uncore_cha_imc_adjust_cpumask_for_snc()
234 pr_debug("Ignoring cpumask adjust for %s as unexpected first CPU\n", pmu->name); in gnr_uncore_cha_imc_adjust_cpumask_for_snc()
238 pmu_snc = cha ? uncore_cha_snc(pmu) : uncore_imc_snc(pmu); in gnr_uncore_cha_imc_adjust_cpumask_for_snc()
[all …]
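
uncore_cha_snc() recovers the CHA index by pattern-matching the PMU name with sscanf(), warning when the name does not have the expected shape. The same parse in isolation:

#include <stdio.h>

int main(void)
{
	const char *name = "uncore_cha_42";
	unsigned int cha_num;

	if (sscanf(name, "uncore_cha_%u", &cha_num) != 1) {
		fprintf(stderr, "unable to compute CHA number '%s'\n", name);
		return 1;
	}
	printf("CHA %u\n", cha_num);
	return 0;
}
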
/linux/arch/x86/events/amd/
uncore.c
55 struct pmu pmu; member
96 return container_of(event->pmu, struct amd_uncore_pmu, pmu); in event_to_amd_uncore_pmu()
112 event->pmu->read(event); in amd_uncore_hrtimer()
165 struct amd_uncore_pmu *pmu = event_to_amd_uncore_pmu(event); in amd_uncore_start() local
166 struct amd_uncore_ctx *ctx = *per_cpu_ptr(pmu->ctx, event->cpu); in amd_uncore_start()
183 struct amd_uncore_pmu *pmu = event_to_amd_uncore_pmu(event); in amd_uncore_stop() local
184 struct amd_uncore_ctx *ctx = *per_cpu_ptr(pmu->ctx, event->cpu); in amd_uncore_stop()
191 event->pmu->read(event); in amd_uncore_stop()
204 struct amd_uncore_pmu *pmu = event_to_amd_uncore_pmu(event); in amd_uncore_add() local
205 struct amd_uncore_ctx *ctx = *per_cpu_ptr(pmu->ctx, event->cpu); in amd_uncore_add()
[all …]
/linux/tools/perf/arch/arm/util/
pmu.c
19 void perf_pmu__arch_init(struct perf_pmu *pmu) in perf_pmu__arch_init() argument
24 if (!strcmp(pmu->name, CORESIGHT_ETM_PMU_NAME)) { in perf_pmu__arch_init()
26 pmu->auxtrace = true; in perf_pmu__arch_init()
27 pmu->selectable = true; in perf_pmu__arch_init()
28 pmu->perf_event_attr_init_default = cs_etm_get_default_config; in perf_pmu__arch_init()
30 } else if (strstarts(pmu->name, ARM_SPE_PMU_NAME)) { in perf_pmu__arch_init()
31 pmu->auxtrace = true; in perf_pmu__arch_init()
32 pmu->selectable = true; in perf_pmu__arch_init()
33 pmu->is_uncore = false; in perf_pmu__arch_init()
34 pmu->perf_event_attr_init_default = arm_spe_pmu_default_config; in perf_pmu__arch_init()
[all …]
/linux/arch/x86/kvm/
pmu.h
9 #define vcpu_to_pmu(vcpu) (&(vcpu)->arch.pmu)
10 #define pmu_to_vcpu(pmu) (container_of((pmu), struct kvm_vcpu, arch.pmu)) argument
11 #define pmc_to_pmu(pmc) (&(pmc)->vcpu->arch.pmu)
47 static inline bool kvm_pmu_has_perf_global_ctrl(struct kvm_pmu *pmu) in kvm_pmu_has_perf_global_ctrl() argument
58 return pmu->version > 1; in kvm_pmu_has_perf_global_ctrl()
75 static inline struct kvm_pmc *kvm_pmc_idx_to_pmc(struct kvm_pmu *pmu, int idx) in kvm_pmc_idx_to_pmc() argument
77 if (idx < pmu->nr_arch_gp_counters) in kvm_pmc_idx_to_pmc()
78 return &pmu->gp_counters[idx]; in kvm_pmc_idx_to_pmc()
81 if (idx >= 0 && idx < pmu->nr_arch_fixed_counters) in kvm_pmc_idx_to_pmc()
82 return &pmu->fixed_counters[idx]; in kvm_pmc_idx_to_pmc()
[all …]
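
kvm_pmc_idx_to_pmc() maps one flat index space onto two arrays: indices below nr_arch_gp_counters select GP counters, and the rest, rebased at KVM_FIXED_PMC_BASE_IDX, select fixed counters. A sketch of that split with hypothetical array sizes, plain ints standing in for struct kvm_pmc, and an assumed base of 32:

#include <stdio.h>

#define FIXED_BASE_IDX 32	/* assumed stand-in for KVM_FIXED_PMC_BASE_IDX */

static int gp[4], fixed[3];
static const int nr_gp = 4, nr_fixed = 3;

static int *idx_to_pmc(int idx)
{
	if (idx < nr_gp)
		return &gp[idx];	/* GP counters occupy the low indices */
	idx -= FIXED_BASE_IDX;		/* rebase into the fixed array */
	if (idx >= 0 && idx < nr_fixed)
		return &fixed[idx];
	return NULL;			/* hole between the two ranges */
}

int main(void)
{
	printf("gp2    -> %p\n", (void *)idx_to_pmc(2));
	printf("fixed1 -> %p\n", (void *)idx_to_pmc(FIXED_BASE_IDX + 1));
	printf("bogus  -> %p\n", (void *)idx_to_pmc(10));
	return 0;
}
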
/linux/tools/perf/tests/
pmu-events.c
49 .pmu = "default_core",
60 .pmu = "default_core",
71 .pmu = "default_core",
82 .pmu = "default_core",
93 .pmu = "default_core",
104 .pmu = "default_core",
131 .pmu = "hisi_sccl,ddrc",
143 .pmu = "uncore_cbox",
155 .pmu = "uncore_cbox",
167 .pmu = "uncore_cbox",
[all …]
/linux/arch/x86/events/intel/
uncore.h
86 struct pmu *pmu; /* for custom pmu ops */ member
125 struct pmu pmu; member
154 struct intel_uncore_pmu *pmu; member
223 return container_of(dev_get_drvdata(dev), struct intel_uncore_pmu, pmu); in dev_to_uncore_pmu()
263 if (offset < box->pmu->type->mmio_map_size) in uncore_mmio_is_valid_offset()
267 offset, box->pmu->type->name); in uncore_mmio_is_valid_offset()
275 return box->pmu->type->box_ctl + in uncore_mmio_box_ctl()
276 box->pmu->type->mmio_offset * box->pmu->pmu_idx; in uncore_mmio_box_ctl()
281 return box->pmu->type->box_ctl; in uncore_pci_box_ctl()
286 return box->pmu->type->fixed_ctl; in uncore_pci_fixed_ctl()
[all …]
