
Searched full:pmu (Results 1 – 25 of 1125) sorted by relevance


/linux/Documentation/devicetree/bindings/arm/
pmu.yaml
4 $id: http://devicetree.org/schemas/arm/pmu.yaml#
14 ARM cores often have a PMU for counting cpu and cache events like cache misses
15 and hits. The interface to the PMU is part of the ARM ARM. The ARM PMU
22 - apm,potenza-pmu
23 - apple,avalanche-pmu
24 - apple,blizzard-pmu
25 - apple,firestorm-pmu
26 - apple,icestorm-pmu
28 - arm,arm1136-pmu
[all...]
/linux/tools/perf/util/
pmus.c
18 #include "pmu.h"
26 * core_pmus: A PMU belongs to core_pmus if its name is "cpu" or its sysfs
28 * must have pmu->is_core=1. If there is more than one PMU in
31 * homogeneous PMU, and thus they are treated as homogeneous
34 * matter whether PMU is present per SMT-thread or outside of the
38 * must have pmu->is_core=0 but pmu->is_uncore could be 0 or 1.
83 * that S390's cpum_cf PMU doesn't match. in pmu_name_len_no_suffix()
113 struct perf_pmu *pmu, *tm in perf_pmus__destroy() local
130 struct perf_pmu *pmu; pmu_find() local
148 struct perf_pmu *pmu; perf_pmus__find() local
201 struct perf_pmu *pmu; perf_pmu__find2() local
293 struct perf_pmu *pmu; __perf_pmus__find_by_type() local
310 struct perf_pmu *pmu = __perf_pmus__find_by_type(type); perf_pmus__find_by_type() local
334 perf_pmus__scan(struct perf_pmu * pmu) perf_pmus__scan() argument
354 perf_pmus__scan_core(struct perf_pmu * pmu) perf_pmus__scan_core() argument
366 perf_pmus__scan_for_event(struct perf_pmu * pmu,const char * event) perf_pmus__scan_for_event() argument
405 perf_pmus__scan_matching_wildcard(struct perf_pmu * pmu,const char * wildcard) perf_pmus__scan_matching_wildcard() argument
452 perf_pmus__scan_skip_duplicates(struct perf_pmu * pmu) perf_pmus__scan_skip_duplicates() argument
491 struct perf_pmu *pmu = NULL; perf_pmus__pmu_for_pmu_filter() local
513 const struct perf_pmu *pmu; global() member
606 struct perf_pmu *pmu; perf_pmus__print_pmu_events() local
721 struct perf_pmu *pmu = NULL; perf_pmus__print_raw_pmu_events() local
770 struct perf_pmu *pmu = perf_pmus__find(pname); perf_pmus__have_event() local
780 struct perf_pmu *pmu = NULL; perf_pmus__num_core_pmus() local
790 struct perf_pmu *pmu = NULL; __perf_pmus__supports_extended_type() local
821 struct perf_pmu *pmu = perf_pmus__find_by_type(attr->type); perf_pmus__find_by_attr() local
846 struct perf_pmu *pmu = evsel->pmu; evsel__find_pmu() local
[all...]
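
The core_pmus/other_pmus split quoted above from pmus.c can be approximated from user space by inspecting /sys/bus/event_source/devices. Below is a minimal sketch, assuming the common convention that core PMUs are named "cpu"/"cpu_*" or expose a "cpus" file in their sysfs directory; the heuristic perf itself applies is more involved, and the names used here are only illustrative.

#include <stdbool.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>

/* Rough approximation of the is_core test; not the real perf implementation. */
static bool pmu_looks_like_core(const char *name)
{
	char path[256];

	if (!strcmp(name, "cpu") || !strncmp(name, "cpu_", 4))
		return true;

	/* Core PMUs conventionally publish a "cpus" file; uncore ones "cpumask". */
	snprintf(path, sizeof(path),
		 "/sys/bus/event_source/devices/%s/cpus", name);
	return access(path, F_OK) == 0;
}

int main(void)
{
	const char *examples[] = { "cpu", "cpu_atom", "uncore_imc_0", "software" };

	for (size_t i = 0; i < sizeof(examples) / sizeof(examples[0]); i++)
		printf("%-14s -> %s\n", examples[i],
		       pmu_looks_like_core(examples[i]) ? "core_pmus" : "other_pmus");
	return 0;
}
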
pmu.c
22 #include "pmu.h"
28 #include <util/pmu-bison.h>
29 #include <util/pmu-flex.h>
43 /* An event loaded from /sys/bus/event_source/devices/<pmu>/events. */
48 * An event loaded from a /sys/bus/event_source/devices/<pmu>/identifier matched json
56 * pmu-events.c, created by parsing the pmu-events json files.
74 * differ from the PMU name as it won't have suffixes.
135 static int pmu_aliases_parse(struct perf_pmu *pmu);
178 static void perf_pmu_format__load(const struct perf_pmu *pmu, struc argument
204 perf_pmu__format_parse(struct perf_pmu * pmu,int dirfd,bool eager_load) perf_pmu__format_parse() argument
252 pmu_format(struct perf_pmu * pmu,int dirfd,const char * name,bool eager_load) pmu_format() argument
309 perf_pmu__parse_scale(struct perf_pmu * pmu,struct perf_pmu_alias * alias) perf_pmu__parse_scale() argument
345 perf_pmu__parse_unit(struct perf_pmu * pmu,struct perf_pmu_alias * alias) perf_pmu__parse_unit() argument
411 perf_pmu__parse_per_pkg(struct perf_pmu * pmu,struct perf_pmu_alias * alias) perf_pmu__parse_per_pkg() argument
416 perf_pmu__parse_snapshot(struct perf_pmu * pmu,struct perf_pmu_alias * alias) perf_pmu__parse_snapshot() argument
436 perf_pmu__del_aliases(struct perf_pmu * pmu) perf_pmu__del_aliases() argument
451 perf_pmu__find_alias(struct perf_pmu * pmu,const char * name,bool load) perf_pmu__find_alias() argument
508 read_alias_info(struct perf_pmu * pmu,struct perf_pmu_alias * alias) read_alias_info() argument
523 struct perf_pmu *pmu; global() member
565 perf_pmu__new_alias(struct perf_pmu * pmu,const char * name,const char * desc,const char * val,FILE * val_fd,const struct pmu_event * pe,enum event_source src) perf_pmu__new_alias() argument
687 __pmu_aliases_parse(struct perf_pmu * pmu,int events_dir_fd) __pmu_aliases_parse() argument
730 pmu_aliases_parse(struct perf_pmu * pmu) pmu_aliases_parse() argument
754 pmu_aliases_parse_eager(struct perf_pmu * pmu,int sysfs_fd) pmu_aliases_parse_eager() argument
889 pmu_deduped_name_len(const struct perf_pmu * pmu,const char * name,bool skip_duplicate_pmus) pmu_deduped_name_len() argument
1046 struct perf_pmu *pmu = vdata; pmu_add_cpu_aliases_map_callback() local
1057 pmu_add_cpu_aliases_table(struct perf_pmu * pmu,const struct pmu_events_table * table) pmu_add_cpu_aliases_table() argument
1062 pmu_add_cpu_aliases(struct perf_pmu * pmu) pmu_add_cpu_aliases() argument
1078 struct perf_pmu *pmu = vdata; pmu_add_sys_aliases_iter_fn() local
1104 pmu_add_sys_aliases(struct perf_pmu * pmu) pmu_add_sys_aliases() argument
1112 pmu_find_alias_name(struct perf_pmu * pmu,int dirfd) pmu_find_alias_name() argument
1135 pmu_max_precise(int dirfd,struct perf_pmu * pmu) pmu_max_precise() argument
1144 perf_pmu__arch_init(struct perf_pmu * pmu) perf_pmu__arch_init() argument
1168 perf_pmu__init(struct perf_pmu * pmu,__u32 type,const char * name) perf_pmu__init() argument
1214 struct perf_pmu *pmu; perf_pmu__lookup() local
1277 struct perf_pmu *pmu = zalloc(sizeof(*pmu)); perf_pmu__create_placeholder_core_pmu() local
1299 perf_pmu__is_fake(const struct perf_pmu * pmu) perf_pmu__is_fake() argument
1304 perf_pmu__warn_invalid_formats(struct perf_pmu * pmu) perf_pmu__warn_invalid_formats() argument
1330 struct perf_pmu *pmu; evsel__is_aux_event() local
1348 evsel__set_config_if_unset(struct perf_pmu * pmu,struct evsel * evsel,const char * config_name,u64 val) evsel__set_config_if_unset() argument
1380 perf_pmu__format_bits(struct perf_pmu * pmu,const char * name) perf_pmu__format_bits() argument
1395 perf_pmu__format_type(struct perf_pmu * pmu,const char * name) perf_pmu__format_type() argument
1493 pmu_config_term(const struct perf_pmu * pmu,struct perf_event_attr * attr,struct parse_events_term * term,struct parse_events_terms * head_terms,bool zero,bool apply_hardcoded,struct parse_events_error * err) pmu_config_term() argument
1651 perf_pmu__config_terms(const struct perf_pmu * pmu,struct perf_event_attr * attr,struct parse_events_terms * terms,bool zero,bool apply_hardcoded,struct parse_events_error * err) perf_pmu__config_terms() argument
1677 perf_pmu__config(struct perf_pmu * pmu,struct perf_event_attr * attr,struct parse_events_terms * head_terms,bool apply_hardcoded,struct parse_events_error * err) perf_pmu__config() argument
1691 pmu_find_alias(struct perf_pmu * pmu,struct parse_events_term * term) pmu_find_alias() argument
1730 check_info_data(struct perf_pmu * pmu,struct perf_pmu_alias * alias,struct perf_pmu_info * info,struct parse_events_error * err,int column) check_info_data() argument
1777 perf_pmu__check_alias(struct perf_pmu * pmu,struct parse_events_terms * head_terms,struct perf_pmu_info * info,bool * rewrote_terms,u64 * alternate_hw_config,struct parse_events_error * err) perf_pmu__check_alias() argument
1873 perf_pmu__find_event(struct perf_pmu * pmu,const char * event,void * state,pmu_event_callback cb) perf_pmu__find_event() argument
1897 perf_pmu__has_format(const struct perf_pmu * pmu,const char * name) perf_pmu__has_format() argument
1908 perf_pmu__for_each_format(struct perf_pmu * pmu,void * state,pmu_format_callback cb) perf_pmu__for_each_format() argument
1971 perf_pmu__supports_legacy_cache(const struct perf_pmu * pmu) perf_pmu__supports_legacy_cache() argument
1976 perf_pmu__auto_merge_stats(const struct perf_pmu * pmu) perf_pmu__auto_merge_stats() argument
1981 perf_pmu__have_event(struct perf_pmu * pmu,const char * name) perf_pmu__have_event() argument
2000 perf_pmu__num_events(struct perf_pmu * pmu) perf_pmu__num_events() argument
2035 format_alias(char * buf,int len,const struct perf_pmu * pmu,const struct perf_pmu_alias * alias,bool skip_duplicate_pmus) format_alias() argument
2063 perf_pmu__for_each_event(struct perf_pmu * pmu,bool skip_duplicate_pmus,void * state,pmu_event_callback cb) perf_pmu__for_each_event() argument
2147 perf_pmu___name_match(const struct perf_pmu * pmu,const char * to_match,bool wildcard) perf_pmu___name_match() argument
2212 perf_pmu__name_wildcard_match(const struct perf_pmu * pmu,const char * to_match) perf_pmu__name_wildcard_match() argument
2223 perf_pmu__name_no_suffix_match(const struct perf_pmu * pmu,const char * to_match) perf_pmu__name_no_suffix_match() argument
2228 perf_pmu__is_software(const struct perf_pmu * pmu) perf_pmu__is_software() argument
2255 perf_pmu__open_file(const struct perf_pmu * pmu,const char * name) perf_pmu__open_file() argument
2266 perf_pmu__open_file_at(const struct perf_pmu * pmu,int dirfd,const char * name) perf_pmu__open_file_at() argument
2277 perf_pmu__scan_file(const struct perf_pmu * pmu,const char * name,const char * fmt,...) perf_pmu__scan_file() argument
2294 perf_pmu__scan_file_at(const struct perf_pmu * pmu,int dirfd,const char * name,const char * fmt,...) perf_pmu__scan_file_at() argument
2311 perf_pmu__file_exists(const struct perf_pmu * pmu,const char * name) perf_pmu__file_exists() argument
2345 perf_pmu__del_caps(struct perf_pmu * pmu) perf_pmu__del_caps() argument
2357 perf_pmu__get_cap(struct perf_pmu * pmu,const char * name) perf_pmu__get_cap() argument
2373 perf_pmu__caps_parse(struct perf_pmu * pmu) perf_pmu__caps_parse() argument
2430 perf_pmu__compute_config_masks(struct perf_pmu * pmu) perf_pmu__compute_config_masks() argument
2453 perf_pmu__warn_invalid_config(struct perf_pmu * pmu,__u64 config,const char * name,int config_num,const char * config_name) perf_pmu__warn_invalid_config() argument
2479 perf_pmu__wildcard_match(const struct perf_pmu * pmu,const char * wildcard_to_match) perf_pmu__wildcard_match() argument
2556 perf_pmu__delete(struct perf_pmu * pmu) perf_pmu__delete() argument
2578 perf_pmu__name_from_config(struct perf_pmu * pmu,u64 config) perf_pmu__name_from_config() argument
[all...]
pmu.h
12 #include "pmu-events/pmu-events.h"
55 /** @name: The name of the PMU such as "cpu". */
58 * @alias_name: Optional alternate name for the PMU determined in
63 * @id: Optional PMU identifier read from
73 * @selectable: Can the PMU name be selected as if it were an event?
77 * @is_core: Is the PMU the core CPU PMU? Determined by the name being
80 * PMU on systems like Intel hybrid.
84 * @is_uncore: Is the PMU no
208 const struct perf_pmu *pmu; global() member
[all...]
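
The @id field documented above appears to come from the optional per-PMU "identifier" file in sysfs (the same attribute several DDR PMU drivers further down expose for user space). A small sketch of reading it, with "cpu" as a placeholder PMU name:

#include <stdio.h>

/* Read /sys/bus/event_source/devices/<pmu>/identifier, if the PMU has one. */
static int read_pmu_identifier(const char *pmu, char *buf, size_t len)
{
	char path[256];
	FILE *f;

	snprintf(path, sizeof(path),
		 "/sys/bus/event_source/devices/%s/identifier", pmu);
	f = fopen(path, "r");
	if (!f)
		return -1;	/* the file is optional; many PMUs simply omit it */
	if (!fgets(buf, (int)len, f)) {
		fclose(f);
		return -1;
	}
	fclose(f);
	return 0;
}

int main(void)
{
	char id[128];

	/* "cpu" is only an example; substitute any PMU directory name. */
	if (!read_pmu_identifier("cpu", id, sizeof(id)))
		printf("identifier: %s", id);
	else
		printf("no identifier file\n");
	return 0;
}
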
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/
base.c
32 struct nvkm_pmu *pmu = device->pmu; in nvkm_pmu_fan_controlled() local
34 /* Internal PMU FW does not currently control fans in any way, in nvkm_pmu_fan_controlled()
37 if (pmu && pmu->func->code.size) in nvkm_pmu_fan_controlled()
40 /* Default (board-loaded, or VBIOS PMU/PREOS) PMU FW on Fermi in nvkm_pmu_fan_controlled()
48 nvkm_pmu_pgob(struct nvkm_pmu *pmu, bool enable) in nvkm_pmu_pgob() argument
50 if (pmu && pmu in nvkm_pmu_pgob()
57 struct nvkm_pmu *pmu = container_of(work, typeof(*pmu), recv.work); nvkm_pmu_recv() local
62 nvkm_pmu_send(struct nvkm_pmu * pmu,u32 reply[2],u32 process,u32 message,u32 data0,u32 data1) nvkm_pmu_send() argument
73 struct nvkm_pmu *pmu = nvkm_pmu(subdev); nvkm_pmu_intr() local
82 struct nvkm_pmu *pmu = nvkm_pmu(subdev); nvkm_pmu_fini() local
96 struct nvkm_pmu *pmu = nvkm_pmu(subdev); nvkm_pmu_init() local
107 struct nvkm_pmu *pmu = nvkm_pmu(subdev); nvkm_pmu_dtor() local
127 nvkm_pmu_ctor(const struct nvkm_pmu_fwif * fwif,struct nvkm_device * device,enum nvkm_subdev_type type,int inst,struct nvkm_pmu * pmu) nvkm_pmu_ctor() argument
163 struct nvkm_pmu *pmu; nvkm_pmu_new_() local
[all...]
gm20b.c
28 #include <nvfw/pmu.h>
42 struct nvkm_pmu *pmu = container_of(falcon, typeof(*pmu), falcon); in gm20b_pmu_acr_bootstrap_falcon() local
52 ret = nvkm_falcon_cmdq_send(pmu->hpq, &cmd.cmd.hdr, in gm20b_pmu_acr_bootstrap_falcon()
54 &pmu->subdev, msecs_to_jiffies(1000)); in gm20b_pmu_acr_bootstrap_falcon()
129 struct nvkm_pmu *pmu = priv; in gm20b_pmu_acr_init_wpr_callback() local
130 struct nvkm_subdev *subdev = &pmu->subdev; in gm20b_pmu_acr_init_wpr_callback()
139 complete_all(&pmu->wpr_ready); in gm20b_pmu_acr_init_wpr_callback()
144 gm20b_pmu_acr_init_wpr(struct nvkm_pmu *pmu) in gm20b_pmu_acr_init_wpr() argument
154 return nvkm_falcon_cmdq_send(pmu in gm20b_pmu_acr_init_wpr()
159 gm20b_pmu_initmsg(struct nvkm_pmu * pmu) gm20b_pmu_initmsg() argument
185 gm20b_pmu_recv(struct nvkm_pmu * pmu) gm20b_pmu_recv() argument
201 gm20b_pmu_fini(struct nvkm_pmu * pmu) gm20b_pmu_fini() argument
215 gm20b_pmu_init(struct nvkm_pmu * pmu) gm20b_pmu_init() argument
251 gm20b_pmu_load(struct nvkm_pmu * pmu,int ver,const struct nvkm_pmu_fwif * fwif) gm20b_pmu_load() argument
[all...]
gt215.c
30 gt215_pmu_send(struct nvkm_pmu *pmu, u32 reply[2], in gt215_pmu_send() argument
33 struct nvkm_subdev *subdev = &pmu->subdev; in gt215_pmu_send()
37 mutex_lock(&pmu->send.mutex); in gt215_pmu_send()
45 mutex_unlock(&pmu->send.mutex); in gt215_pmu_send()
50 * on a synchronous reply, take the PMU mutex and tell the in gt215_pmu_send()
54 pmu->recv.message = message; in gt215_pmu_send()
55 pmu->recv.process = process; in gt215_pmu_send()
65 pmu->send.base)); in gt215_pmu_send()
77 wait_event(pmu->recv.wait, (pmu in gt215_pmu_send()
87 gt215_pmu_recv(struct nvkm_pmu * pmu) gt215_pmu_recv() argument
139 gt215_pmu_intr(struct nvkm_pmu * pmu) gt215_pmu_intr() argument
178 gt215_pmu_fini(struct nvkm_pmu * pmu) gt215_pmu_fini() argument
185 gt215_pmu_reset(struct nvkm_pmu * pmu) gt215_pmu_reset() argument
195 gt215_pmu_enabled(struct nvkm_pmu * pmu) gt215_pmu_enabled() argument
201 gt215_pmu_init(struct nvkm_pmu * pmu) gt215_pmu_init() argument
[all...]
gk20a.c
51 gk20a_pmu_dvfs_target(struct gk20a_pmu *pmu, int *state) in gk20a_pmu_dvfs_target() argument
53 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_target()
59 gk20a_pmu_dvfs_get_cur_state(struct gk20a_pmu *pmu, int *state) in gk20a_pmu_dvfs_get_cur_state() argument
61 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_get_cur_state()
67 gk20a_pmu_dvfs_get_target_state(struct gk20a_pmu *pmu, in gk20a_pmu_dvfs_get_target_state() argument
70 struct gk20a_pmu_dvfs_data *data = pmu->data; in gk20a_pmu_dvfs_get_target_state()
71 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_get_target_state()
86 nvkm_trace(&pmu->base.subdev, "cur level = %d, new level = %d\n", in gk20a_pmu_dvfs_get_target_state()
95 gk20a_pmu_dvfs_get_dev_status(struct gk20a_pmu *pmu, in gk20a_pmu_dvfs_get_dev_status() argument
98 struct nvkm_falcon *falcon = &pmu in gk20a_pmu_dvfs_get_dev_status()
105 gk20a_pmu_dvfs_reset_dev_status(struct gk20a_pmu * pmu) gk20a_pmu_dvfs_reset_dev_status() argument
116 struct gk20a_pmu *pmu = gk20a_pmu_dvfs_work() local
158 gk20a_pmu_fini(struct nvkm_pmu * pmu) gk20a_pmu_fini() argument
167 gk20a_pmu_init(struct nvkm_pmu * pmu) gk20a_pmu_init() argument
215 struct gk20a_pmu *pmu; gk20a_pmu_new() local
[all...]
/linux/Documentation/admin-guide/perf/
mrvl-odyssey-ddr-pmu.rst
2 Marvell Odyssey DDR PMU Performance Monitoring Unit (PMU UNCORE)
16 The PMU driver exposes the available events and format options under sysfs::
24 mrvl_ddr_pmu_<>/ddr_act_bypass_access/ [Kernel PMU event]
25 mrvl_ddr_pmu_<>/ddr_bsm_alloc/ [Kernel PMU event]
26 mrvl_ddr_pmu_<>/ddr_bsm_starvation/ [Kernel PMU event]
27 mrvl_ddr_pmu_<>/ddr_cam_active_access/ [Kernel PMU event]
28 mrvl_ddr_pmu_<>/ddr_cam_mwr/ [Kernel PMU event]
29 mrvl_ddr_pmu_<>/ddr_cam_rd_active_access/ [Kernel PMU event]
30 mrvl_ddr_pmu_<>/ddr_cam_rd_or_wr_access/ [Kernel PMU even
[all...]
mrvl-pem-pmu.rst
2 Marvell Odyssey PEM Performance Monitoring Unit (PMU UNCORE)
20 The PMU driver exposes the available events and format options under sysfs,
27 mrvl_pcie_rc_pmu_<>/ats_inv/ [Kernel PMU event]
28 mrvl_pcie_rc_pmu_<>/ats_inv_latency/ [Kernel PMU event]
29 mrvl_pcie_rc_pmu_<>/ats_pri/ [Kernel PMU event]
30 mrvl_pcie_rc_pmu_<>/ats_pri_latency/ [Kernel PMU event]
31 mrvl_pcie_rc_pmu_<>/ats_trans/ [Kernel PMU event]
32 mrvl_pcie_rc_pmu_<>/ats_trans_latency/ [Kernel PMU event]
33 mrvl_pcie_rc_pmu_<>/ib_inflight/ [Kernel PMU event]
34 mrvl_pcie_rc_pmu_<>/ib_reads/ [Kernel PMU even
[all...]
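
Both Marvell documents above describe uncore PMUs whose events and formats are published under /sys/bus/event_source/devices. The sketch below shows one plausible way for user space to open such an event with perf_event_open(2); the PMU directory name and the config value are placeholders, and the real encoding would come from the type, events/ and format/ files the driver exposes.

#define _GNU_SOURCE
#include <linux/perf_event.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <sys/syscall.h>
#include <sys/types.h>
#include <unistd.h>

/* Thin wrapper: glibc does not provide one for perf_event_open(2). */
static long perf_event_open(struct perf_event_attr *attr, pid_t pid,
			    int cpu, int group_fd, unsigned long flags)
{
	return syscall(__NR_perf_event_open, attr, pid, cpu, group_fd, flags);
}

int main(void)
{
	struct perf_event_attr attr = { .size = sizeof(attr), .disabled = 1 };
	unsigned long long count;
	unsigned int type;
	FILE *f;
	int fd;

	/* The dynamic PMU type is published by the driver; this name is a placeholder. */
	f = fopen("/sys/bus/event_source/devices/mrvl_pcie_rc_pmu_0/type", "r");
	if (!f)
		return 1;
	if (fscanf(f, "%u", &type) != 1) {
		fclose(f);
		return 1;
	}
	fclose(f);

	attr.type = type;
	attr.config = 0x1;	/* placeholder: real encoding comes from events/ and format/ */

	/* Uncore-style events are opened per CPU (pid == -1), not per task. */
	fd = perf_event_open(&attr, -1, 0, -1, 0);
	if (fd < 0)
		return 1;

	ioctl(fd, PERF_EVENT_IOC_ENABLE, 0);
	sleep(1);
	if (read(fd, &count, sizeof(count)) == sizeof(count))
		printf("count: %llu\n", count);
	close(fd);
	return 0;
}
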
/linux/drivers/soc/dove/
pmu.c
3 * Marvell Dove PMU support
17 #include <linux/soc/dove/pmu.h>
42 * The PMU contains a register to reset various subsystems within the
50 struct pmu_data *pmu = rcdev_to_pmu(rc); in pmu_reset_reset() local
54 spin_lock_irqsave(&pmu->lock, flags); in pmu_reset_reset()
55 val = readl_relaxed(pmu->pmc_base + PMC_SW_RST); in pmu_reset_reset()
56 writel_relaxed(val & ~BIT(id), pmu->pmc_base + PMC_SW_RST); in pmu_reset_reset()
57 writel_relaxed(val | BIT(id), pmu->pmc_base + PMC_SW_RST); in pmu_reset_reset()
58 spin_unlock_irqrestore(&pmu->lock, flags); in pmu_reset_reset()
65 struct pmu_data *pmu in pmu_reset_assert() local
79 struct pmu_data *pmu = rcdev_to_pmu(rc); pmu_reset_deassert() local
103 pmu_reset_init(struct pmu_data * pmu) pmu_reset_init() argument
115 pmu_reset_init(struct pmu_data * pmu) pmu_reset_init() argument
121 struct pmu_data *pmu; global() member
146 struct pmu_data *pmu = pmu_dom->pmu; pmu_domain_power_off() local
180 struct pmu_data *pmu = pmu_dom->pmu; pmu_domain_power_on() local
228 struct pmu_data *pmu = irq_desc_get_handler_data(desc); pmu_irq_handler() local
265 dove_init_pmu_irq(struct pmu_data * pmu,int irq) dove_init_pmu_irq() argument
311 struct pmu_data *pmu; dove_init_pmu_legacy() local
375 struct pmu_data *pmu; dove_init_pmu() local
[all...]
/linux/drivers/perf/
fsl_imx8_ddr_perf.c
52 #define to_ddr_pmu(p) container_of(p, struct ddr_pmu, pmu)
66 const char *identifier; /* system PMU identifier for userspace */
101 { .compatible = "fsl,imx8-ddr-pmu", .data = &imx8_devtype_data},
102 { .compatible = "fsl,imx8m-ddr-pmu", .data = &imx8m_devtype_data},
103 { .compatible = "fsl,imx8mq-ddr-pmu", .data = &imx8mq_devtype_data},
104 { .compatible = "fsl,imx8mm-ddr-pmu", .data = &imx8mm_devtype_data},
105 { .compatible = "fsl,imx8mn-ddr-pmu", .data = &imx8mn_devtype_data},
106 { .compatible = "fsl,imx8mp-ddr-pmu", .data = &imx8mp_devtype_data},
107 { .compatible = "fsl,imx8dxl-ddr-pmu", .data = &imx8dxl_devtype_data},
113 struct pmu pm member
130 struct ddr_pmu *pmu = dev_get_drvdata(dev); ddr_perf_identifier_show() local
140 struct ddr_pmu *pmu = dev_get_drvdata(dev); ddr_perf_identifier_attr_visible() local
167 ddr_perf_filter_cap_get(struct ddr_pmu * pmu,int cap) ddr_perf_filter_cap_get() argument
190 struct ddr_pmu *pmu = dev_get_drvdata(dev); ddr_perf_filter_cap_show() local
221 struct ddr_pmu *pmu = dev_get_drvdata(dev); ddr_perf_cpumask_show() local
344 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_is_enhanced_filtered() local
351 ddr_perf_alloc_counter(struct ddr_pmu * pmu,int event) ddr_perf_alloc_counter() argument
375 ddr_perf_free_counter(struct ddr_pmu * pmu,int counter) ddr_perf_free_counter() argument
380 ddr_perf_read_counter(struct ddr_pmu * pmu,int counter) ddr_perf_read_counter() argument
397 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_init() local
442 ddr_perf_counter_enable(struct ddr_pmu * pmu,int config,int counter,bool enable) ddr_perf_counter_enable() argument
477 ddr_perf_counter_overflow(struct ddr_pmu * pmu,int counter) ddr_perf_counter_overflow() argument
486 ddr_perf_counter_clear(struct ddr_pmu * pmu,int counter) ddr_perf_counter_clear() argument
501 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_update() local
535 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_start() local
552 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_add() local
614 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_stop() local
630 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_del() local
640 ddr_perf_pmu_enable(struct pmu * pmu) ddr_perf_pmu_enable() argument
644 ddr_perf_pmu_disable(struct pmu * pmu) ddr_perf_pmu_disable() argument
648 ddr_perf_init(struct ddr_pmu * pmu,void __iomem * base,struct device * dev) ddr_perf_init() argument
678 struct ddr_pmu *pmu = (struct ddr_pmu *) p; ddr_perf_irq_handler() local
718 struct ddr_pmu *pmu = hlist_entry_safe(node, struct ddr_pmu, node); ddr_perf_offline_cpu() local
738 struct ddr_pmu *pmu; ddr_perf_probe() local
832 struct ddr_pmu *pmu = platform_get_drvdata(pdev); ddr_perf_remove() local
[all...]
fsl_imx9_ddr_perf.c
59 #define to_ddr_pmu(p) container_of(p, struct ddr_pmu, pmu)
79 const char *identifier; /* system PMU identifier for userspace */
84 struct pmu pmu; member
112 static inline bool axi_filter_v1(struct ddr_pmu *pmu) in axi_filter_v1() argument
114 return pmu->devtype_data->filter_ver == DDR_PERF_AXI_FILTER_V1; in axi_filter_v1()
117 static inline bool axi_filter_v2(struct ddr_pmu *pmu) in axi_filter_v2() argument
119 return pmu->devtype_data->filter_ver == DDR_PERF_AXI_FILTER_V2; in axi_filter_v2()
123 { .compatible = "fsl,imx91-ddr-pmu", .data = &imx91_devtype_data },
124 { .compatible = "fsl,imx93-ddr-pmu",
134 struct ddr_pmu *pmu = dev_get_drvdata(dev); ddr_perf_identifier_show() local
154 struct ddr_pmu *pmu = dev_get_drvdata(dev); ddr_perf_cpumask_show() local
317 struct pmu *pmu = dev_get_drvdata(kobj_to_dev(kobj)); ddr_perf_events_attrs_is_visible() local
365 ddr_perf_clear_counter(struct ddr_pmu * pmu,int counter) ddr_perf_clear_counter() argument
375 ddr_perf_read_counter(struct ddr_pmu * pmu,int counter) ddr_perf_read_counter() argument
398 ddr_perf_counter_global_config(struct ddr_pmu * pmu,bool enable) ddr_perf_counter_global_config() argument
432 ddr_perf_counter_local_config(struct ddr_pmu * pmu,int config,int counter,bool enable) ddr_perf_counter_local_config() argument
460 imx93_ddr_perf_monitor_config(struct ddr_pmu * pmu,int event,int counter,int axi_id,int axi_mask) imx93_ddr_perf_monitor_config() argument
486 imx95_ddr_perf_monitor_config(struct ddr_pmu * pmu,int event,int counter,int axi_id,int axi_mask) imx95_ddr_perf_monitor_config() argument
539 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_update() local
553 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_init() local
591 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_start() local
601 ddr_perf_alloc_counter(struct ddr_pmu * pmu,int event,int counter) ddr_perf_alloc_counter() argument
625 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_add() local
662 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_stop() local
674 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); ddr_perf_event_del() local
685 ddr_perf_pmu_enable(struct pmu * pmu) ddr_perf_pmu_enable() argument
692 ddr_perf_pmu_disable(struct pmu * pmu) ddr_perf_pmu_disable() argument
699 ddr_perf_init(struct ddr_pmu * pmu,void __iomem * base,struct device * dev) ddr_perf_init() argument
724 struct ddr_pmu *pmu = (struct ddr_pmu *)p; ddr_perf_irq_handler() local
755 struct ddr_pmu *pmu = hlist_entry_safe(node, struct ddr_pmu, node); ddr_perf_offline_cpu() local
775 struct ddr_pmu *pmu; ddr_perf_probe() local
858 struct ddr_pmu *pmu = platform_get_drvdata(pdev); ddr_perf_remove() local
[all...]
marvell_cn10k_ddr_pmu.c
148 struct pmu pmu; member
161 void (*enable_read_freerun_counter)(struct cn10k_ddr_pmu *pmu,
163 void (*enable_write_freerun_counter)(struct cn10k_ddr_pmu *pmu,
165 void (*clear_read_freerun_counter)(struct cn10k_ddr_pmu *pmu);
166 void (*clear_write_freerun_counter)(struct cn10k_ddr_pmu *pmu);
167 void (*pmu_overflow_handler)(struct cn10k_ddr_pmu *pmu, int evt_idx);
170 #define to_cn10k_ddr_pmu(p) container_of(p, struct cn10k_ddr_pmu, pmu)
365 struct cn10k_ddr_pmu *pmu = dev_get_drvdata(dev); in cn10k_ddr_perf_cpumask_show() local
367 return cpumap_print_to_pagebuf(true, buf, cpumask_of(pmu in cn10k_ddr_perf_cpumask_show()
438 cn10k_ddr_perf_alloc_counter(struct cn10k_ddr_pmu * pmu,struct perf_event * event) cn10k_ddr_perf_alloc_counter() argument
467 cn10k_ddr_perf_free_counter(struct cn10k_ddr_pmu * pmu,int counter) cn10k_ddr_perf_free_counter() argument
474 struct cn10k_ddr_pmu *pmu = to_cn10k_ddr_pmu(event->pmu); cn10k_ddr_perf_event_init() local
523 cn10k_ddr_perf_counter_enable(struct cn10k_ddr_pmu * pmu,int counter,bool enable) cn10k_ddr_perf_counter_enable() argument
572 cn10k_ddr_perf_read_counter(struct cn10k_ddr_pmu * pmu,int counter) cn10k_ddr_perf_read_counter() argument
592 struct cn10k_ddr_pmu *pmu = to_cn10k_ddr_pmu(event->pmu); cn10k_ddr_perf_event_update() local
609 struct cn10k_ddr_pmu *pmu = to_cn10k_ddr_pmu(event->pmu); cn10k_ddr_perf_event_start() local
622 struct cn10k_ddr_pmu *pmu = to_cn10k_ddr_pmu(event->pmu); cn10k_ddr_perf_event_add() local
668 struct cn10k_ddr_pmu *pmu = to_cn10k_ddr_pmu(event->pmu); cn10k_ddr_perf_event_stop() local
682 struct cn10k_ddr_pmu *pmu = to_cn10k_ddr_pmu(event->pmu); cn10k_ddr_perf_event_del() local
697 cn10k_ddr_perf_pmu_enable(struct pmu * pmu) cn10k_ddr_perf_pmu_enable() argument
706 cn10k_ddr_perf_pmu_disable(struct pmu * pmu) cn10k_ddr_perf_pmu_disable() argument
715 cn10k_ddr_perf_event_update_all(struct cn10k_ddr_pmu * pmu) cn10k_ddr_perf_event_update_all() argument
737 ddr_pmu_enable_read_freerun(struct cn10k_ddr_pmu * pmu,bool enable) ddr_pmu_enable_read_freerun() argument
751 ddr_pmu_enable_write_freerun(struct cn10k_ddr_pmu * pmu,bool enable) ddr_pmu_enable_write_freerun() argument
765 ddr_pmu_read_clear_freerun(struct cn10k_ddr_pmu * pmu) ddr_pmu_read_clear_freerun() argument
774 ddr_pmu_write_clear_freerun(struct cn10k_ddr_pmu * pmu) ddr_pmu_write_clear_freerun() argument
783 ddr_pmu_overflow_hander(struct cn10k_ddr_pmu * pmu,int evt_idx) ddr_pmu_overflow_hander() argument
790 ddr_pmu_ody_enable_read_freerun(struct cn10k_ddr_pmu * pmu,bool enable) ddr_pmu_ody_enable_read_freerun() argument
805 ddr_pmu_ody_enable_write_freerun(struct cn10k_ddr_pmu * pmu,bool enable) ddr_pmu_ody_enable_write_freerun() argument
820 ddr_pmu_ody_read_clear_freerun(struct cn10k_ddr_pmu * pmu) ddr_pmu_ody_read_clear_freerun() argument
829 ddr_pmu_ody_write_clear_freerun(struct cn10k_ddr_pmu * pmu) ddr_pmu_ody_write_clear_freerun() argument
838 ddr_pmu_ody_overflow_hander(struct cn10k_ddr_pmu * pmu,int evt_idx) ddr_pmu_ody_overflow_hander() argument
850 cn10k_ddr_pmu_overflow_handler(struct cn10k_ddr_pmu * pmu) cn10k_ddr_pmu_overflow_handler() argument
902 struct cn10k_ddr_pmu *pmu = container_of(hrtimer, struct cn10k_ddr_pmu, cn10k_ddr_pmu_timer_handler() local
916 struct cn10k_ddr_pmu *pmu = hlist_entry_safe(node, struct cn10k_ddr_pmu, cn10k_ddr_pmu_offline_cpu() local
[all...]
arm_pmu_platform.c
25 static int probe_current_pmu(struct arm_pmu *pmu, in probe_current_pmu() argument
32 pr_info("probing PMU on CPU %d\n", cpu); in probe_current_pmu()
37 ret = info->init(pmu); in probe_current_pmu()
45 static int pmu_parse_percpu_irq(struct arm_pmu *pmu, int irq) in pmu_parse_percpu_irq() argument
48 struct pmu_hw_events __percpu *hw_events = pmu->hw_events; in pmu_parse_percpu_irq()
50 ret = irq_get_percpu_devid_partition(irq, &pmu->supported_cpus); in pmu_parse_percpu_irq()
54 for_each_cpu(cpu, &pmu->supported_cpus) in pmu_parse_percpu_irq()
95 static int pmu_parse_irqs(struct arm_pmu *pmu) in pmu_parse_irqs() argument
98 struct platform_device *pdev = pmu->plat_device; in pmu_parse_irqs()
99 struct pmu_hw_events __percpu *hw_events = pmu in pmu_parse_irqs()
192 struct arm_pmu *pmu; arm_pmu_device_probe() local
[all...]
Kconfig
10 tristate "ARM CCI PMU driver"
14 Support for PMU events monitoring on the ARM CCI (Cache Coherent
41 PMU (perf) driver supporting the ARM CCN (Cache Coherent Network)
45 tristate "Arm CMN-600 PMU support"
48 Support for PMU events monitoring on the Arm CMN-600 Coherent Mesh
52 tristate "Arm NI-700 PMU support"
55 Support for PMU events monitoring on the Arm NI-700 Network-on-Chip
60 bool "ARM PMU framework"
80 bool "RISC-V PMU framework"
84 systems. This provides the core PMU framewor
[all...]
arm_pmu.c
185 if (type == event->pmu->type) in armpmu_map_event()
202 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_event_set_period()
244 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_event_update()
274 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_stop()
278 * ARM pmu always has to update the counter, so ignore in armpmu_stop()
290 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_start()
294 * ARM pmu always has to reprogram the period, so ignore in armpmu_start()
315 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_del()
324 perf_sched_cb_dec(event->pmu); in armpmu_del()
337 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_add()
373 validate_event(struct pmu * pmu,struct pmu_hw_events * hw_events,struct perf_event * event) validate_event() argument
529 armpmu_enable(struct pmu * pmu) armpmu_enable() argument
543 armpmu_disable(struct pmu * pmu) armpmu_disable() argument
559 armpmu_filter(struct pmu * pmu,int cpu) armpmu_filter() argument
693 armpmu_get_cpu_irq(struct arm_pmu * pmu,int cpu) armpmu_get_cpu_irq() argument
712 struct arm_pmu *pmu = hlist_entry_safe(node, struct arm_pmu, node); arm_perf_starting_cpu() local
731 struct arm_pmu *pmu = hlist_entry_safe(node, struct arm_pmu, node); arm_perf_teardown_cpu() local
861 struct arm_pmu *pmu; armpmu_alloc() local
914 armpmu_free(struct arm_pmu * pmu) armpmu_free() argument
920 armpmu_register(struct arm_pmu * pmu) armpmu_register() argument
[all...]
/linux/drivers/gpu/drm/i915/
i915_pmu.c
33 return container_of(event->pmu, struct i915_pmu, base); in event_to_pmu()
36 static struct drm_i915_private *pmu_to_i915(struct i915_pmu *pmu) in pmu_to_i915() argument
38 return container_of(pmu, struct drm_i915_private, pmu); in pmu_to_i915()
149 static bool pmu_needs_timer(struct i915_pmu *pmu) in pmu_needs_timer() argument
151 struct drm_i915_private *i915 = pmu_to_i915(pmu); in pmu_needs_timer()
159 enable = pmu->enable; in pmu_needs_timer()
201 static u64 read_sample(struct i915_pmu *pmu, unsigned int gt_id, int sample) in read_sample() argument
203 return pmu->sample[gt_id][sample].cur; in read_sample()
207 store_sample(struct i915_pmu *pmu, unsigne argument
213 add_sample_mult(struct i915_pmu * pmu,unsigned int gt_id,int sample,u32 val,u32 mul) add_sample_mult() argument
222 struct i915_pmu *pmu = &i915->pmu; get_rc6() local
259 init_rc6(struct i915_pmu * pmu) init_rc6() argument
281 struct i915_pmu *pmu = &gt->i915->pmu; park_rc6() local
287 __i915_pmu_maybe_start_timer(struct i915_pmu * pmu) __i915_pmu_maybe_start_timer() argument
300 struct i915_pmu *pmu = &gt->i915->pmu; i915_pmu_gt_parked() local
322 struct i915_pmu *pmu = &gt->i915->pmu; i915_pmu_gt_unparked() local
358 struct intel_engine_pmu *pmu = &engine->pmu; gen3_engine_sample() local
393 struct intel_engine_pmu *pmu = &engine->pmu; gen2_engine_sample() local
450 frequency_sampling_enabled(struct i915_pmu * pmu,unsigned int gt) frequency_sampling_enabled() argument
462 struct i915_pmu *pmu = &i915->pmu; frequency_sample() local
505 struct i915_pmu *pmu = container_of(hrtimer, struct i915_pmu, timer); i915_sample() local
541 struct i915_pmu *pmu = event_to_pmu(event); i915_pmu_event_destroy() local
608 struct i915_pmu *pmu = event_to_pmu(event); engine_event_init() local
622 struct i915_pmu *pmu = event_to_pmu(event); i915_pmu_event_init() local
659 struct i915_pmu *pmu = event_to_pmu(event); __i915_pmu_event_read() local
716 struct i915_pmu *pmu = event_to_pmu(event); i915_pmu_event_read() local
735 struct i915_pmu *pmu = event_to_pmu(event); i915_pmu_enable() local
798 struct i915_pmu *pmu = event_to_pmu(event); i915_pmu_disable() local
844 struct i915_pmu *pmu = event_to_pmu(event); i915_pmu_event_start() local
855 struct i915_pmu *pmu = event_to_pmu(event); i915_pmu_event_stop() local
871 struct i915_pmu *pmu = event_to_pmu(event); i915_pmu_event_add() local
979 create_event_attributes(struct i915_pmu * pmu) create_event_attributes() argument
1131 free_event_attributes(struct i915_pmu * pmu) free_event_attributes() argument
1149 struct i915_pmu *pmu = &i915->pmu; i915_pmu_register() local
1216 struct i915_pmu *pmu = &i915->pmu; i915_pmu_unregister() local
[all...]
/linux/Documentation/devicetree/bindings/arm/rockchip/
pmu.yaml
4 $id: http://devicetree.org/schemas/arm/rockchip/pmu.yaml#
7 title: Rockchip Power Management Unit (PMU)
14 The PMU is used to turn on and off different power domains of the SoCs.
22 - rockchip,px30-pmu
23 - rockchip,rk3066-pmu
24 - rockchip,rk3128-pmu
25 - rockchip,rk3288-pmu
26 - rockchip,rk3368-pmu
27 - rockchip,rk3399-pmu
28 - rockchip,rk3528-pmu
[all...]
/linux/drivers/perf/amlogic/
meson_ddr_pmu_core.c
21 struct pmu pmu; member
35 #define to_ddr_pmu(p) container_of(p, struct ddr_pmu, pmu)
38 static void dmc_pmu_enable(struct ddr_pmu *pmu) in dmc_pmu_enable() argument
40 if (!pmu->pmu_enabled) in dmc_pmu_enable()
41 pmu->info.hw_info->enable(&pmu->info); in dmc_pmu_enable()
43 pmu->pmu_enabled = true; in dmc_pmu_enable()
46 static void dmc_pmu_disable(struct ddr_pmu *pmu) in dmc_pmu_disable() argument
48 if (pmu in dmc_pmu_disable()
56 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); meson_ddr_set_axi_filter() local
87 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); meson_ddr_perf_event_update() local
120 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); meson_ddr_perf_event_init() local
145 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); meson_ddr_perf_event_start() local
175 struct ddr_pmu *pmu = to_ddr_pmu(event->pmu); meson_ddr_perf_event_stop() local
192 struct ddr_pmu *pmu = dev_get_drvdata(dev); meson_ddr_perf_cpumask_show() local
294 struct pmu *pmu = dev_get_drvdata(kobj_to_dev(kobj)); meson_ddr_perf_format_attr_visible() local
322 struct ddr_pmu *pmu = dev_get_drvdata(dev); meson_ddr_perf_identifier_show() local
350 struct ddr_pmu *pmu; dmc_irq_handler() local
395 struct ddr_pmu *pmu = hlist_entry_safe(node, struct ddr_pmu, node); ddr_perf_offline_cpu() local
413 fill_event_attr(struct ddr_pmu * pmu) fill_event_attr() argument
486 struct ddr_pmu *pmu; meson_ddr_pmu_create() local
557 struct ddr_pmu *pmu = platform_get_drvdata(pdev); meson_ddr_pmu_remove() local
[all...]
/linux/arch/x86/kvm/vmx/
pmu_intel.c
3 * KVM PMU support for Intel CPUs
22 #include "pmu.h"
57 static void reprogram_fixed_counters(struct kvm_pmu *pmu, u64 data) in reprogram_fixed_counters() argument
60 u64 old_fixed_ctr_ctrl = pmu->fixed_ctr_ctrl; in reprogram_fixed_counters()
63 pmu->fixed_ctr_ctrl = data; in reprogram_fixed_counters()
64 for (i = 0; i < pmu->nr_arch_fixed_counters; i++) { in reprogram_fixed_counters()
71 pmc = get_fixed_pmc(pmu, MSR_CORE_PERF_FIXED_CTR0 + i); in reprogram_fixed_counters()
73 __set_bit(KVM_FIXED_PMC_BASE_IDX + i, pmu->pmc_in_use); in reprogram_fixed_counters()
82 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_rdpmc_ecx_to_pmc() local
94 * Yell and reject attempts to read PMCs for a non-architectural PMU, in intel_rdpmc_ecx_to_pmc()
144 get_fw_gp_pmc(struct kvm_pmu * pmu,u32 msr) get_fw_gp_pmc() argument
188 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_is_valid_msr() local
219 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_msr_idx_to_pmc() local
246 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_pmu_create_guest_lbr_event() local
342 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_pmu_get_msr() local
385 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_pmu_set_msr() local
493 intel_pmu_enable_fixed_counter_bits(struct kvm_pmu * pmu,u64 bits) intel_pmu_enable_fixed_counter_bits() argument
503 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_pmu_refresh() local
615 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); intel_pmu_init() local
725 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); vmx_passthrough_lbr_msrs() local
759 intel_pmu_cross_mapped_check(struct kvm_pmu * pmu) intel_pmu_cross_mapped_check() argument
[all...]
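
reprogram_fixed_counters() above walks the guest's fixed-counter control MSR. As a point of reference, the architectural layout (per the Intel SDM, as I read it) gives each fixed counter a 4-bit field in IA32_FIXED_CTR_CTRL: bit 0 enables ring-0 counting, bit 1 ring-3, bit 2 AnyThread, bit 3 the overflow PMI. A small decoding sketch, independent of the KVM code:

#include <stdint.h>
#include <stdio.h>

/* Decode one fixed counter's 4-bit control field from IA32_FIXED_CTR_CTRL. */
static void decode_fixed_ctrl(uint64_t fixed_ctr_ctrl, int counter)
{
	unsigned int ctrl = (fixed_ctr_ctrl >> (counter * 4)) & 0xf;

	printf("fixed counter %d: os=%u usr=%u anythread=%u pmi=%u\n",
	       counter, ctrl & 1, (ctrl >> 1) & 1, (ctrl >> 2) & 1,
	       (ctrl >> 3) & 1);
}

int main(void)
{
	/* Example value: counter 0 counts user+kernel with PMI, counters 1-2 off. */
	uint64_t val = 0x000b;

	for (int i = 0; i < 3; i++)
		decode_fixed_ctrl(val, i);
	return 0;
}
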
/linux/drivers/gpu/drm/xe/
xe_pmu.c
20 * DOC: Xe PMU (Performance Monitoring Unit)
93 struct xe_device *xe = container_of(event->pmu, typeof(*xe), pmu.base); in event_to_gt()
101 struct xe_device *xe = container_of(event->pmu, typeof(*xe), pmu.base); in event_to_hwe()
135 struct xe_device *xe = container_of(event->pmu, typeof(*xe), pmu.base); in event_gt_forcewake()
160 static bool event_supported(struct xe_pmu *pmu, unsigned int gt_id, in event_supported() argument
163 struct xe_device *xe = container_of(pmu, typeof(*xe), pmu); in event_supported()
235 struct xe_pmu *pmu = &xe->pmu; xe_pmu_event_init() local
339 struct xe_pmu *pmu = &xe->pmu; xe_pmu_event_read() local
362 struct xe_pmu *pmu = &xe->pmu; xe_pmu_event_start() local
374 struct xe_pmu *pmu = &xe->pmu; xe_pmu_event_stop() local
386 struct xe_pmu *pmu = &xe->pmu; xe_pmu_event_add() local
497 set_supported_events(struct xe_pmu * pmu) set_supported_events() argument
520 struct xe_pmu *pmu = arg; xe_pmu_unregister() local
538 xe_pmu_register(struct xe_pmu * pmu) xe_pmu_register() argument
[all...]
/linux/Documentation/devicetree/bindings/pinctrl/
marvell,dove-pinctrl.txt
9 - reg: register specifiers of MPP, MPP4, and PMU MPP registers
14 Note: pmu* also allows for Power Management functions listed below
18 mpp0 0 gpio, pmu, uart2(rts), sdio0(cd), lcd0(pwm), pmu*
19 mpp1 1 gpio, pmu, uart2(cts), sdio0(wp), lcd1(pwm), pmu*
20 mpp2 2 gpio, pmu, uart2(txd), sdio0(buspwr), sata(prsnt),
21 uart1(rts), pmu*
22 mpp3 3 gpio, pmu, uart2(rxd), sdio0(ledctrl), sata(act),
23 uart1(cts), lcd-spi(cs1), pmu*
[all...]
/linux/arch/x86/events/amd/
uncore.c
55 struct pmu pmu; member
96 return container_of(event->pmu, struct amd_uncore_pmu, pmu); in event_to_amd_uncore_pmu()
112 event->pmu->read(event); in amd_uncore_hrtimer()
165 struct amd_uncore_pmu *pmu = event_to_amd_uncore_pmu(event); in amd_uncore_start() local
166 struct amd_uncore_ctx *ctx = *per_cpu_ptr(pmu->ctx, event->cpu); in amd_uncore_start()
183 struct amd_uncore_pmu *pmu = event_to_amd_uncore_pmu(event); in amd_uncore_stop() local
184 struct amd_uncore_ctx *ctx = *per_cpu_ptr(pmu->ctx, event->cpu); in amd_uncore_stop()
191 event->pmu in amd_uncore_stop()
204 struct amd_uncore_pmu *pmu = event_to_amd_uncore_pmu(event); amd_uncore_add() local
251 struct amd_uncore_pmu *pmu = event_to_amd_uncore_pmu(event); amd_uncore_del() local
269 struct amd_uncore_pmu *pmu; amd_uncore_event_init() local
322 struct amd_uncore_pmu *pmu = container_of(ptr, struct amd_uncore_pmu, pmu); amd_uncore_attr_show_cpumask() local
474 struct amd_uncore_pmu *pmu; amd_uncore_ctx_free() local
502 struct amd_uncore_pmu *pmu; amd_uncore_ctx_init() local
572 struct amd_uncore_pmu *pmu; amd_uncore_ctx_move() local
720 struct amd_uncore_pmu *pmu; amd_uncore_df_ctx_init() local
854 struct amd_uncore_pmu *pmu; amd_uncore_l3_ctx_init() local
943 struct amd_uncore_pmu *pmu = event_to_amd_uncore_pmu(event); amd_uncore_umc_start() local
1017 struct amd_uncore_pmu *pmu; amd_uncore_umc_ctx_init() local
1193 struct amd_uncore_pmu *pmu; amd_uncore_exit() local
[all...]
/linux/tools/perf/arch/arm/util/
pmu.c
8 #include <linux/coresight-pmu.h>
15 #include "../../../util/pmu.h"
19 void perf_pmu__arch_init(struct perf_pmu *pmu) in perf_pmu__arch_init() argument
24 if (!strcmp(pmu->name, CORESIGHT_ETM_PMU_NAME)) { in perf_pmu__arch_init()
26 pmu->auxtrace = true; in perf_pmu__arch_init()
27 pmu->selectable = true; in perf_pmu__arch_init()
28 pmu->perf_event_attr_init_default = cs_etm_get_default_config; in perf_pmu__arch_init()
30 } else if (strstarts(pmu->name, ARM_SPE_PMU_NAME)) { in perf_pmu__arch_init()
31 pmu->auxtrace = true; in perf_pmu__arch_init()
32 pmu in perf_pmu__arch_init()
[all...]
