Lines Matching full:pmu

51 	struct pmu pmu;  member
88 return container_of(event->pmu, struct amd_uncore_pmu, pmu); in event_to_amd_uncore_pmu()
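
The member match at line 51 and the container_of() at line 88 together show how the driver wraps the generic perf struct pmu. A minimal sketch of the wrapper implied by the fields used throughout these matches; exact field types, ordering and the name-buffer size are assumptions:

	struct amd_uncore_pmu {
		char name[16];                          /* "amd_df"/"amd_nb", "amd_l3"/"amd_l2", "amd_umc_%d" */
		int num_counters;                       /* loop bound at lines 155, 164, 198 */
		int rdpmc_base;                         /* -1 when RDPMC is unsupported (lines 180, 944) */
		u32 msr_base;                           /* first PERF_CTL MSR (lines 678, 809, 943) */
		int group;                              /* context group id (lines 456, 680, 810, 945) */
		cpumask_t active_mask;                  /* CPUs exported via the cpumask attribute (line 263) */
		struct pmu pmu;                         /* embedded generic PMU, line 51 */
		struct amd_uncore_ctx * __percpu *ctx;  /* per-CPU pointer to the shared context */
	};

	/* Line 88: recover the wrapper from the struct pmu that perf hands back. */
	static struct amd_uncore_pmu *event_to_amd_uncore_pmu(struct perf_event *event)
	{
		return container_of(event->pmu, struct amd_uncore_pmu, pmu);
	}
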
139 event->pmu->read(event); in amd_uncore_stop()
147 struct amd_uncore_pmu *pmu = event_to_amd_uncore_pmu(event); in amd_uncore_add() local
148 struct amd_uncore_ctx *ctx = *per_cpu_ptr(pmu->ctx, event->cpu); in amd_uncore_add()
155 for (i = 0; i < pmu->num_counters; i++) { in amd_uncore_add()
164 for (i = 0; i < pmu->num_counters; i++) { in amd_uncore_add()
175 hwc->config_base = pmu->msr_base + (2 * hwc->idx); in amd_uncore_add()
176 hwc->event_base = pmu->msr_base + 1 + (2 * hwc->idx); in amd_uncore_add()
177 hwc->event_base_rdpmc = pmu->rdpmc_base + hwc->idx; in amd_uncore_add()
180 if (pmu->rdpmc_base < 0) in amd_uncore_add()
184 event->pmu->start(event, PERF_EF_RELOAD); in amd_uncore_add()
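
Lines 175-177 spell out the counter MSR layout: each counter is a CTL/CTR pair laid out back to back from msr_base, while RDPMC indices are simply consecutive from rdpmc_base; line 180 skips RDPMC setup when the base is negative. Hypothetical helpers restating that arithmetic (the helper names are made up for illustration):

	static inline u32 uncore_ctl_msr(struct amd_uncore_pmu *pmu, int idx)
	{
		return pmu->msr_base + (2 * idx);        /* line 175: PERF_CTL for counter idx */
	}

	static inline u32 uncore_ctr_msr(struct amd_uncore_pmu *pmu, int idx)
	{
		return pmu->msr_base + 1 + (2 * idx);    /* line 176: PERF_CTR sits right after its CTL */
	}

	static inline int uncore_rdpmc_idx(struct amd_uncore_pmu *pmu, int idx)
	{
		return pmu->rdpmc_base + idx;            /* line 177; unused when rdpmc_base < 0 (line 180) */
	}
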
192 struct amd_uncore_pmu *pmu = event_to_amd_uncore_pmu(event); in amd_uncore_del() local
193 struct amd_uncore_ctx *ctx = *per_cpu_ptr(pmu->ctx, event->cpu); in amd_uncore_del()
196 event->pmu->stop(event, PERF_EF_UPDATE); in amd_uncore_del()
198 for (i = 0; i < pmu->num_counters; i++) { in amd_uncore_del()
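
Lines 196-198 are the removal path: stop the event so its final count is folded in, then walk the context's counter slots to release the one it held. A hedged reconstruction; the events[] slot array and its cmpxchg() release idiom are assumptions, only the stop call and the loop bound come from the matches:

	struct hw_perf_event *hwc = &event->hw;

	event->pmu->stop(event, PERF_EF_UPDATE);                  /* line 196: fold in the final count */

	for (i = 0; i < pmu->num_counters; i++) {                 /* line 198: find and free our slot */
		if (cmpxchg(&ctx->events[i], event, NULL) == event)   /* assumed release idiom */
			break;
	}

	hwc->idx = -1;                                            /* assumed: mark the event unscheduled */
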
208 struct amd_uncore_pmu *pmu; in amd_uncore_event_init() local
212 if (event->attr.type != event->pmu->type) in amd_uncore_event_init()
218 pmu = event_to_amd_uncore_pmu(event); in amd_uncore_event_init()
219 ctx = *per_cpu_ptr(pmu->ctx, event->cpu); in amd_uncore_event_init()
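
Lines 212-219 are the usual event_init gatekeeping for a dynamically registered PMU: anything not created against this PMU's type is rejected, and the target CPU must already have an uncore context. A minimal sketch; the error codes for the rejection paths are assumptions:

	if (event->attr.type != event->pmu->type)     /* line 212: event is not for this PMU */
		return -ENOENT;

	pmu = event_to_amd_uncore_pmu(event);         /* line 218 */
	ctx = *per_cpu_ptr(pmu->ctx, event->cpu);     /* line 219 */
	if (!ctx)                                     /* assumed: no context set up for this CPU */
		return -ENODEV;
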
260 struct pmu *ptr = dev_get_drvdata(dev); in amd_uncore_attr_show_cpumask()
261 struct amd_uncore_pmu *pmu = container_of(ptr, struct amd_uncore_pmu, pmu); in amd_uncore_attr_show_cpumask() local
263 return cpumap_print_to_pagebuf(true, buf, &pmu->active_mask); in amd_uncore_attr_show_cpumask()
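
Lines 260-263 implement the conventional "cpumask" sysfs attribute for an uncore PMU: the perf core stores the registered struct pmu as the device's drvdata, so container_of() leads back to the wrapper and its active_mask. A sketch of the whole handler; the DEVICE_ATTR wiring below the function is an assumption:

	static ssize_t amd_uncore_attr_show_cpumask(struct device *dev,
						    struct device_attribute *attr,
						    char *buf)
	{
		struct pmu *ptr = dev_get_drvdata(dev);                               /* line 260 */
		struct amd_uncore_pmu *pmu = container_of(ptr, struct amd_uncore_pmu, pmu);

		return cpumap_print_to_pagebuf(true, buf, &pmu->active_mask);        /* line 263 */
	}
	static DEVICE_ATTR(cpumask, S_IRUGO, amd_uncore_attr_show_cpumask, NULL);    /* assumed */
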
413 struct amd_uncore_pmu *pmu; in amd_uncore_ctx_free() local
421 pmu = &uncore->pmus[i]; in amd_uncore_ctx_free()
422 ctx = *per_cpu_ptr(pmu->ctx, cpu); in amd_uncore_ctx_free()
427 cpumask_clear_cpu(cpu, &pmu->active_mask); in amd_uncore_ctx_free()
434 *per_cpu_ptr(pmu->ctx, cpu) = NULL; in amd_uncore_ctx_free()
441 struct amd_uncore_pmu *pmu; in amd_uncore_ctx_init() local
451 pmu = &uncore->pmus[i]; in amd_uncore_ctx_init()
452 *per_cpu_ptr(pmu->ctx, cpu) = NULL; in amd_uncore_ctx_init()
456 if (gid != pmu->group) in amd_uncore_ctx_init()
464 prev = *per_cpu_ptr(pmu->ctx, j); in amd_uncore_ctx_init()
483 pmu->num_counters, in amd_uncore_ctx_init()
490 cpumask_set_cpu(cpu, &pmu->active_mask); in amd_uncore_ctx_init()
494 *per_cpu_ptr(pmu->ctx, cpu) = curr; in amd_uncore_ctx_init()
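
Lines 441-494 set up a per-CPU context for each PMU when a CPU comes online: the context is shared with an already-online sibling where possible, otherwise a fresh one sized for num_counters is allocated and this CPU becomes its owner. A hedged reconstruction of that flow; the sibling test (amd_uncore_ctx_cid()), the allocation details and any reference counting are assumptions:

	for (i = 0; i < uncore->num_pmus; i++) {
		pmu = &uncore->pmus[i];
		*per_cpu_ptr(pmu->ctx, cpu) = NULL;                /* line 452: start with no context */
		curr = NULL;

		if (gid != pmu->group)                             /* line 456: PMU serves another group */
			continue;

		/* Reuse a context already owned by a sibling CPU (sharing test assumed). */
		for_each_online_cpu(j) {
			prev = *per_cpu_ptr(pmu->ctx, j);          /* line 464 */
			if (prev && amd_uncore_ctx_cid(uncore, cpu) ==
				    amd_uncore_ctx_cid(uncore, j)) {
				curr = prev;
				break;
			}
		}

		if (!curr) {
			/* assumed allocation; counter slot array sized by line 483 */
			curr = kzalloc_node(sizeof(*curr), GFP_KERNEL, cpu_to_node(cpu));
			if (!curr)
				return -ENOMEM;
			cpumask_set_cpu(cpu, &pmu->active_mask);   /* line 490: this CPU owns it */
		}

		*per_cpu_ptr(pmu->ctx, cpu) = curr;                /* line 494 */
	}
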
508 struct amd_uncore_pmu *pmu; in amd_uncore_ctx_move() local
515 pmu = &uncore->pmus[i]; in amd_uncore_ctx_move()
516 curr = *per_cpu_ptr(pmu->ctx, cpu); in amd_uncore_ctx_move()
522 next = *per_cpu_ptr(pmu->ctx, j); in amd_uncore_ctx_move()
527 perf_pmu_migrate_context(&pmu->pmu, cpu, j); in amd_uncore_ctx_move()
528 cpumask_clear_cpu(cpu, &pmu->active_mask); in amd_uncore_ctx_move()
529 cpumask_set_cpu(j, &pmu->active_mask); in amd_uncore_ctx_move()
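
Lines 508-529 handle CPU offlining: if the departing CPU owns a context, its active events are migrated to another online CPU that shares the same context, and the ownership mask is updated. A hedged sketch; the way the replacement CPU is chosen is an assumption, the migration and mask updates come from lines 527-529:

	for (i = 0; i < uncore->num_pmus; i++) {
		pmu = &uncore->pmus[i];
		curr = *per_cpu_ptr(pmu->ctx, cpu);                  /* line 516 */
		if (!curr)
			continue;

		/* Pick another online CPU sharing this context (selection rule assumed). */
		for_each_online_cpu(j) {
			if (j == cpu)
				continue;
			next = *per_cpu_ptr(pmu->ctx, j);            /* line 522 */
			if (next != curr)
				continue;

			perf_pmu_migrate_context(&pmu->pmu, cpu, j); /* line 527: move active events */
			cpumask_clear_cpu(cpu, &pmu->active_mask);   /* lines 528-529: hand over ownership */
			cpumask_set_cpu(j, &pmu->active_mask);
			break;
		}
	}
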
656 struct amd_uncore_pmu *pmu; in amd_uncore_df_ctx_init() local
674 pmu = &uncore->pmus[0]; in amd_uncore_df_ctx_init()
675 strscpy(pmu->name, boot_cpu_data.x86 >= 0x17 ? "amd_df" : "amd_nb", in amd_uncore_df_ctx_init()
676 sizeof(pmu->name)); in amd_uncore_df_ctx_init()
677 pmu->num_counters = amd_uncore_ctx_num_pmcs(uncore, cpu); in amd_uncore_df_ctx_init()
678 pmu->msr_base = MSR_F15H_NB_PERF_CTL; in amd_uncore_df_ctx_init()
679 pmu->rdpmc_base = RDPMC_BASE_NB; in amd_uncore_df_ctx_init()
680 pmu->group = amd_uncore_ctx_gid(uncore, cpu); in amd_uncore_df_ctx_init()
689 pmu->ctx = alloc_percpu(struct amd_uncore_ctx *); in amd_uncore_df_ctx_init()
690 if (!pmu->ctx) in amd_uncore_df_ctx_init()
693 pmu->pmu = (struct pmu) { in amd_uncore_df_ctx_init()
696 .name = pmu->name, in amd_uncore_df_ctx_init()
707 if (perf_pmu_register(&pmu->pmu, pmu->pmu.name, -1)) { in amd_uncore_df_ctx_init()
708 free_percpu(pmu->ctx); in amd_uncore_df_ctx_init()
709 pmu->ctx = NULL; in amd_uncore_df_ctx_init()
713 pr_info("%d %s%s counters detected\n", pmu->num_counters, in amd_uncore_df_ctx_init()
715 pmu->pmu.name); in amd_uncore_df_ctx_init()
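
Lines 674-715 show the registration pattern that the L3 (lines 805-848) and UMC (lines 940-972) variants repeat: fill in the wrapper (name, counters, MSR base, RDPMC base, group), allocate the per-CPU context pointer, populate the embedded struct pmu and register it, rolling back the per-CPU allocation on failure. A hedged skeleton of that sequence; the callback assignments and any struct pmu fields beyond .name are assumptions:

	pmu->ctx = alloc_percpu(struct amd_uncore_ctx *);        /* line 689 */
	if (!pmu->ctx)
		return -ENOMEM;

	pmu->pmu = (struct pmu) {                                /* line 693 */
		.task_ctx_nr = perf_invalid_context,             /* assumed: uncore PMUs count per CPU, not per task */
		.name        = pmu->name,                        /* line 696 */
		.event_init  = amd_uncore_event_init,
		.add         = amd_uncore_add,
		.del         = amd_uncore_del,
		.start       = amd_uncore_start,                 /* assumed counterpart of amd_uncore_stop */
		.stop        = amd_uncore_stop,
		.read        = amd_uncore_read,                  /* assumed, cf. event->pmu->read at line 139 */
	};

	if (perf_pmu_register(&pmu->pmu, pmu->pmu.name, -1)) {   /* line 707: -1 requests a dynamic type */
		free_percpu(pmu->ctx);                           /* lines 708-709: undo on failure */
		pmu->ctx = NULL;
	}

The three init paths differ mainly in naming (family 0x17 and later picks "amd_df" and "amd_l3" over the legacy "amd_nb" and "amd_l2", lines 675 and 806), in the MSR and RDPMC bases, and in how many PMU instances are created.
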
787 struct amd_uncore_pmu *pmu; in amd_uncore_l3_ctx_init() local
805 pmu = &uncore->pmus[0]; in amd_uncore_l3_ctx_init()
806 strscpy(pmu->name, boot_cpu_data.x86 >= 0x17 ? "amd_l3" : "amd_l2", in amd_uncore_l3_ctx_init()
807 sizeof(pmu->name)); in amd_uncore_l3_ctx_init()
808 pmu->num_counters = amd_uncore_ctx_num_pmcs(uncore, cpu); in amd_uncore_l3_ctx_init()
809 pmu->msr_base = MSR_F16H_L2I_PERF_CTL; in amd_uncore_l3_ctx_init()
810 pmu->rdpmc_base = RDPMC_BASE_LLC; in amd_uncore_l3_ctx_init()
811 pmu->group = amd_uncore_ctx_gid(uncore, cpu); in amd_uncore_l3_ctx_init()
821 pmu->ctx = alloc_percpu(struct amd_uncore_ctx *); in amd_uncore_l3_ctx_init()
822 if (!pmu->ctx) in amd_uncore_l3_ctx_init()
825 pmu->pmu = (struct pmu) { in amd_uncore_l3_ctx_init()
829 .name = pmu->name, in amd_uncore_l3_ctx_init()
840 if (perf_pmu_register(&pmu->pmu, pmu->pmu.name, -1)) { in amd_uncore_l3_ctx_init()
841 free_percpu(pmu->ctx); in amd_uncore_l3_ctx_init()
842 pmu->ctx = NULL; in amd_uncore_l3_ctx_init()
846 pr_info("%d %s%s counters detected\n", pmu->num_counters, in amd_uncore_l3_ctx_init()
848 pmu->pmu.name); in amd_uncore_l3_ctx_init()
908 struct amd_uncore_pmu *pmu; in amd_uncore_umc_ctx_init() local
940 pmu = &uncore->pmus[index]; in amd_uncore_umc_ctx_init()
941 snprintf(pmu->name, sizeof(pmu->name), "amd_umc_%d", index); in amd_uncore_umc_ctx_init()
942 pmu->num_counters = group_num_pmcs[gid] / group_num_pmus[gid]; in amd_uncore_umc_ctx_init()
943 pmu->msr_base = MSR_F19H_UMC_PERF_CTL + i * pmu->num_counters * 2; in amd_uncore_umc_ctx_init()
944 pmu->rdpmc_base = -1; in amd_uncore_umc_ctx_init()
945 pmu->group = gid; in amd_uncore_umc_ctx_init()
947 pmu->ctx = alloc_percpu(struct amd_uncore_ctx *); in amd_uncore_umc_ctx_init()
948 if (!pmu->ctx) in amd_uncore_umc_ctx_init()
951 pmu->pmu = (struct pmu) { in amd_uncore_umc_ctx_init()
954 .name = pmu->name, in amd_uncore_umc_ctx_init()
965 if (perf_pmu_register(&pmu->pmu, pmu->pmu.name, -1)) { in amd_uncore_umc_ctx_init()
966 free_percpu(pmu->ctx); in amd_uncore_umc_ctx_init()
967 pmu->ctx = NULL; in amd_uncore_umc_ctx_init()
971 pr_info("%d %s counters detected\n", pmu->num_counters, in amd_uncore_umc_ctx_init()
972 pmu->pmu.name); in amd_uncore_umc_ctx_init()
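
Line 943 spaces the per-UMC MSR blocks: each unit-memory-controller PMU gets a contiguous run of num_counters CTL/CTR pairs after MSR_F19H_UMC_PERF_CTL, and line 944 marks RDPMC as unavailable, which is what the rdpmc_base < 0 check at line 180 tests for. A hypothetical helper restating that arithmetic:

	static inline u32 umc_msr_base(unsigned int i, unsigned int num_counters)
	{
		/* line 943: instance i starts num_counters CTL/CTR pairs after the previous one */
		return MSR_F19H_UMC_PERF_CTL + i * num_counters * 2;
	}
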
1083 struct amd_uncore_pmu *pmu; in amd_uncore_exit() local
1099 pmu = &uncore->pmus[j]; in amd_uncore_exit()
1100 if (!pmu->ctx) in amd_uncore_exit()
1103 perf_pmu_unregister(&pmu->pmu); in amd_uncore_exit()
1104 free_percpu(pmu->ctx); in amd_uncore_exit()
1105 pmu->ctx = NULL; in amd_uncore_exit()
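
Lines 1099-1105 tear everything down: only PMUs whose ctx pointer is still set, i.e. those that made it through registration, are unregistered and have their per-CPU storage freed. A short sketch; the loop bound is an assumption:

	for (j = 0; j < uncore->num_pmus; j++) {
		pmu = &uncore->pmus[j];                   /* line 1099 */
		if (!pmu->ctx)                            /* line 1100: never registered, nothing to undo */
			continue;

		perf_pmu_unregister(&pmu->pmu);           /* line 1103 */
		free_percpu(pmu->ctx);                    /* line 1104 */
		pmu->ctx = NULL;                          /* line 1105 */
	}
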