Lines Matching +full:cortex +full:- +full:a8
1 // SPDX-License-Identifier: GPL-2.0
3 * ARMv7 Cortex-A8 and Cortex-A9 Performance Events handling code.
11 * Cortex-A8 has up to 4 configurable performance counters and
13 * Cortex-A9 has up to 31 configurable performance counters and
53 * - all (taken) branch instructions,
54 * - instructions that explicitly write the PC,
55 * - exception generating instructions.
80 /* ARMv7 Cortex-A8 specific event types */
86 /* ARMv7 Cortex-A9 specific event types */
91 /* ARMv7 Cortex-A5 specific event types */
95 /* ARMv7 Cortex-A15 specific event types */
111 /* ARMv7 Cortex-A12 specific event types */
150 * Cortex-A8 HW events mapping
203 * Cortex-A9 HW events mapping
247 * Cortex-A5 HW events mapping
293 * Cortex-A15 HW events mapping
342 * Cortex-A7 HW events mapping
391 * Cortex-A12 HW events mapping
533 PMU_FORMAT_ATTR(event, "config:0-7");
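The "config:0-7" format attribute exposes the raw event number in the low byte of the perf config word. As a minimal userspace sketch (the 0x08 event number and the measurement scaffolding are illustrative assumptions, not taken from this file), such a raw event could be requested like this:

#include <linux/perf_event.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/syscall.h>
#include <unistd.h>

int main(void)
{
	struct perf_event_attr attr;
	long long count;
	int fd;

	memset(&attr, 0, sizeof(attr));
	attr.size = sizeof(attr);
	attr.type = PERF_TYPE_RAW;
	attr.config = 0x08;		/* event number in config:0-7; 0x08 is just an example */
	attr.disabled = 1;
	attr.exclude_kernel = 1;	/* only honoured where the PMU registers armv7pmu_set_event_filter */

	fd = syscall(__NR_perf_event_open, &attr, 0, -1, -1, 0);
	if (fd < 0) {
		perror("perf_event_open");
		return 1;
	}

	ioctl(fd, PERF_EVENT_IOC_ENABLE, 0);
	/* ... workload under measurement ... */
	ioctl(fd, PERF_EVENT_IOC_DISABLE, 0);

	if (read(fd, &count, sizeof(count)) == sizeof(count))
		printf("raw event 0x08: %lld\n", count);

	close(fd);
	return 0;
}

With the sysfs format attribute in place, the equivalent perf-tool spelling would be along the lines of "perf stat -e armv7_cortex_a8/event=0x08/", using one of the PMU names registered further down.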
659 * Per-CPU PMNC: config reg
666 #define ARMV7_PMNC_DP (1 << 5) /* Disable CCNT if non-invasive debug */
693 #define ARMV7_SDER_SUNIDEN BIT(1) /* Permit non-invasive debug */
716 return test_bit(idx, cpu_pmu->cntr_mask); in armv7_pmnc_counter_valid()
732 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu); in armv7pmu_read_counter()
733 struct hw_perf_event *hwc = &event->hw; in armv7pmu_read_counter()
734 int idx = hwc->idx; in armv7pmu_read_counter()
752 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu); in armv7pmu_write_counter()
753 struct hw_perf_event *hwc = &event->hw; in armv7pmu_write_counter()
754 int idx = hwc->idx; in armv7pmu_write_counter()
838 for_each_set_bit(cnt, cpu_pmu->cntr_mask, ARMV7_IDX_COUNTER_MAX) { in armv7_pmnc_dump_regs()
850 struct hw_perf_event *hwc = &event->hw; in armv7pmu_enable_event()
851 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu); in armv7pmu_enable_event()
852 int idx = hwc->idx; in armv7pmu_enable_event()
865 if (cpu_pmu->set_event_filter || idx != ARMV7_IDX_CYCLE_COUNTER) in armv7pmu_enable_event()
866 armv7_pmnc_write_evtsel(idx, hwc->config_base); in armv7pmu_enable_event()
874 struct hw_perf_event *hwc = &event->hw; in armv7pmu_disable_event()
875 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu); in armv7pmu_disable_event()
876 int idx = hwc->idx; in armv7pmu_disable_event()
892 struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events); in armv7pmu_handle_irq()
912 for_each_set_bit(idx, cpu_pmu->cntr_mask, ARMPMU_MAX_HWEVENTS) { in armv7pmu_handle_irq()
913 struct perf_event *event = cpuc->events[idx]; in armv7pmu_handle_irq()
927 hwc = &event->hw; in armv7pmu_handle_irq()
929 perf_sample_data_init(&data, 0, hwc->last_period); in armv7pmu_handle_irq()
934 cpu_pmu->disable(event); in armv7pmu_handle_irq()
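Only scattered lines of the overflow interrupt handler match the query; the per-counter flow they imply is roughly the following (a stitched-together sketch: armv7_pmnc_counter_has_overflowed, armpmu_event_update, armpmu_event_set_period and the surrounding variables are assumptions about the elided context):

	for_each_set_bit(idx, cpu_pmu->cntr_mask, ARMPMU_MAX_HWEVENTS) {
		struct perf_event *event = cpuc->events[idx];
		struct hw_perf_event *hwc;

		/* skip unused counters and counters that did not overflow */
		if (!event || !armv7_pmnc_counter_has_overflowed(pmnc, idx))
			continue;

		hwc = &event->hw;
		armpmu_event_update(event);			/* fold the hardware delta into the count */
		perf_sample_data_init(&data, 0, hwc->last_period);
		if (!armpmu_event_set_period(event))
			continue;
		if (perf_event_overflow(event, &data, regs))	/* throttled: stop this counter */
			cpu_pmu->disable(event);
	}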
965 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu); in armv7pmu_get_event_idx()
966 struct hw_perf_event *hwc = &event->hw; in armv7pmu_get_event_idx()
967 unsigned long evtype = hwc->config_base & ARMV7_EVTYPE_EVENT; in armv7pmu_get_event_idx()
971 if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask)) in armv7pmu_get_event_idx()
972 return -EAGAIN; in armv7pmu_get_event_idx()
981 for_each_set_bit(idx, cpu_pmu->cntr_mask, ARMV7_IDX_COUNTER_MAX) { in armv7pmu_get_event_idx()
982 if (!test_and_set_bit(idx, cpuc->used_mask)) in armv7pmu_get_event_idx()
987 return -EAGAIN; in armv7pmu_get_event_idx()
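The counter-allocation policy is only partially visible above; a minimal sketch of the flow it implies (ARMV7_PERFCTR_CPU_CYCLES and the exact control flow are assumptions filling the elided lines) is:

/* Cycle-count events always claim the dedicated cycle counter; every other
 * event takes the first free programmable counter, else -EAGAIN. */
static int example_get_event_idx(struct pmu_hw_events *cpuc,
				 struct arm_pmu *cpu_pmu, unsigned long evtype)
{
	int idx;

	if (evtype == ARMV7_PERFCTR_CPU_CYCLES) {
		if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask))
			return -EAGAIN;
		return ARMV7_IDX_CYCLE_COUNTER;
	}

	for_each_set_bit(idx, cpu_pmu->cntr_mask, ARMV7_IDX_COUNTER_MAX)
		if (!test_and_set_bit(idx, cpuc->used_mask))
			return idx;

	return -EAGAIN;
}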
993 clear_bit(event->hw.idx, cpuc->used_mask); in armv7pmu_clear_event_idx()
1004 if (attr->exclude_idle) { in armv7pmu_set_event_filter()
1006 return -EOPNOTSUPP; in armv7pmu_set_event_filter()
1008 if (attr->exclude_user) in armv7pmu_set_event_filter()
1010 if (attr->exclude_kernel) in armv7pmu_set_event_filter()
1012 if (!attr->exclude_hv) in armv7pmu_set_event_filter()
1019 event->config_base = config_base; in armv7pmu_set_event_filter()
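The filter bits OR-ed into config_base between the exclude_* checks are not part of the match; a hedged reconstruction of the whole helper (the ARMV7_EXCLUDE_USER, ARMV7_EXCLUDE_PL1 and ARMV7_INCLUDE_HYP names are assumptions) looks like:

static int example_set_event_filter(struct hw_perf_event *event,
				    struct perf_event_attr *attr)
{
	unsigned long config_base = 0;

	if (attr->exclude_idle)
		return -EOPNOTSUPP;		/* idle exclusion is not supported */
	if (attr->exclude_user)
		config_base |= ARMV7_EXCLUDE_USER;
	if (attr->exclude_kernel)
		config_base |= ARMV7_EXCLUDE_PL1;
	if (!attr->exclude_hv)
		config_base |= ARMV7_INCLUDE_HYP;

	/* stashed in the event and folded into the evtype on enable */
	event->config_base = config_base;
	return 0;
}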
1029 if (cpu_pmu->secure_access) { in armv7pmu_reset()
1036 for_each_set_bit(idx, cpu_pmu->cntr_mask, ARMPMU_MAX_HWEVENTS) { in armv7pmu_reset()
1101 cpu_pmu->handle_irq = armv7pmu_handle_irq; in armv7pmu_init()
1102 cpu_pmu->enable = armv7pmu_enable_event; in armv7pmu_init()
1103 cpu_pmu->disable = armv7pmu_disable_event; in armv7pmu_init()
1104 cpu_pmu->read_counter = armv7pmu_read_counter; in armv7pmu_init()
1105 cpu_pmu->write_counter = armv7pmu_write_counter; in armv7pmu_init()
1106 cpu_pmu->get_event_idx = armv7pmu_get_event_idx; in armv7pmu_init()
1107 cpu_pmu->clear_event_idx = armv7pmu_clear_event_idx; in armv7pmu_init()
1108 cpu_pmu->start = armv7pmu_start; in armv7pmu_init()
1109 cpu_pmu->stop = armv7pmu_stop; in armv7pmu_init()
1110 cpu_pmu->reset = armv7pmu_reset; in armv7pmu_init()
1120 bitmap_set(cpu_pmu->cntr_mask, 0, nb_cnt); in armv7_read_num_pmnc_events()
1123 set_bit(ARMV7_IDX_CYCLE_COUNTER, cpu_pmu->cntr_mask); in armv7_read_num_pmnc_events()
1128 return smp_call_function_any(&arm_pmu->supported_cpus, in armv7_probe_num_events()
1136 cpu_pmu->name = "armv7_cortex_a8"; in armv7_a8_pmu_init()
1137 cpu_pmu->map_event = armv7_a8_map_event; in armv7_a8_pmu_init()
1138 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] = in armv7_a8_pmu_init()
1140 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] = in armv7_a8_pmu_init()
1148 cpu_pmu->name = "armv7_cortex_a9"; in armv7_a9_pmu_init()
1149 cpu_pmu->map_event = armv7_a9_map_event; in armv7_a9_pmu_init()
1150 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] = in armv7_a9_pmu_init()
1152 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] = in armv7_a9_pmu_init()
1160 cpu_pmu->name = "armv7_cortex_a5"; in armv7_a5_pmu_init()
1161 cpu_pmu->map_event = armv7_a5_map_event; in armv7_a5_pmu_init()
1162 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] = in armv7_a5_pmu_init()
1164 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] = in armv7_a5_pmu_init()
1172 cpu_pmu->name = "armv7_cortex_a15"; in armv7_a15_pmu_init()
1173 cpu_pmu->map_event = armv7_a15_map_event; in armv7_a15_pmu_init()
1174 cpu_pmu->set_event_filter = armv7pmu_set_event_filter; in armv7_a15_pmu_init()
1175 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] = in armv7_a15_pmu_init()
1177 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] = in armv7_a15_pmu_init()
1185 cpu_pmu->name = "armv7_cortex_a7"; in armv7_a7_pmu_init()
1186 cpu_pmu->map_event = armv7_a7_map_event; in armv7_a7_pmu_init()
1187 cpu_pmu->set_event_filter = armv7pmu_set_event_filter; in armv7_a7_pmu_init()
1188 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] = in armv7_a7_pmu_init()
1190 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] = in armv7_a7_pmu_init()
1198 cpu_pmu->name = "armv7_cortex_a12"; in armv7_a12_pmu_init()
1199 cpu_pmu->map_event = armv7_a12_map_event; in armv7_a12_pmu_init()
1200 cpu_pmu->set_event_filter = armv7pmu_set_event_filter; in armv7_a12_pmu_init()
1201 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] = in armv7_a12_pmu_init()
1203 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] = in armv7_a12_pmu_init()
1211 cpu_pmu->name = "armv7_cortex_a17"; in armv7_a17_pmu_init()
1212 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] = in armv7_a17_pmu_init()
1214 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] = in armv7_a17_pmu_init()
1223-1231 * [Krait PMRESRn register layout diagram; only its border lines matched the query]
1236 * hwc->config_base = 0xNRCCG
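The 0xNRCCG value packs the unit prefix (N: CPU vs VeNum), the PMRESR region (R), the event code (CC) and the group (G) into nibble-aligned fields; a sketch of accessors for that layout (bit positions assumed from the nibble layout; the driver's own EVENT_REGION/EVENT_CODE/EVENT_GROUP helpers appear further down) is:

/* Nibble-aligned 0xNRCCG accessors (assumed layout). Example: 0x12021
 * decodes as a CPU event (N=1) in region 2 with code 0x02, group 1. */
#define EXAMPLE_GROUP(cfg)	((cfg) & 0xf)		/* G  */
#define EXAMPLE_CODE(cfg)	(((cfg) >> 4) & 0xff)	/* CC */
#define EXAMPLE_REGION(cfg)	(((cfg) >> 12) & 0xf)	/* R  */
#define EXAMPLE_PREFIX(cfg)	(((cfg) >> 16) & 0xf)	/* N  */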
1368 /* Mix in mode-exclusion bits */ in krait_evt_setup()
1428 struct hw_perf_event *hwc = &event->hw; in krait_pmu_disable_event()
1429 int idx = hwc->idx; in krait_pmu_disable_event()
1439 if (hwc->config_base & KRAIT_EVENT_MASK) in krait_pmu_disable_event()
1440 krait_clearpmu(hwc->config_base); in krait_pmu_disable_event()
1448 struct hw_perf_event *hwc = &event->hw; in krait_pmu_enable_event()
1449 int idx = hwc->idx; in krait_pmu_enable_event()
1456 if (hwc->config_base & KRAIT_EVENT_MASK) in krait_pmu_enable_event()
1457 krait_evt_setup(idx, hwc->config_base); in krait_pmu_enable_event()
1459 armv7_pmnc_write_evtsel(idx, hwc->config_base); in krait_pmu_enable_event()
1483 for_each_set_bit(idx, cpu_pmu->cntr_mask, ARMV7_IDX_COUNTER_MAX) { in krait_pmu_reset()
1494 struct hw_perf_event *hwc = &event->hw; in krait_event_to_bit()
1495 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu); in krait_event_to_bit()
1497 if (hwc->config_base & VENUM_EVENT) in krait_event_to_bit()
1501 bit -= krait_get_pmresrn_event(0); in krait_event_to_bit()
1507 bit += bitmap_weight(cpu_pmu->cntr_mask, ARMV7_IDX_COUNTER_MAX); in krait_event_to_bit()
1520 int bit = -1; in krait_pmu_get_event_idx()
1521 struct hw_perf_event *hwc = &event->hw; in krait_pmu_get_event_idx()
1522 unsigned int region = EVENT_REGION(hwc->config_base); in krait_pmu_get_event_idx()
1523 unsigned int code = EVENT_CODE(hwc->config_base); in krait_pmu_get_event_idx()
1524 unsigned int group = EVENT_GROUP(hwc->config_base); in krait_pmu_get_event_idx()
1525 bool venum_event = EVENT_VENUM(hwc->config_base); in krait_pmu_get_event_idx()
1526 bool krait_event = EVENT_CPU(hwc->config_base); in krait_pmu_get_event_idx()
1531 return -EINVAL; in krait_pmu_get_event_idx()
1533 return -EINVAL; in krait_pmu_get_event_idx()
1536 if (test_and_set_bit(bit, cpuc->used_mask)) in krait_pmu_get_event_idx()
1537 return -EAGAIN; in krait_pmu_get_event_idx()
1542 clear_bit(bit, cpuc->used_mask); in krait_pmu_get_event_idx()
1551 struct hw_perf_event *hwc = &event->hw; in krait_pmu_clear_event_idx()
1552 unsigned int region = EVENT_REGION(hwc->config_base); in krait_pmu_clear_event_idx()
1553 unsigned int group = EVENT_GROUP(hwc->config_base); in krait_pmu_clear_event_idx()
1554 bool venum_event = EVENT_VENUM(hwc->config_base); in krait_pmu_clear_event_idx()
1555 bool krait_event = EVENT_CPU(hwc->config_base); in krait_pmu_clear_event_idx()
1560 clear_bit(bit, cpuc->used_mask); in krait_pmu_clear_event_idx()
1567 cpu_pmu->name = "armv7_krait"; in krait_pmu_init()
1569 if (of_property_read_bool(cpu_pmu->plat_device->dev.of_node, in krait_pmu_init()
1570 "qcom,no-pc-write")) in krait_pmu_init()
1571 cpu_pmu->map_event = krait_map_event_no_branch; in krait_pmu_init()
1573 cpu_pmu->map_event = krait_map_event; in krait_pmu_init()
1574 cpu_pmu->set_event_filter = armv7pmu_set_event_filter; in krait_pmu_init()
1575 cpu_pmu->reset = krait_pmu_reset; in krait_pmu_init()
1576 cpu_pmu->enable = krait_pmu_enable_event; in krait_pmu_init()
1577 cpu_pmu->disable = krait_pmu_disable_event; in krait_pmu_init()
1578 cpu_pmu->get_event_idx = krait_pmu_get_event_idx; in krait_pmu_init()
1579 cpu_pmu->clear_event_idx = krait_pmu_clear_event_idx; in krait_pmu_init()
1587-1597 * [Scorpion PMRESRn register layout diagram; only its border lines matched the query]
1603 * hwc->config_base = 0xNRCCG
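Scorpion reuses the same 0xNRCCG packing, with the N prefix selecting between CPU (PMRESRn) and VeNum (VPMRESR) events; for instance, under the accessor sketch shown after the Krait layout above, a hypothetical 0x20041 would decode as a VeNum event in region 0 with code 0x04, group 1.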
1692 /* Mix in mode-exclusion bits */ in scorpion_evt_setup()
1738 struct hw_perf_event *hwc = &event->hw; in scorpion_pmu_disable_event()
1739 int idx = hwc->idx; in scorpion_pmu_disable_event()
1749 if (hwc->config_base & KRAIT_EVENT_MASK) in scorpion_pmu_disable_event()
1750 scorpion_clearpmu(hwc->config_base); in scorpion_pmu_disable_event()
1758 struct hw_perf_event *hwc = &event->hw; in scorpion_pmu_enable_event()
1759 int idx = hwc->idx; in scorpion_pmu_enable_event()
1766 if (hwc->config_base & KRAIT_EVENT_MASK) in scorpion_pmu_enable_event()
1767 scorpion_evt_setup(idx, hwc->config_base); in scorpion_pmu_enable_event()
1769 armv7_pmnc_write_evtsel(idx, hwc->config_base); in scorpion_pmu_enable_event()
1794 for_each_set_bit(idx, cpu_pmu->cntr_mask, ARMV7_IDX_COUNTER_MAX) { in scorpion_pmu_reset()
1804 struct hw_perf_event *hwc = &event->hw; in scorpion_event_to_bit()
1805 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu); in scorpion_event_to_bit()
1807 if (hwc->config_base & VENUM_EVENT) in scorpion_event_to_bit()
1811 bit -= scorpion_get_pmresrn_event(0); in scorpion_event_to_bit()
1817 bit += bitmap_weight(cpu_pmu->cntr_mask, ARMV7_IDX_COUNTER_MAX); in scorpion_event_to_bit()
1830 int bit = -1; in scorpion_pmu_get_event_idx()
1831 struct hw_perf_event *hwc = &event->hw; in scorpion_pmu_get_event_idx()
1832 unsigned int region = EVENT_REGION(hwc->config_base); in scorpion_pmu_get_event_idx()
1833 unsigned int group = EVENT_GROUP(hwc->config_base); in scorpion_pmu_get_event_idx()
1834 bool venum_event = EVENT_VENUM(hwc->config_base); in scorpion_pmu_get_event_idx()
1835 bool scorpion_event = EVENT_CPU(hwc->config_base); in scorpion_pmu_get_event_idx()
1840 return -EINVAL; in scorpion_pmu_get_event_idx()
1843 if (test_and_set_bit(bit, cpuc->used_mask)) in scorpion_pmu_get_event_idx()
1844 return -EAGAIN; in scorpion_pmu_get_event_idx()
1849 clear_bit(bit, cpuc->used_mask); in scorpion_pmu_get_event_idx()
1858 struct hw_perf_event *hwc = &event->hw; in scorpion_pmu_clear_event_idx()
1859 unsigned int region = EVENT_REGION(hwc->config_base); in scorpion_pmu_clear_event_idx()
1860 unsigned int group = EVENT_GROUP(hwc->config_base); in scorpion_pmu_clear_event_idx()
1861 bool venum_event = EVENT_VENUM(hwc->config_base); in scorpion_pmu_clear_event_idx()
1862 bool scorpion_event = EVENT_CPU(hwc->config_base); in scorpion_pmu_clear_event_idx()
1867 clear_bit(bit, cpuc->used_mask); in scorpion_pmu_clear_event_idx()
1874 cpu_pmu->name = "armv7_scorpion"; in scorpion_pmu_init()
1875 cpu_pmu->map_event = scorpion_map_event; in scorpion_pmu_init()
1876 cpu_pmu->reset = scorpion_pmu_reset; in scorpion_pmu_init()
1877 cpu_pmu->enable = scorpion_pmu_enable_event; in scorpion_pmu_init()
1878 cpu_pmu->disable = scorpion_pmu_disable_event; in scorpion_pmu_init()
1879 cpu_pmu->get_event_idx = scorpion_pmu_get_event_idx; in scorpion_pmu_init()
1880 cpu_pmu->clear_event_idx = scorpion_pmu_clear_event_idx; in scorpion_pmu_init()
1887 cpu_pmu->name = "armv7_scorpion_mp"; in scorpion_mp_pmu_init()
1888 cpu_pmu->map_event = scorpion_map_event; in scorpion_mp_pmu_init()
1889 cpu_pmu->reset = scorpion_pmu_reset; in scorpion_mp_pmu_init()
1890 cpu_pmu->enable = scorpion_pmu_enable_event; in scorpion_mp_pmu_init()
1891 cpu_pmu->disable = scorpion_pmu_disable_event; in scorpion_mp_pmu_init()
1892 cpu_pmu->get_event_idx = scorpion_pmu_get_event_idx; in scorpion_mp_pmu_init()
1893 cpu_pmu->clear_event_idx = scorpion_pmu_clear_event_idx; in scorpion_mp_pmu_init()
1898 {.compatible = "arm,cortex-a17-pmu", .data = armv7_a17_pmu_init},
1899 {.compatible = "arm,cortex-a15-pmu", .data = armv7_a15_pmu_init},
1900 {.compatible = "arm,cortex-a12-pmu", .data = armv7_a12_pmu_init},
1901 {.compatible = "arm,cortex-a9-pmu", .data = armv7_a9_pmu_init},
1902 {.compatible = "arm,cortex-a8-pmu", .data = armv7_a8_pmu_init},
1903 {.compatible = "arm,cortex-a7-pmu", .data = armv7_a7_pmu_init},
1904 {.compatible = "arm,cortex-a5-pmu", .data = armv7_a5_pmu_init},
1905 {.compatible = "qcom,krait-pmu", .data = krait_pmu_init},
1906 {.compatible = "qcom,scorpion-pmu", .data = scorpion_pmu_init},
1907 {.compatible = "qcom,scorpion-mp-pmu", .data = scorpion_mp_pmu_init},
1918 .name = "armv7-pmu",