Lines matching +full:0 +full:x10 (each hit is shown with its source line number and, where applicable, its enclosing function)

24 #define PMU_PMCR_E         (1 << 0)
33 #define PMU_PMCR_N_MASK 0x1f
35 #define PMU_PMCR_ID_MASK 0xff
37 #define PMU_PMCR_IMP_MASK 0xff
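The masks above pair with shift constants (not among the hits) to pull the read-only identification fields out of PMCR: N is bits [15:11], IDCODE bits [23:16], IMP bits [31:24]. A minimal decode sketch, assuming those architectural shift values:

static inline uint32_t pmcr_num_counters(uint32_t pmcr)
{
	/* N: number of implemented event counters, bits [15:11] */
	return (pmcr >> 11) & PMU_PMCR_N_MASK;
}

static inline uint32_t pmcr_impl(uint32_t pmcr)
{
	/* IMP: implementer code, bits [31:24] */
	return (pmcr >> 24) & PMU_PMCR_IMP_MASK;
}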
44 #define SW_INCR 0x0
45 #define INST_RETIRED 0x8
46 #define CPU_CYCLES 0x11
47 #define MEM_ACCESS 0x13
48 #define INST_PREC 0x1B
49 #define STALL_FRONTEND 0x23
50 #define STALL_BACKEND 0x24
51 #define CHAIN 0x1E
53 #define COMMON_EVENTS_LOW 0x0
54 #define COMMON_EVENTS_HIGH 0x3F
55 #define EXT_COMMON_EVENTS_LOW 0x4000
56 #define EXT_COMMON_EVENTS_HIGH 0x403F
58 #define ALL_SET_32 0x00000000FFFFFFFFULL
59 #define ALL_SET_64 0xFFFFFFFFFFFFFFFFULL
64 #define ALL_CLEAR 0x0000000000000000ULL
65 #define PRE_OVERFLOW_32 0x00000000FFFFFFF0ULL
66 #define PRE_OVERFLOW_64 0xFFFFFFFFFFFFFFF0ULL
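The two PRE_OVERFLOW presets sit exactly 16 counts below the wrap point of a 32- or 64-bit counter, so even a short workload is guaranteed to push a preset counter past overflow. A quick worked check of that arithmetic (plain C, host-runnable):

#include <assert.h>
#include <stdint.h>

int main(void)
{
	/* 0xFFFFFFF0 + 0x10 == 2^32: a 32-bit counter wraps to 0 */
	assert((uint32_t)(0xFFFFFFF0u + 0x10) == 0);
	/* same arithmetic for the 64-bit preset */
	assert((uint64_t)0xFFFFFFFFFFFFFFF0ULL + 0x10 == 0);
	return 0;
}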
102 #define ID_DFR0_PERFMON_MASK 0xf
104 #define ID_DFR0_PMU_NOTIMPL 0b0000
105 #define ID_DFR0_PMU_V1 0b0001
106 #define ID_DFR0_PMU_V2 0b0010
107 #define ID_DFR0_PMU_V3 0b0011
108 #define ID_DFR0_PMU_V3_8_1 0b0100
109 #define ID_DFR0_PMU_V3_8_4 0b0101
110 #define ID_DFR0_PMU_V3_8_5 0b0110
111 #define ID_DFR0_PMU_IMPDEF 0b1111
113 #define PMCR __ACCESS_CP15(c9, 0, c12, 0)
114 #define ID_DFR0 __ACCESS_CP15(c0, 0, c1, 2)
115 #define PMSELR __ACCESS_CP15(c9, 0, c12, 5)
116 #define PMXEVTYPER __ACCESS_CP15(c9, 0, c13, 1)
117 #define PMCNTENSET __ACCESS_CP15(c9, 0, c12, 1)
118 #define PMCNTENCLR __ACCESS_CP15(c9, 0, c12, 2)
119 #define PMOVSR __ACCESS_CP15(c9, 0, c12, 3)
120 #define PMCCNTR32 __ACCESS_CP15(c9, 0, c13, 0)
121 #define PMINTENCLR __ACCESS_CP15(c9, 0, c14, 2)
122 #define PMCCNTR64 __ACCESS_CP15_64(0, c9)
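Each __ACCESS_CP15 tuple above is the (CRn, op1, CRm, op2) encoding of an mrc/mcr instruction; the lone __ACCESS_CP15_64 entry is the mrrc/mcrr register-pair form used for the 64-bit cycle counter. A sketch of what those expansions boil down to in raw AArch32 inline assembly (illustrative helper names, not the harness's):

#include <stdint.h>

static inline uint32_t read_pmcr_raw(void)
{
	uint32_t val;

	/* PMCR: op1=0, CRn=c9, CRm=c12, op2=0, per the table above */
	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (val));
	return val;
}

static inline uint64_t read_pmccntr64_raw(void)
{
	uint64_t val;

	/* 64-bit PMCCNTR: op1=0, CRm=c9, read into a register pair */
	asm volatile("mrrc p15, 0, %Q0, %R0, c9" : "=r" (val));
	return val;
}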
141 write_sysreg(value & 0xffffffff, PMCCNTR32); in set_pmccntr()
162 " mcr p15, 0, %[pmcr], c9, c12, 0\n" in precise_instrs_loop()
166 " mcr p15, 0, %[z], c9, c12, 0\n" in precise_instrs_loop()
169 : [pmcr] "r" (pmcr), [z] "r" (0) in precise_instrs_loop()
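Only the two PMCR writes and the operand list of precise_instrs_loop() match the query; the iteration body between them does not. A hedged reconstruction of the whole routine, assuming the usual enable/loop/disable shape (the loop body itself is an assumption):

static inline void precise_instrs_loop(int loop, uint32_t pmcr)
{
	asm volatile(
	"	mcr	p15, 0, %[pmcr], c9, c12, 0\n"	/* PMCR <- pmcr: start */
	"	isb\n"
	/* each iteration retires exactly two instructions: subs + bgt */
	"1:	subs	%[loop], %[loop], #1\n"
	"	bgt	1b\n"
	"	mcr	p15, 0, %[z], c9, c12, 0\n"	/* PMCR <- 0: stop */
	"	isb\n"
	: [loop] "+r" (loop)
	: [pmcr] "r" (pmcr), [z] "r" (0)
	: "cc");
}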
200 #define ID_AA64DFR0_PERFMON_MASK 0xf
202 #define ID_DFR0_PMU_NOTIMPL 0b0000
203 #define ID_DFR0_PMU_V3 0b0001
204 #define ID_DFR0_PMU_V3_8_1 0b0100
205 #define ID_DFR0_PMU_V3_8_4 0b0101
206 #define ID_DFR0_PMU_V3_8_5 0b0110
207 #define ID_DFR0_PMU_IMPDEF 0b1111
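On AArch64 the same version encodings are read from the PMUVer field, bits [11:8] of ID_AA64DFR0_EL1 (only the mask appears in the hits; the shift of 8 is the architectural value). A minimal probe sketch:

static inline int pmu_version_sketch(void)
{
	uint64_t dfr0;

	asm volatile("mrs %0, id_aa64dfr0_el1" : "=r" (dfr0));
	return (dfr0 >> 8) & ID_AA64DFR0_PERFMON_MASK; /* e.g. ID_DFR0_PMU_V3_8_5 */
}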
261 * event support for events 0-31/32-63. Their High in is_event_supported()
263 * starting at 0x4000, using the same split. in is_event_supported()
273 supported = reg & (1UL << (n & 0x3F)); in is_event_supported()
276 report_info("event 0x%x is not supported", n); in is_event_supported()
319 " mov x10, %[loop]\n" in mem_access_loop()
320 "1: sub x10, x10, #1\n" in mem_access_loop()
322 " cmp x10, #0x0\n" in mem_access_loop()
329 : "x9", "x10", "cc"); in mem_access_loop()
345 for (i = 0; i < 32; i++) { in irq_handler()
363 for (i = 0; i < 32; i++) in pmu_reset_stats()
364 pmu_stats.interrupts[i] = 0; in pmu_reset_stats()
366 pmu_stats.bitmap = 0; in pmu_reset_stats()
397 * select counter 0 in test_event_counter_config()
399 write_sysreg(0, PMSELR_EL0); in test_event_counter_config()
401 write_sysreg(0xEA, PMXEVTYPER_EL0); in test_event_counter_config()
402 write_sysreg(0xdeadbeef, PMXEVCNTR_EL0); in test_event_counter_config()
403 report((read_regn_el0(pmevtyper, 0) & 0xFFF) == 0xEA, in test_event_counter_config()
405 report((read_regn_el0(pmevcntr, 0) == 0xdeadbeef), in test_event_counter_config()
408 /* try to configure an unsupported event within the range [0x0, 0x3F] */ in test_event_counter_config()
409 for (i = 0; i <= 0x3F; i++) { in test_event_counter_config()
413 if (i > 0x3F) { in test_event_counter_config()
414 report_skip("pmevtyper: all events within [0x0, 0x3F] are supported"); in test_event_counter_config()
418 /* select counter 0 */ in test_event_counter_config()
419 write_sysreg(0, PMSELR_EL0); in test_event_counter_config()
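test_event_counter_config() programs counter 0 through the selector indirection (PMSELR_EL0 picks a counter, after which PMXEVTYPER_EL0 and PMXEVCNTR_EL0 alias that counter's PMEVTYPER0/PMEVCNTR0), then reads back through the direct registers to confirm the two views agree. The round trip, sketched with raw mrs/msr instead of the harness wrappers:

#include <assert.h>
#include <stdint.h>

static void pmselr_roundtrip_sketch(void)
{
	uint64_t typer, cntr;

	asm volatile("msr pmselr_el0, xzr\n\tisb");	/* select counter 0 */
	asm volatile("msr pmxevtyper_el0, %0" :: "r" (0xEAUL));
	asm volatile("msr pmxevcntr_el0, %0" :: "r" (0xdeadbeefUL));

	/* read back through the non-indirected registers */
	asm volatile("mrs %0, pmevtyper0_el0" : "=r" (typer));
	asm volatile("mrs %0, pmevcntr0_el0" : "=r" (cntr));
	assert((typer & 0xFFF) == 0xEA && cntr == 0xdeadbeef);
}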
438 for (i = 0; i < nb_events; i++) { in satisfy_prerequisites()
440 report_skip("Skip test as event 0x%x is not supported", in satisfy_prerequisites()
451 * Bits [63:0] are always incremented for 64-bit counters, in pmevcntr_mask()
453 * bits [31:0] in pmevcntr_mask()
459 return ~0; in pmevcntr_mask()
461 return (uint32_t)~0; in pmevcntr_mask()
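pmevcntr_mask() answers how many bits of an event counter actually count: all 64 from PMUv3 for ARMv8.5 (FEAT_PMUv3p5) onward, otherwise only the low 32. Read together with the matched comment and the two returns, the function is presumably (assuming pmu.version holds the probed PMUVer value):

static uint64_t pmevcntr_mask(void)
{
	/*
	 * Bits [63:0] are always incremented for 64-bit counters,
	 * i.e. from FEAT_PMUv3p5 on; earlier PMUs only count
	 * bits [31:0].
	 */
	if (pmu.version >= ID_DFR0_PMU_V3_8_5)
		return ~0;

	return (uint32_t)~0;
}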
478 uint64_t pmcr_lp = overflow_at_64bits ? PMU_PMCR_LP : 0; in test_basic_event_count()
490 write_regn_el0(pmevtyper, 0, CPU_CYCLES | PMEVTYPER_EXCLUDE_EL0); in test_basic_event_count()
506 /* Preset counter #0 to pre overflow value to trigger an overflow */ in test_basic_event_count()
507 write_regn_el0(pmevcntr, 0, pre_overflow); in test_basic_event_count()
508 report(read_regn_el0(pmevcntr, 0) == pre_overflow, in test_basic_event_count()
509 "counter #0 preset to pre-overflow value"); in test_basic_event_count()
510 report(!read_regn_el0(pmevcntr, 1), "counter #1 is 0"); in test_basic_event_count()
523 /* Disable all counters but counters #0 and #1 */ in test_basic_event_count()
524 write_sysreg_s(~0x3, PMCNTENCLR_EL0); in test_basic_event_count()
526 (read_sysreg_s(PMCNTENSET_EL0) == 0x3), in test_basic_event_count()
527 "pmcntenset: just enabled #0 and #1"); in test_basic_event_count()
531 report(!read_sysreg(pmovsclr_el0), "check overflow reg is 0"); in test_basic_event_count()
536 "pmintenclr_el1=0, all interrupts disabled"); in test_basic_event_count()
547 report_info("counter #0 is 0x%lx (CPU_CYCLES)", in test_basic_event_count()
548 read_regn_el0(pmevcntr, 0)); in test_basic_event_count()
549 report_info("counter #1 is 0x%lx (INST_RETIRED)", in test_basic_event_count()
552 report_info("overflow reg = 0x%lx", read_sysreg(pmovsclr_el0)); in test_basic_event_count()
553 report(read_sysreg(pmovsclr_el0) == 0x1, in test_basic_event_count()
554 "check overflow happened on #0 only"); in test_basic_event_count()
562 uint64_t pmcr_lp = overflow_at_64bits ? PMU_PMCR_LP : 0; in test_mem_access()
570 write_regn_el0(pmevtyper, 0, MEM_ACCESS | PMEVTYPER_EXCLUDE_EL0); in test_mem_access()
572 write_sysreg_s(0x3, PMCNTENSET_EL0); in test_mem_access()
575 report_info("counter #0 is 0x%lx (MEM_ACCESS)", read_regn_el0(pmevcntr, 0)); in test_mem_access()
576 report_info("counter #1 is 0x%lx (MEM_ACCESS)", read_regn_el0(pmevcntr, 1)); in test_mem_access()
578 report((read_regn_el0(pmevcntr, 0) == read_regn_el0(pmevcntr, 1)) && in test_mem_access()
579 (read_regn_el0(pmevcntr, 0) >= 20) && !read_sysreg(pmovsclr_el0), in test_mem_access()
584 write_regn_el0(pmevcntr, 0, pre_overflow); in test_mem_access()
586 write_sysreg_s(0x3, PMCNTENSET_EL0); in test_mem_access()
589 report(read_sysreg(pmovsclr_el0) == 0x3, in test_mem_access()
591 report_info("cnt#0=0x%lx cnt#1=0x%lx overflow=0x%lx", in test_mem_access()
592 read_regn_el0(pmevcntr, 0), read_regn_el0(pmevcntr, 1), in test_mem_access()
599 uint64_t pmcr_lp = overflow_at_64bits ? PMU_PMCR_LP : 0; in test_sw_incr()
610 write_regn_el0(pmevtyper, 0, SW_INCR | PMEVTYPER_EXCLUDE_EL0); in test_sw_incr()
612 /* enable counters #0 and #1 */ in test_sw_incr()
613 write_sysreg_s(0x3, PMCNTENSET_EL0); in test_sw_incr()
615 write_regn_el0(pmevcntr, 0, pre_overflow); in test_sw_incr()
618 for (i = 0; i < 100; i++) in test_sw_incr()
619 write_sysreg(0x1, pmswinc_el0); in test_sw_incr()
622 report_info("SW_INCR counter #0 has value 0x%lx", read_regn_el0(pmevcntr, 0)); in test_sw_incr()
623 report(read_regn_el0(pmevcntr, 0) == pre_overflow, in test_sw_incr()
628 write_regn_el0(pmevcntr, 0, pre_overflow); in test_sw_incr()
629 write_sysreg_s(0x3, PMCNTENSET_EL0); in test_sw_incr()
633 for (i = 0; i < 100; i++) in test_sw_incr()
634 write_sysreg(0x3, pmswinc_el0); in test_sw_incr()
637 report(read_regn_el0(pmevcntr, 0) == cntr0, "counter #0 after + 100 SW_INCR"); in test_sw_incr()
639 report_info("counter values after 100 SW_INCR #0=0x%lx #1=0x%lx", in test_sw_incr()
640 read_regn_el0(pmevcntr, 0), read_regn_el0(pmevcntr, 1)); in test_sw_incr()
641 report(read_sysreg(pmovsclr_el0) == 0x1, in test_sw_incr()
642 "overflow on counter #0 after 100 SW_INCR"); in test_sw_incr()
671 write_regn_el0(pmevtyper, 0, CPU_CYCLES | PMEVTYPER_EXCLUDE_EL0); in test_chained_counters()
673 write_regn_el0(pmevcntr, 0, PRE_OVERFLOW_32); in test_chained_counters()
674 enable_chain_counter(0); in test_chained_counters()
679 report(read_sysreg(pmovsclr_el0) == 0x1, "overflow recorded for chained incr #1"); in test_chained_counters()
685 write_regn_el0(pmevcntr, 0, PRE_OVERFLOW_32); in test_chained_counters()
686 write_regn_el0(pmevcntr, 1, 0x1); in test_chained_counters()
687 enable_chain_counter(0); in test_chained_counters()
689 report_info("overflow reg = 0x%lx", read_sysreg(pmovsclr_el0)); in test_chained_counters()
691 report(read_sysreg(pmovsclr_el0) == 0x1, "overflow recorded for chained incr #2"); in test_chained_counters()
693 write_regn_el0(pmevcntr, 0, PRE_OVERFLOW_32); in test_chained_counters()
697 report_info("overflow reg = 0x%lx", read_sysreg(pmovsclr_el0)); in test_chained_counters()
698 report(read_regn_el0(pmevcntr, 1) == 0, "CHAIN counter #1 wrapped"); in test_chained_counters()
699 report(read_sysreg(pmovsclr_el0) == 0x3, "overflow on even and odd counters"); in test_chained_counters()
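Programming CHAIN (0x1E) on an odd-numbered counter makes it tick once per overflow of the even counter directly below it, so the pair behaves like a single 64-bit counter; an overflow bit is still recorded for whichever half wraps. enable_chain_counter()/disable_chain_counter() are the test's helpers (not among the hits); presumably they enable the high half first so no low-half overflow can slip by unchained:

static void enable_chain_counter_sketch(int even)
{
	write_sysreg_s(1UL << (even + 1), PMCNTENSET_EL0); /* high (CHAIN) half first */
	isb();
	write_sysreg_s(1UL << even, PMCNTENSET_EL0);	   /* then the low half */
	isb();
}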
714 write_regn_el0(pmevtyper, 0, SW_INCR | PMEVTYPER_EXCLUDE_EL0); in test_chained_sw_incr()
716 enable_chain_counter(0); in test_chained_sw_incr()
718 write_regn_el0(pmevcntr, 0, PRE_OVERFLOW_32); in test_chained_sw_incr()
722 for (i = 0; i < 100; i++) in test_chained_sw_incr()
723 write_sysreg(0x1, pmswinc_el0); in test_chained_sw_incr()
726 report((read_sysreg(pmovsclr_el0) == 0x1) && in test_chained_sw_incr()
729 report_info("overflow=0x%lx, #0=0x%lx #1=0x%lx", read_sysreg(pmovsclr_el0), in test_chained_sw_incr()
730 read_regn_el0(pmevcntr, 0), read_regn_el0(pmevcntr, 1)); in test_chained_sw_incr()
735 write_regn_el0(pmevcntr, 0, PRE_OVERFLOW_32); in test_chained_sw_incr()
737 enable_chain_counter(0); in test_chained_sw_incr()
741 for (i = 0; i < 100; i++) in test_chained_sw_incr()
742 write_sysreg(0x1, pmswinc_el0); in test_chained_sw_incr()
745 report((read_sysreg(pmovsclr_el0) == 0x3) && in test_chained_sw_incr()
746 (read_regn_el0(pmevcntr, 0) == cntr0) && in test_chained_sw_incr()
749 report_info("overflow=0x%lx, #0=0x%lx #1=0x%lx", read_sysreg(pmovsclr_el0), in test_chained_sw_incr()
750 read_regn_el0(pmevcntr, 0), read_regn_el0(pmevcntr, 1)); in test_chained_sw_incr()
753 report_info("%s #1=0x%lx #0=0x%lx overflow=0x%lx", __s, \
755 read_regn_el0(pmevcntr, 0), \
768 uint64_t cntr_val, num_events, max = 0, min = pmevcntr_mask(); in test_mem_access_reliability()
771 uint64_t pmcr_lp = overflow_at_64bits ? PMU_PMCR_LP : 0; in test_mem_access_reliability()
779 write_regn_el0(pmevtyper, 0, MEM_ACCESS | PMEVTYPER_EXCLUDE_EL0); in test_mem_access_reliability()
780 for (int i = 0; i < 100; i++) { in test_mem_access_reliability()
782 write_regn_el0(pmevcntr, 0, pre_overflow2); in test_mem_access_reliability()
783 write_sysreg_s(0x1, PMCNTENSET_EL0); in test_mem_access_reliability()
786 cntr_val = read_regn_el0(pmevcntr, 0); in test_mem_access_reliability()
816 write_regn_el0(pmevtyper, 0, MEM_ACCESS | PMEVTYPER_EXCLUDE_EL0); in test_chain_promotion()
818 write_sysreg_s(0x2, PMCNTENSET_EL0); in test_chain_promotion()
823 report(!read_regn_el0(pmevcntr, 0), in test_chain_promotion()
830 write_regn_el0(pmevcntr, 0, PRE_OVERFLOW_32); in test_chain_promotion()
831 write_sysreg_s(0x1, PMCNTENSET_EL0); in test_chain_promotion()
836 report(!read_regn_el0(pmevcntr, 1) && (read_sysreg(pmovsclr_el0) == 0x1), in test_chain_promotion()
843 write_regn_el0(pmevcntr, 0, PRE_OVERFLOW2_32); in test_chain_promotion()
844 enable_chain_counter(0); in test_chain_promotion()
851 disable_chain_counter(0); in test_chain_promotion()
852 write_sysreg_s(0x1, PMCNTENSET_EL0); /* Enable the low counter */ in test_chain_promotion()
856 report(read_sysreg(pmovsclr_el0) == 0x1, in test_chain_promotion()
857 "should have triggered an overflow on #0"); in test_chain_promotion()
866 write_sysreg_s(0x1, PMCNTENSET_EL0); in test_chain_promotion()
867 write_regn_el0(pmevcntr, 0, PRE_OVERFLOW2_32); in test_chain_promotion()
875 write_sysreg_s(0x1, PMCNTENCLR_EL0); in test_chain_promotion()
877 enable_chain_counter(0); in test_chain_promotion()
884 (read_sysreg(pmovsclr_el0) == 0x1), in test_chain_promotion()
891 write_regn_el0(pmevtyper, 0, MEM_ACCESS | PMEVTYPER_EXCLUDE_EL0); in test_chain_promotion()
893 write_sysreg_s(0x3, PMCNTENSET_EL0); in test_chain_promotion()
894 write_regn_el0(pmevcntr, 0, PRE_OVERFLOW2_32); in test_chain_promotion()
901 /* 0 becomes CHAINED */ in test_chain_promotion()
902 write_sysreg_s(0x3, PMCNTENCLR_EL0); in test_chain_promotion()
904 write_regn_el0(pmevcntr, 1, 0x0); in test_chain_promotion()
905 enable_chain_counter(0); in test_chain_promotion()
911 (read_sysreg(pmovsclr_el0) == 0x1), in test_chain_promotion()
918 write_regn_el0(pmevtyper, 0, MEM_ACCESS | PMEVTYPER_EXCLUDE_EL0); in test_chain_promotion()
920 write_regn_el0(pmevcntr, 0, PRE_OVERFLOW2_32); in test_chain_promotion()
921 enable_chain_counter(0); in test_chain_promotion()
927 disable_chain_counter(0); in test_chain_promotion()
929 write_sysreg_s(0x3, PMCNTENSET_EL0); in test_chain_promotion()
934 "overflow is expected on counter 0"); in test_chain_promotion()
945 for (i = 0; i < 32; i++) { in expect_interrupts()
957 uint64_t pmcr_lp = overflow_at_64bits ? PMU_PMCR_LP : 0; in test_overflow_interrupt()
973 write_regn_el0(pmevtyper, 0, MEM_ACCESS | PMEVTYPER_EXCLUDE_EL0); in test_overflow_interrupt()
975 write_sysreg_s(0x3, PMCNTENSET_EL0); in test_overflow_interrupt()
976 write_regn_el0(pmevcntr, 0, pre_overflow); in test_overflow_interrupt()
980 /* interrupts are disabled (PMINTENSET_EL1 == 0) */ in test_overflow_interrupt()
983 report(expect_interrupts(0), "no overflow interrupt after preset"); in test_overflow_interrupt()
988 for (i = 0; i < 100; i++) in test_overflow_interrupt()
989 write_sysreg(0x2, pmswinc_el0); in test_overflow_interrupt()
994 report(expect_interrupts(0), "no overflow interrupt after counting"); in test_overflow_interrupt()
1000 write_regn_el0(pmevcntr, 0, pre_overflow); in test_overflow_interrupt()
1011 for (i = 0; i < 100; i++) in test_overflow_interrupt()
1012 write_sysreg(0x3, pmswinc_el0); in test_overflow_interrupt()
1015 report_info("overflow=0x%lx", read_sysreg(pmovsclr_el0)); in test_overflow_interrupt()
1016 report(expect_interrupts(0x3), in test_overflow_interrupt()
1017 "overflow interrupts expected on #0 and #1"); in test_overflow_interrupt()
1031 write_regn_el0(pmevcntr, 0, pre_overflow); in test_overflow_interrupt()
1034 report(expect_interrupts(0x1), "expect overflow interrupt"); in test_overflow_interrupt()
1038 write_regn_el0(pmevcntr, 0, pre_overflow); in test_overflow_interrupt()
1043 report(expect_interrupts(0x1), in test_overflow_interrupt()
1048 report(expect_interrupts(0x3), in test_overflow_interrupt()
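The interrupt leg of test_overflow_interrupt() mirrors the polling checks: overflow sources must also be enabled in PMINTENSET_EL1 before the PPI can fire, which is why the early runs (lines 980-994) expect no interrupts and the later ones expect the 0x1/0x3 bitmaps that irq_handler() records into pmu_stats. A sketch of arming the sources (architectural register names; the harness wrappers are assumed to pass them through):

static void arm_overflow_irqs_sketch(void)
{
	write_sysreg(0xFFFFFFFF, pmintenclr_el1); /* start from all disabled */
	write_sysreg(0x3, pmintenset_el1);	  /* IRQ on #0/#1 overflow */
	isb();
}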
1066 set_pmccfiltr(0); /* count cycles in EL0, EL1, but not EL2 */ in check_cycles_increase()
1071 for (int i = 0; i < NR_SAMPLES; i++) { in check_cycles_increase()
1103 assert(num >= 4 && ((num - 2) % 2 == 0)); in measure_instrs()
1121 set_pmccfiltr(0); /* count cycles in EL0, EL1, but not EL2 */ in check_cpi()
1123 if (cpi > 0) in check_cpi()
1128 uint64_t avg, sum = 0; in check_cpi()
1131 for (int j = 0; j < NR_SAMPLES; j++) { in check_cpi()
1134 set_pmccntr(0); in check_cpi()
1142 } else if (cpi > 0 && cycles != i * cpi) { in check_cpi()
1145 } else if ((cycles >> 32) != 0) { in check_cpi()
1170 write_sysreg(0xdead, PMCCNTR64); in pmccntr64_test()
1171 report(read_sysreg(PMCCNTR64) == 0xdead, "pmccntr64"); in pmccntr64_test()
1188 report_info("PMU version: 0x%x", pmu.version); in pmu_probe()
1198 pmu.pmcr_ro = pmcr & 0xFFFFFF00; in pmu_probe()
1230 int cpi = 0; in main()
1242 if (strcmp(argv[1], "cycle-counter") == 0) { in main()
1251 } else if (strcmp(argv[1], "pmu-event-introspection") == 0) { in main()
1255 } else if (strcmp(argv[1], "pmu-event-counter-config") == 0) { in main()
1259 } else if (strcmp(argv[1], "pmu-basic-event-count") == 0) { in main()
1262 } else if (strcmp(argv[1], "pmu-mem-access-reliability") == 0) { in main()
1265 } else if (strcmp(argv[1], "pmu-mem-access") == 0) { in main()
1268 } else if (strcmp(argv[1], "pmu-sw-incr") == 0) { in main()
1271 } else if (strcmp(argv[1], "pmu-chained-counters") == 0) { in main()
1273 } else if (strcmp(argv[1], "pmu-chained-sw-incr") == 0) { in main()
1275 } else if (strcmp(argv[1], "pmu-chain-promotion") == 0) { in main()
1277 } else if (strcmp(argv[1], "pmu-overflow-interrupt") == 0) { in main()