/*
 * Test the ARM Performance Monitors Unit (PMU).
 *
 * Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
 * Copyright (C) 2016, Red Hat Inc, Wei Huang <wei@redhat.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License version 2.1 and
 * only version 2.1 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
 * for more details.
 */
#include "libcflat.h"
#include "asm/barrier.h"
#include "asm/sysreg.h"
#include "asm/processor.h"

#define PMU_PMCR_E		(1 << 0)
#define PMU_PMCR_C		(1 << 2)
#define PMU_PMCR_LC		(1 << 6)
#define PMU_PMCR_N_SHIFT	11
#define PMU_PMCR_N_MASK		0x1f
#define PMU_PMCR_ID_SHIFT	16
#define PMU_PMCR_ID_MASK	0xff
#define PMU_PMCR_IMP_SHIFT	24
#define PMU_PMCR_IMP_MASK	0xff

#define ID_DFR0_PERFMON_SHIFT	24
#define ID_DFR0_PERFMON_MASK	0xf

#define PMU_CYCLE_IDX		31

#define NR_SAMPLES		10

static unsigned int pmu_version;
#if defined(__arm__)
#define PMCR		__ACCESS_CP15(c9, 0, c12, 0)
#define ID_DFR0		__ACCESS_CP15(c0, 0, c1, 2)
#define PMSELR		__ACCESS_CP15(c9, 0, c12, 5)
#define PMXEVTYPER	__ACCESS_CP15(c9, 0, c13, 1)
#define PMCNTENSET	__ACCESS_CP15(c9, 0, c12, 1)
#define PMCCNTR32	__ACCESS_CP15(c9, 0, c13, 0)
#define PMCCNTR64	__ACCESS_CP15_64(0, c9)

static inline uint32_t get_id_dfr0(void) { return read_sysreg(ID_DFR0); }
static inline uint32_t get_pmcr(void) { return read_sysreg(PMCR); }
static inline void set_pmcr(uint32_t v) { write_sysreg(v, PMCR); }
static inline void set_pmcntenset(uint32_t v) { write_sysreg(v, PMCNTENSET); }

static inline uint64_t get_pmccntr(void)
{
	if (pmu_version == 0x3)
		return read_sysreg(PMCCNTR64);
	else
		return read_sysreg(PMCCNTR32);
}

static inline void set_pmccntr(uint64_t value)
{
	if (pmu_version == 0x3)
		write_sysreg(value, PMCCNTR64);
	else
		write_sysreg(value & 0xffffffff, PMCCNTR32);
}
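
/*
 * Note: an ID_DFR0.PerfMon value of 0x3 identifies PMUv3, the only PMU
 * version that exposes PMCCNTR as a 64-bit register to AArch32 (via the
 * 64-bit CP15 accessor above); earlier PMU versions provide only the 32-bit
 * view, hence the pmu_version checks in the two helpers above.
 */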

/* ARMv7 has no PMCCFILTR; select counter 31 via PMSELR and use PMXEVTYPER */
static inline void set_pmccfiltr(uint32_t value)
{
	write_sysreg(PMU_CYCLE_IDX, PMSELR);
	write_sysreg(value, PMXEVTYPER);
	isb();
}

/*
 * Extra instructions inserted by the compiler would be difficult to
 * compensate for, so hand assemble everything between, and including, the
 * PMCR accesses that start and stop counting. isb instructions are inserted
 * so that a PMCCNTR read after this function returns reflects exactly the
 * instructions executed in the controlled block.
 * Total instrs = isb + mcr + 2*loop = 2 + 2*loop.
 */
static inline void precise_instrs_loop(int loop, uint32_t pmcr)
{
	asm volatile(
	"	mcr	p15, 0, %[pmcr], c9, c12, 0\n"
	"	isb\n"
	"1:	subs	%[loop], %[loop], #1\n"
	"	bgt	1b\n"
	"	mcr	p15, 0, %[z], c9, c12, 0\n"
	"	isb\n"
	: [loop] "+r" (loop)
	: [pmcr] "r" (pmcr), [z] "r" (0)
	: "cc");
}
#elif defined(__aarch64__)
static inline uint32_t get_id_dfr0(void) { return read_sysreg(id_dfr0_el1); }
static inline uint32_t get_pmcr(void) { return read_sysreg(pmcr_el0); }
static inline void set_pmcr(uint32_t v) { write_sysreg(v, pmcr_el0); }
static inline uint64_t get_pmccntr(void) { return read_sysreg(pmccntr_el0); }
static inline void set_pmccntr(uint64_t v) { write_sysreg(v, pmccntr_el0); }
static inline void set_pmcntenset(uint32_t v) { write_sysreg(v, pmcntenset_el0); }
static inline void set_pmccfiltr(uint32_t v) { write_sysreg(v, pmccfiltr_el0); }

/*
 * Extra instructions inserted by the compiler would be difficult to
 * compensate for, so hand assemble everything between, and including, the
 * PMCR accesses that start and stop counting. isb instructions are inserted
 * so that a PMCCNTR read after this function returns reflects exactly the
 * instructions executed in the controlled block.
 * Total instrs = isb + msr + 2*loop = 2 + 2*loop.
 */
static inline void precise_instrs_loop(int loop, uint32_t pmcr)
{
	asm volatile(
	"	msr	pmcr_el0, %[pmcr]\n"
	"	isb\n"
	"1:	subs	%[loop], %[loop], #1\n"
	"	b.gt	1b\n"
	"	msr	pmcr_el0, xzr\n"
	"	isb\n"
	: [loop] "+r" (loop)
	: [pmcr] "r" (pmcr)
	: "cc");
}
#endif

/*
 * As a simple sanity check on the PMCR_EL0, ensure the implementer field
 * isn't zero. Also print out a couple of other interesting fields for
 * diagnostic purposes. For example, as of fall 2016, QEMU TCG mode doesn't
 * implement event counters and therefore reports zero event counters, but
 * hopefully support for at least the instructions event will be added in the
 * future and the reported number of event counters will become nonzero.
 */
static bool check_pmcr(void)
{
	uint32_t pmcr;

	pmcr = get_pmcr();

	report_info("PMU implementer/ID code/counters: 0x%x(\"%c\")/0x%x/%d",
		    (pmcr >> PMU_PMCR_IMP_SHIFT) & PMU_PMCR_IMP_MASK,
		    ((pmcr >> PMU_PMCR_IMP_SHIFT) & PMU_PMCR_IMP_MASK) ? : ' ',
		    (pmcr >> PMU_PMCR_ID_SHIFT) & PMU_PMCR_ID_MASK,
		    (pmcr >> PMU_PMCR_N_SHIFT) & PMU_PMCR_N_MASK);

	return ((pmcr >> PMU_PMCR_IMP_SHIFT) & PMU_PMCR_IMP_MASK) != 0;
}
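
/*
 * Worked example (illustrative value, not taken from any particular CPU):
 * a PMCR read of 0x41013000 would be reported above as implementer 0x41
 * ("A"), ID code 0x01, and (0x41013000 >> 11) & 0x1f = 6 event counters.
 */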

/*
 * Ensure that the cycle counter progresses between back-to-back reads.
 */
static bool check_cycles_increase(void)
{
	bool success = true;

	/* init before event access, this test only cares about cycle count */
	set_pmcntenset(1 << PMU_CYCLE_IDX);
	set_pmccfiltr(0); /* count cycles in EL0, EL1, but not EL2 */

	set_pmcr(get_pmcr() | PMU_PMCR_LC | PMU_PMCR_C | PMU_PMCR_E);

	for (int i = 0; i < NR_SAMPLES; i++) {
		uint64_t a, b;

		a = get_pmccntr();
		b = get_pmccntr();

		if (a >= b) {
			printf("Read %"PRId64" then %"PRId64".\n", a, b);
			success = false;
			break;
		}
	}

	set_pmcr(get_pmcr() & ~PMU_PMCR_E);

	return success;
}

/*
 * Execute a known number of guest instructions. Only even instruction counts
 * greater than or equal to 4 are supported by the in-line assembly code. The
 * control register (PMCR_EL0) is initialized with the provided value
 * (allowing, for example, the cycle counter or event counters to be reset).
 * At the end of the exact instruction loop, zero is written to PMCR_EL0 to
 * disable counting, allowing the cycle counter or event counters to be read
 * at the leisure of the calling code.
 */
static void measure_instrs(int num, uint32_t pmcr)
{
	int loop = (num - 2) / 2;

	assert(num >= 4 && ((num - 2) % 2 == 0));
	precise_instrs_loop(loop, pmcr);
}
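
/*
 * For example, measure_instrs(10, pmcr) runs precise_instrs_loop() with
 * loop = (10 - 2) / 2 = 4, and the measured window then contains
 * 2 + 2*4 = 10 instructions, matching the requested count.
 */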

/*
 * Measure cycle counts for various known instruction counts. Ensure that the
 * cycle counter progresses (similar to check_cycles_increase() but with more
 * instructions and using reset and stop controls). If a positive CPI
 * parameter is supplied, also strictly check that every measurement matches
 * it. Strict CPI checking is used to test -icount mode.
 */
static bool check_cpi(int cpi)
{
	uint32_t pmcr = get_pmcr() | PMU_PMCR_LC | PMU_PMCR_C | PMU_PMCR_E;

	/* init before event access, this test only cares about cycle count */
	set_pmcntenset(1 << PMU_CYCLE_IDX);
	set_pmccfiltr(0); /* count cycles in EL0, EL1, but not EL2 */

	if (cpi > 0)
		printf("Checking for CPI=%d.\n", cpi);
	printf("instrs : cycles0 cycles1 ...\n");

	for (unsigned int i = 4; i < 300; i += 32) {
		uint64_t avg, sum = 0;

		printf("%4u:", i);
		for (int j = 0; j < NR_SAMPLES; j++) {
			uint64_t cycles;

			set_pmccntr(0);
			measure_instrs(i, pmcr);
			cycles = get_pmccntr();
			printf(" %4"PRId64"", cycles);

			if (!cycles) {
				printf("\ncycles not incrementing!\n");
				return false;
			} else if (cpi > 0 && cycles != i * cpi) {
				printf("\nunexpected cycle count received!\n");
				return false;
			} else if ((cycles >> 32) != 0) {
				/*
				 * The cycles taken by the loop above should
				 * fit in 32 bits easily. Check the upper
				 * 32 bits of the cycle counter to make sure
				 * there is no surprise.
				 */
				printf("\ncycle count bigger than 32 bits!\n");
				return false;
			}

			sum += cycles;
		}
		avg = sum / NR_SAMPLES;
		printf(" avg=%-4"PRId64" %s=%-3"PRId64"\n", avg,
		       (avg >= i) ? "cpi" : "ipc",
		       (avg >= i) ? avg / i : i / avg);
	}

	return true;
}
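
/*
 * Note: the strict-CPI value checked above reaches main() as argv[1]. How it
 * gets there depends on the harness; with the kvm-unit-tests scripts, guest
 * arguments are normally passed through QEMU's -append option (for instance,
 * an illustrative "-append 1" would request strict checking with CPI=1),
 * typically combined with TCG's -icount mode so the cycle count is
 * deterministic.
 */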

/* Return FALSE if no PMU found, otherwise return TRUE */
bool pmu_probe(void)
{
	uint32_t dfr0;

	/* probe pmu version */
	dfr0 = get_id_dfr0();
	pmu_version = (dfr0 >> ID_DFR0_PERFMON_SHIFT) & ID_DFR0_PERFMON_MASK;

	if (pmu_version)
		report_info("PMU version: %d", pmu_version);

	return pmu_version;
}

int main(int argc, char *argv[])
{
	int cpi = 0;

	if (argc > 1)
		cpi = atol(argv[1]);

	if (!pmu_probe()) {
		printf("No PMU found, test skipped...\n");
		return report_summary();
	}

	report_prefix_push("pmu");

	report("Control register", check_pmcr());
	report("Monotonically increasing cycle count", check_cycles_increase());
	report("Cycle/instruction ratio", check_cpi(cpi));

	return report_summary();
}