Lines Matching +full:0 +full:x0
66 GP = 0,
71 0x00c4, /* PERF_COUNT_HW_BRANCH_INSTRUCTIONS */
72 0x00c5, /* PERF_COUNT_HW_BRANCH_MISSES */
73 0x0300, /* PERF_COUNT_HW_REF_CPU_CYCLES */
74 0x003c, /* PERF_COUNT_HW_CPU_CYCLES */
75 0x00c0, /* PERF_COUNT_HW_INSTRUCTIONS */
76 0x013c, /* PERF_COUNT_HW_BUS_CYCLES */
77 0x4f2e, /* PERF_COUNT_HW_CACHE_REFERENCES */
78 0x412e, /* PERF_COUNT_HW_CACHE_MISSES */
84 0,
85 0xfffffffffff0,
87 0xfffffffffffe,
89 0xffffffffffff,
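
The hits at source lines 71-78 and 84-89 read like entries of two tables: raw Intel event-select encodings for the architectural perf events, and the start values the test loads into a counter before each run, including values a few events short of 48-bit overflow. A minimal sketch of how such tables look; counter_start_values is the name used by the ARRAY_SIZE() call at line 423, while intel_arch_events is only a guessed name:

    #include <stdint.h>

    /* Hypothetical reconstruction around the listed hits. */
    static const uint64_t intel_arch_events[] = {
            0x00c4, /* PERF_COUNT_HW_BRANCH_INSTRUCTIONS */
            0x00c5, /* PERF_COUNT_HW_BRANCH_MISSES */
            0x0300, /* PERF_COUNT_HW_REF_CPU_CYCLES */
            0x003c, /* PERF_COUNT_HW_CPU_CYCLES */
            0x00c0, /* PERF_COUNT_HW_INSTRUCTIONS */
            0x013c, /* PERF_COUNT_HW_BUS_CYCLES */
            0x4f2e, /* PERF_COUNT_HW_CACHE_REFERENCES */
            0x412e, /* PERF_COUNT_HW_CACHE_MISSES */
    };

    static const uint64_t counter_start_values[] = {
            0,                  /* count up from zero */
            0xfffffffffff0,     /* a few events below 48-bit overflow */
            0xfffffffffffe,     /* overflows after two events */
            0xffffffffffff,     /* overflows on the first event */
    };
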
114 apic_write(APIC_EOI, 0); in cnt_overflow()
120 "mov $0x0, %%eax\n" in workload()
121 "cmp $0x0, %%eax\n" in workload()
126 "mov $0x0, %%eax\n" in workload()
127 "cmp $0x0, %%eax\n" in workload()
132 "mov $0xa, %%eax\n" in workload()
134 "mov $0xa, %%eax\n" in workload()
136 "mov $0xa, %%eax\n" in workload()
138 "mov $0xa, %%eax\n" in workload()
140 "mov $0xa, %%eax\n" in workload()
142 "mov $0xa, %%eax\n" in workload()
153 "mov $0x0, %%eax\n" in workload2()
154 "cmp $0x0, %%eax\n" in workload2()
159 "mov $0x0, %%eax\n" in workload2()
160 "cmp $0x0, %%eax\n" in workload2()
165 "mov $0xa, %%eax\n" in workload2()
167 "mov $0xa, %%eax\n" in workload2()
169 "mov $0xa, %%eax\n" in workload2()
171 "mov $0xa, %%eax\n" in workload2()
173 "mov $0xa, %%eax\n" in workload2()
175 "mov $0xa, %%eax\n" in workload2()
187 memset(ds_bufer, 0x0, PAGE_SIZE); in alloc_buffers()
191 memset(pebs_buffer, 0x0, PAGE_SIZE); in alloc_buffers()
206 u64 adaptive_ctrl = 0, fixed_ctr_ctrl = 0; in pebs_enable()
218 for (idx = 0; idx < pmu.nr_fixed_counters; idx++) { in pebs_enable()
224 fixed_ctr_ctrl |= (0xbULL << (idx * 4) | adaptive_ctrl); in pebs_enable()
229 for (idx = 0; idx < max_nr_gp_events; idx++) { in pebs_enable()
246 memset(ds_bufer, 0x0, PAGE_SIZE); in reset_pebs()
247 memset(pebs_buffer, 0x0, PAGE_SIZE); in reset_pebs()
248 wrmsr(MSR_IA32_PEBS_ENABLE, 0); in reset_pebs()
249 wrmsr(MSR_IA32_DS_AREA, 0); in reset_pebs()
251 wrmsr(MSR_PEBS_DATA_CFG, 0); in reset_pebs()
253 wrmsr(MSR_CORE_PERF_GLOBAL_CTRL, 0); in reset_pebs()
267 wrmsr(MSR_IA32_PEBS_ENABLE, 0); in pebs_disable()
269 wrmsr(MSR_CORE_PERF_GLOBAL_CTRL, 0); in pebs_disable()
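
The alloc_buffers()/pebs_enable()/reset_pebs()/pebs_disable() hits together describe the usual PEBS bring-up and tear-down: zero a DS management page and a PEBS output page, point IA32_DS_AREA at the management page, set the 0xb nibble per fixed counter in IA32_FIXED_CTR_CTRL (count in ring 0 and ring 3, PMI on overflow; the adaptive_ctrl OR'd in at line 224 additionally requests adaptive records for that counter), mirror the counter selection into IA32_PEBS_ENABLE and IA32_PERF_GLOBAL_CTRL, and on reset write 0 back into each of these MSRs and memset() both pages. A hedged sketch of the enable side, using SDM MSR numbers and a local wrmsr() helper rather than the test's own:

    #include <stdint.h>

    #define MSR_IA32_FIXED_CTR_CTRL   0x38d
    #define MSR_CORE_PERF_GLOBAL_CTRL 0x38f
    #define MSR_IA32_PEBS_ENABLE      0x3f1
    #define MSR_PEBS_DATA_CFG         0x3f2
    #define MSR_IA32_DS_AREA          0x600

    static inline void wrmsr(uint32_t msr, uint64_t val)
    {
            asm volatile("wrmsr" :: "c"(msr), "a"((uint32_t)val), "d"((uint32_t)(val >> 32)));
    }

    /* Sketch only: enable PEBS on the fixed counters. Fixed counter i lives at
     * bit (32 + i) in both IA32_PEBS_ENABLE and IA32_PERF_GLOBAL_CTRL. A real
     * setup must also fill the DS area's PEBS buffer base/index/threshold fields. */
    static void pebs_enable_fixed_sketch(void *ds_area, unsigned int nr_fixed,
                                         uint64_t pebs_data_cfg)
    {
            uint64_t fixed_ctr_ctrl = 0, enable_bits = 0;
            unsigned int idx;

            wrmsr(MSR_IA32_DS_AREA, (uintptr_t)ds_area);
            wrmsr(MSR_PEBS_DATA_CFG, pebs_data_cfg);

            for (idx = 0; idx < nr_fixed; idx++) {
                    fixed_ctr_ctrl |= 0xbULL << (idx * 4);  /* OS | USR | PMI */
                    enable_bits |= 1ULL << (32 + idx);
            }

            wrmsr(MSR_IA32_FIXED_CTR_CTRL, fixed_ctr_ctrl);
            wrmsr(MSR_IA32_PEBS_ENABLE, enable_bits);
            wrmsr(MSR_CORE_PERF_GLOBAL_CTRL, enable_bits);
    }

reset_pebs() at lines 246-253 is the inverse of this: clear both pages and write 0 into PEBS_ENABLE, DS_AREA, PEBS_DATA_CFG and GLOBAL_CTRL before re-arming the next run.
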
277 unsigned int count = 0; in check_pebs_records()
300 data_cfg_match = (pebs_rec->format_size & GENMASK_ULL(47, 0)) == in check_pebs_records()
301 (use_adaptive ? pebs_data_cfg : 0); in check_pebs_records()
310 for (i = 0; i < MAX_NUM_LBR_ENTRY; i++) { in check_pebs_records()
324 printf("FAIL: The applicable_counters (0x%lx) doesn't match with pmc_bitmask (0x%lx).\n", in check_pebs_records()
330 printf("FAIL: The pebs_data_cfg (0x%lx) doesn't match with the effective MSR_PEBS_DATA_CFG (0x%lx).\n", in check_pebs_records()
331 pebs_rec->format_size & 0xffffffffffff, use_adaptive ? pebs_data_cfg : 0); in check_pebs_records()
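
check_pebs_records() walks the PEBS output buffer record by record and, for each one, verifies that applicable_counters intersects the expected counter bitmask and that the low 48 bits of format_size echo the programmed MSR_PEBS_DATA_CFG, or 0 when adaptive PEBS is not in use (lines 300-301 and 331). A sketch of that walk; the struct is modeled on the SDM's adaptive-PEBS basic group and, like the helper name, is an assumption:

    #include <stdint.h>
    #include <stdbool.h>

    /* Assumed layout of the PEBS "basic info" group (first four qwords of a record). */
    struct pebs_basic {
            uint64_t format_size;           /* [63:48] record size, [47:0] data-cfg echo */
            uint64_t ip;
            uint64_t applicable_counters;
            uint64_t tsc;
    };

    /* buf_end should be the DS area's current pebs_index, i.e. the end of the
     * records actually written, not the end of the page. */
    static bool check_records_sketch(char *buf, char *buf_end, uint64_t bitmask,
                                     uint64_t pebs_data_cfg, bool use_adaptive)
    {
            char *cur = buf;
            bool ok = true;

            while (cur + sizeof(struct pebs_basic) <= buf_end) {
                    struct pebs_basic *rec = (struct pebs_basic *)cur;
                    uint64_t record_size = rec->format_size >> 48;
                    uint64_t cfg_echo = rec->format_size & ((1ULL << 48) - 1);

                    if (!(rec->applicable_counters & bitmask))
                            ok = false;     /* written by an unexpected counter */
                    if (cfg_echo != (use_adaptive ? pebs_data_cfg : 0))
                            ok = false;     /* adaptive config not reflected as expected */

                    if (!record_size)       /* malformed record; avoid an endless walk */
                            break;
                    cur += record_size;     /* records are variable-sized */
            }
            return ok;
    }
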
340 report_prefix_pushf("%s counter %d (0x%lx)", in check_one_counter()
357 pebs_disable(0); in check_multiple_counters()
364 u64 bitmask = 0; in check_pebs_counters()
366 for (idx = 0; has_baseline && idx < pmu.nr_fixed_counters; idx++) in check_pebs_counters()
369 for (idx = 0; idx < max_nr_gp_events; idx++) in check_pebs_counters()
372 for (idx = 0; has_baseline && idx < pmu.nr_fixed_counters; idx++) in check_pebs_counters()
374 for (idx = 0; idx < max_nr_gp_events; idx += 2) in check_pebs_counters()
376 report_prefix_pushf("Multiple (0x%lx)", bitmask); in check_pebs_counters()
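
check_pebs_counters() first exercises each fixed and GP counter on its own (lines 366-369) and then builds a combined bitmask, fixed counters at bit 32 + index and every other GP counter at its own bit index (lines 372-374), for the "Multiple (0x...)" run at line 376. A sketch of that bitmask construction, reusing the <stdint.h>/<stdbool.h> types from the sketches above; the parameters stand in for the test's own PMU state:

    /* Fixed counters only join the multi-counter run with PEBS baseline support. */
    static uint64_t build_multi_bitmask_sketch(bool has_baseline, unsigned int nr_fixed,
                                               unsigned int max_nr_gp)
    {
            uint64_t bitmask = 0;
            unsigned int idx;

            for (idx = 0; has_baseline && idx < nr_fixed; idx++)
                    bitmask |= 1ULL << (32 + idx);  /* fixed counter bits start at 32 */
            for (idx = 0; idx < max_nr_gp; idx += 2)
                    bitmask |= 1ULL << idx;         /* every other GP counter */

            return bitmask;
    }
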
423 for (i = 0; i < ARRAY_SIZE(counter_start_values); i++) { in main()
425 check_pebs_counters(0, false); in main()
429 for (j = 0; j <= PEBS_DATACFG_MASK; j++) { in main()
435 report_prefix_pushf("Adaptive (0x%lx)", pebs_data_cfg); in main()
439 report_prefix_pushf("Ignored Adaptive (0x%lx)", pebs_data_cfg); in main()
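
Finally, main() repeats the whole sweep once per counter_start_values entry: a non-adaptive pass with the data config left at 0 (line 425), then, for every config value up to PEBS_DATACFG_MASK, an "Adaptive" pass where records are expected to honor the config and an "Ignored Adaptive" pass where they are not. A sketch of that outer loop; the PEBS_DATACFG_MASK value and the check_pebs_counters() signature are only inferred from the listed calls:

    #include <stdint.h>
    #include <stdbool.h>

    /* Assumed: the four adaptive groups (MEMINFO, GP, XMMS, LBRS) occupy bits 0-3. */
    #define PEBS_DATACFG_MASK 0xf

    /* Signature inferred from the call at line 425. */
    extern void check_pebs_counters(uint64_t pebs_data_cfg, bool use_adaptive);

    static void run_all_sketch(unsigned int nr_start_values)
    {
            unsigned int i, j;

            for (i = 0; i < nr_start_values; i++) {
                    /* The test would load counter_start_values[i] into the counters here. */

                    /* Non-adaptive pass: records stay in the basic format. */
                    check_pebs_counters(0, false);

                    for (j = 0; j <= PEBS_DATACFG_MASK; j++) {
                            uint64_t pebs_data_cfg = j;

                            /* "Adaptive (0x%lx)": records must echo the config. */
                            check_pebs_counters(pebs_data_cfg, true);
                            /* "Ignored Adaptive (0x%lx)": same config, basic records only. */
                            check_pebs_counters(pebs_data_cfg, false);
                    }
            }
    }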