Lines matching "1" and "c"
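The first block of matches below appears to be the amd_hw_cache_event_ids table used for pre-Family-17h parts: it maps perf's generic (cache, operation, result) triples to raw PMU event codes. By the usual perf convention, an entry of 0 means the combination is not counted on this hardware and -1 means the combination is not meaningful.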

[ C(L1D) ] = {
	[ C(OP_READ) ] = {
		[ C(RESULT_ACCESS) ] = 0x0040, /* Data Cache Accesses         */
		[ C(RESULT_MISS)   ] = 0x0141, /* Data Cache Misses           */
	},
	[ C(OP_WRITE) ] = {
		[ C(RESULT_ACCESS) ] = 0,
		[ C(RESULT_MISS)   ] = 0,
	},
	[ C(OP_PREFETCH) ] = {
		[ C(RESULT_ACCESS) ] = 0x0267, /* Data Prefetcher :attempts   */
		[ C(RESULT_MISS)   ] = 0x0167, /* Data Prefetcher :cancelled  */
	},
},
[ C(L1I) ] = {
	[ C(OP_READ) ] = {
		[ C(RESULT_ACCESS) ] = 0x0080, /* Instruction cache fetches   */
		[ C(RESULT_MISS)   ] = 0x0081, /* Instruction cache misses    */
	},
	[ C(OP_WRITE) ] = {
		[ C(RESULT_ACCESS) ] = -1,
		[ C(RESULT_MISS)   ] = -1,
	},
	[ C(OP_PREFETCH) ] = {
		[ C(RESULT_ACCESS) ] = 0x014B, /* Prefetch Instructions :Load */
		[ C(RESULT_MISS)   ] = 0,
	},
},
[ C(LL) ] = {
	[ C(OP_READ) ] = {
		[ C(RESULT_ACCESS) ] = 0x037D, /* Requests to L2 Cache :IC+DC */
		[ C(RESULT_MISS)   ] = 0x037E, /* L2 Cache Misses :IC+DC      */
	},
	[ C(OP_WRITE) ] = {
		[ C(RESULT_ACCESS) ] = 0x017F, /* L2 Fill/Writeback           */
		[ C(RESULT_MISS)   ] = 0,
	},
	[ C(OP_PREFETCH) ] = {
		[ C(RESULT_ACCESS) ] = 0,
		[ C(RESULT_MISS)   ] = 0,
	},
},
[ C(DTLB) ] = {
	[ C(OP_READ) ] = {
		[ C(RESULT_ACCESS) ] = 0x0040, /* Data Cache Accesses          */
		[ C(RESULT_MISS)   ] = 0x0746, /* L1_DTLB_AND_L2_DTLB_MISS.ALL */
	},
	[ C(OP_WRITE) ] = {
		[ C(RESULT_ACCESS) ] = 0,
		[ C(RESULT_MISS)   ] = 0,
	},
	[ C(OP_PREFETCH) ] = {
		[ C(RESULT_ACCESS) ] = 0,
		[ C(RESULT_MISS)   ] = 0,
	},
},
[ C(ITLB) ] = {
	[ C(OP_READ) ] = {
		[ C(RESULT_ACCESS) ] = 0x0080, /* Instruction fetches          */
		[ C(RESULT_MISS)   ] = 0x0385, /* L1_ITLB_AND_L2_ITLB_MISS.ALL */
	},
	[ C(OP_WRITE) ] = {
		[ C(RESULT_ACCESS) ] = -1,
		[ C(RESULT_MISS)   ] = -1,
	},
	[ C(OP_PREFETCH) ] = {
		[ C(RESULT_ACCESS) ] = -1,
		[ C(RESULT_MISS)   ] = -1,
	},
},
[ C(BPU) ] = {
	[ C(OP_READ) ] = {
		[ C(RESULT_ACCESS) ] = 0x00c2, /* Retired Branch Instr.       */
		[ C(RESULT_MISS)   ] = 0x00c3, /* Retired Mispredicted BI     */
	},
	[ C(OP_WRITE) ] = {
		[ C(RESULT_ACCESS) ] = -1,
		[ C(RESULT_MISS)   ] = -1,
	},
	[ C(OP_PREFETCH) ] = {
		[ C(RESULT_ACCESS) ] = -1,
		[ C(RESULT_MISS)   ] = -1,
	},
},
[ C(NODE) ] = {
	[ C(OP_READ) ] = {
		[ C(RESULT_ACCESS) ] = 0xb8e9, /* CPU Request to Memory, l+r  */
		[ C(RESULT_MISS)   ] = 0x98e9, /* CPU Request to Memory, r    */
	},
	[ C(OP_WRITE) ] = {
		[ C(RESULT_ACCESS) ] = -1,
		[ C(RESULT_MISS)   ] = -1,
	},
	[ C(OP_PREFETCH) ] = {
		[ C(RESULT_ACCESS) ] = -1,
		[ C(RESULT_MISS)   ] = -1,
	},
},
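As a rough illustration (a minimal sketch under the convention above, with a made-up helper name and assuming the usual kernel headers, not the kernel's exact code), this is how such a table would be consumed when a generic cache event is set up:

static int lookup_cache_event(const u64 (*ids)[PERF_COUNT_HW_CACHE_OP_MAX]
					      [PERF_COUNT_HW_CACHE_RESULT_MAX],
			      unsigned int type, unsigned int op,
			      unsigned int result, u64 *config)
{
	u64 val;

	if (type >= PERF_COUNT_HW_CACHE_MAX ||
	    op >= PERF_COUNT_HW_CACHE_OP_MAX ||
	    result >= PERF_COUNT_HW_CACHE_RESULT_MAX)
		return -EINVAL;

	val = ids[type][op][result];
	if (val == 0)
		return -ENOENT;		/* not counted on this CPU */
	if (val == -1)
		return -EINVAL;		/* combination makes no sense */

	*config = val;
	return 0;
}

The second block of matches is the same mapping, apparently the Family 17h and later variant: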
[C(L1D)] = {
	[C(OP_READ)] = {
		[C(RESULT_ACCESS)] = 0x0040, /* Data Cache Accesses */
		[C(RESULT_MISS)]   = 0xc860, /* L2$ access from DC Miss */
	},
	[C(OP_WRITE)] = {
		[C(RESULT_ACCESS)] = 0,
		[C(RESULT_MISS)]   = 0,
	},
	[C(OP_PREFETCH)] = {
		[C(RESULT_ACCESS)] = 0xff5a, /* h/w prefetch DC Fills */
		[C(RESULT_MISS)]   = 0,
	},
},
[C(L1I)] = {
	[C(OP_READ)] = {
		[C(RESULT_ACCESS)] = 0x0080, /* Instruction cache fetches */
		[C(RESULT_MISS)]   = 0x0081, /* Instruction cache misses  */
	},
	[C(OP_WRITE)] = {
		[C(RESULT_ACCESS)] = -1,
		[C(RESULT_MISS)]   = -1,
	},
	[C(OP_PREFETCH)] = {
		[C(RESULT_ACCESS)] = 0,
		[C(RESULT_MISS)]   = 0,
	},
},
[C(LL)] = {
	[C(OP_READ)] = {
		[C(RESULT_ACCESS)] = 0,
		[C(RESULT_MISS)]   = 0,
	},
	[C(OP_WRITE)] = {
		[C(RESULT_ACCESS)] = 0,
		[C(RESULT_MISS)]   = 0,
	},
	[C(OP_PREFETCH)] = {
		[C(RESULT_ACCESS)] = 0,
		[C(RESULT_MISS)]   = 0,
	},
},
[C(DTLB)] = {
	[C(OP_READ)] = {
		[C(RESULT_ACCESS)] = 0xff45, /* All L2 DTLB accesses */
		[C(RESULT_MISS)]   = 0xf045, /* L2 DTLB misses (PT walks) */
	},
	[C(OP_WRITE)] = {
		[C(RESULT_ACCESS)] = 0,
		[C(RESULT_MISS)]   = 0,
	},
	[C(OP_PREFETCH)] = {
		[C(RESULT_ACCESS)] = 0,
		[C(RESULT_MISS)]   = 0,
	},
},
[C(ITLB)] = {
	[C(OP_READ)] = {
		[C(RESULT_ACCESS)] = 0x0084, /* L1 ITLB misses, L2 ITLB hits */
		[C(RESULT_MISS)]   = 0xff85, /* L1 ITLB misses, L2 misses */
	},
	[C(OP_WRITE)] = {
		[C(RESULT_ACCESS)] = -1,
		[C(RESULT_MISS)]   = -1,
	},
	[C(OP_PREFETCH)] = {
		[C(RESULT_ACCESS)] = -1,
		[C(RESULT_MISS)]   = -1,
	},
},
[C(BPU)] = {
	[C(OP_READ)] = {
		[C(RESULT_ACCESS)] = 0x00c2, /* Retired Branch Instr. */
		[C(RESULT_MISS)]   = 0x00c3, /* Retired Mispredicted BI */
	},
	[C(OP_WRITE)] = {
		[C(RESULT_ACCESS)] = -1,
		[C(RESULT_MISS)]   = -1,
	},
	[C(OP_PREFETCH)] = {
		[C(RESULT_ACCESS)] = -1,
		[C(RESULT_MISS)]   = -1,
	},
},
[C(NODE)] = {
	[C(OP_READ)] = {
		[C(RESULT_ACCESS)] = 0,
		[C(RESULT_MISS)]   = 0,
	},
	[C(OP_WRITE)] = {
		[C(RESULT_ACCESS)] = -1,
		[C(RESULT_MISS)]   = -1,
	},
	[C(OP_PREFETCH)] = {
		[C(RESULT_ACCESS)] = -1,
		[C(RESULT_MISS)]   = -1,
	},
},
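A plausible selection step, sketched under the assumption that hw_cache_event_ids is the array the perf core actually indexes (the real initialization code may differ in detail), copies one of the two tables at PMU init time based on the CPU family:

	if (boot_cpu_data.x86 >= 0x17)
		memcpy(hw_cache_event_ids, amd_hw_cache_event_ids_f17h,
		       sizeof(hw_cache_event_ids));
	else
		memcpy(hw_cache_event_ids, amd_hw_cache_event_ids,
		       sizeof(hw_cache_event_ids));

The remaining matches are scattered single lines from the rest of the driver, kept here with their enclosing function noted.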
* 4 counters starting at 0xc0010000 each offset by 1
offset = index << 1; in amd_pmu_addr_offset()
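These two lines concern counter MSR addressing: legacy counters start at 0xc0010000 with a stride of one MSR, while parts with the core performance counter extension interleave event-select and counter MSRs starting at 0xc0010200, so each index is offset by two, hence the index << 1. A small illustrative helper (the function name is made up; the bases correspond to MSR_K7_EVNTSEL0 and MSR_F15H_PERF_CTL):

static inline unsigned int example_eventsel_msr(int index, bool core_ext)
{
	/* legacy: 4 counters, stride 1; core extension: 6 counters, stride 2 */
	return core_ext ? 0xc0010200 + (index << 1)
			: 0xc0010000 + index;
}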
* When HO == GO == 1 the hardware treats that as GO == HO == 0 in amd_core_hw_config()
return nb && nb->nb_id != -1; in amd_has_nb()
struct event_constraint *c) in __amd_get_nb_event_constraints()
int idx, new = -1; in __amd_get_nb_event_constraints()
if (!c) in __amd_get_nb_event_constraints()
c = &unconstrained; in __amd_get_nb_event_constraints()
return c; in __amd_get_nb_event_constraints()
for_each_set_bit(idx, c->idxmsk, x86_pmu.num_counters) { in __amd_get_nb_event_constraints()
if (new == -1 || hwc->idx == idx) in __amd_get_nb_event_constraints()
if (new != -1) in __amd_get_nb_event_constraints()
if (new == -1) in __amd_get_nb_event_constraints()
nb->nb_id = -1; in amd_alloc_nb()
nb->event_constraints[i].weight = 1; in amd_alloc_nb()
if (nb->nb_id == -1 || --nb->refcnt == 0) in amd_pmu_cpu_dead()
if (counter & (1ULL << (x86_pmu.cntval_bits - 1))) in amd_pmu_wait_on_overflow()
udelay(1); in amd_pmu_wait_on_overflow()
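The amd_pmu_wait_on_overflow() lines are part of a bounded busy-wait: perf programs each counter with a negated period, so a clear top (sign) bit means the counter has overflowed and has not yet been reprogrammed by the interrupt handler. A sketch of the surrounding loop (the iteration bound and local declarations are assumptions):

	unsigned int i;
	u64 counter;

	for (i = 0; i < 50; i++) {
		rdmsrl(x86_pmu_event_addr(idx), counter);
		if (counter & (1ULL << (x86_pmu.cntval_bits - 1)))
			break;	/* sign bit set again: counter was reprogrammed */

		/* may run in IRQ context, so busy-wait rather than sleep */
		udelay(1);
	}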
if (hweight_long(hwc->config & ARCH_PERFMON_EVENTSEL_UMASK) <= 1) in amd_get_event_constraints_f15h()
if (hweight_long(hwc->config & ARCH_PERFMON_EVENTSEL_UMASK) <= 1) in amd_get_event_constraints_f15h()
/* moved to uncore.c */ in amd_get_event_constraints_f15h()
.cntval_mask = (1ULL << 48) - 1,
.apic = 1,
.max_period = (1ULL << 47) - 1,
.amd_nb_constraints = 1,
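The structure fields above describe 48-bit counters: cntval_mask selects the 48 implemented bits, and max_period is capped at half that range so the counter, programmed with the negated period, always starts with its top bit set. Spelled out (illustrative values only):

	/* (1ULL << 48) - 1 == 0x0000ffffffffffff, the 48 counter bits        */
	/* (1ULL << 47) - 1 == 0x00007fffffffffff, so -period keeps bit 47 set */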
* the amd/uncore.c driver. in amd_core_pmu_init()
for (i = 0; i < x86_pmu.num_counters - 1; i += 2) in amd_core_pmu_init()
even_ctr_mask |= 1 << i; in amd_core_pmu_init()
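The amd_core_pmu_init() loop above builds a mask of the even-numbered counters (0, 2, 4, ...); on Family 17h and later this appears to feed the constraint for events that must occupy an even/odd counter pair. For example, with six core counters (an assumed value):

	unsigned int even_ctr_mask = 0, i;

	for (i = 0; i < 6 - 1; i += 2)
		even_ctr_mask |= 1 << i;	/* -> 0x15: counters 0, 2 and 4 */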
if (num_possible_cpus() == 1) { in amd_pmu_init()