/*
 * Helpers for TLBI insns
 *
 * This code is licensed under the GNU GPL v2 or later.
 *
 * SPDX-License-Identifier: GPL-2.0-or-later
 */
#include "qemu/osdep.h"
#include "qemu/log.h"
#include "exec/cputlb.h"
#include "exec/target_page.h"
#include "cpu.h"
#include "internals.h"
#include "cpu-features.h"
#include "cpregs.h"

/* Check for traps from EL1 due to HCR_EL2.TTLB. */
static CPAccessResult access_ttlb(CPUARMState *env, const ARMCPRegInfo *ri,
                                  bool isread)
{
    if (arm_current_el(env) == 1 && (arm_hcr_el2_eff(env) & HCR_TTLB)) {
        return CP_ACCESS_TRAP_EL2;
    }
    return CP_ACCESS_OK;
}

/* Check for traps from EL1 due to HCR_EL2.TTLB or TTLBIS. */
static CPAccessResult access_ttlbis(CPUARMState *env, const ARMCPRegInfo *ri,
                                    bool isread)
{
    if (arm_current_el(env) == 1 &&
        (arm_hcr_el2_eff(env) & (HCR_TTLB | HCR_TTLBIS))) {
        return CP_ACCESS_TRAP_EL2;
    }
    return CP_ACCESS_OK;
}

/* Check for traps from EL1 due to HCR_EL2.TTLB or TTLBOS. */
static CPAccessResult access_ttlbos(CPUARMState *env, const ARMCPRegInfo *ri,
                                    bool isread)
{
    if (arm_current_el(env) == 1 &&
        (arm_hcr_el2_eff(env) & (HCR_TTLB | HCR_TTLBOS))) {
        return CP_ACCESS_TRAP_EL2;
    }
    return CP_ACCESS_OK;
}

/* IS variants of TLB operations must affect all cores */
static void tlbiall_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                             uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_all_cpus_synced(cs);
}

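/*
 * Note that QEMU does not implement flush-by-ASID, so the ASID- and
 * MVA-based IS variants below are conservative: TLBIASIDIS flushes the
 * whole TLB and TLBIMVAIS/TLBIMVAAIS flush the page for every ASID.
 */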
static void tlbiasid_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                              uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_all_cpus_synced(cs);
}

static void tlbimva_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                             uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_page_all_cpus_synced(cs, value & TARGET_PAGE_MASK);
}

static void tlbimvaa_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                              uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_page_all_cpus_synced(cs, value & TARGET_PAGE_MASK);
}

/*
 * Non-IS variants of TLB operations are upgraded to
 * IS versions if we are at EL1 and HCR_EL2.FB is effectively set to
 * force broadcast of these operations.
 */
static bool tlb_force_broadcast(CPUARMState *env)
{
    return arm_current_el(env) == 1 && (arm_hcr_el2_eff(env) & HCR_FB);
}

static void tlbiall_write(CPUARMState *env, const ARMCPRegInfo *ri,
                          uint64_t value)
{
    /* Invalidate all (TLBIALL) */
    CPUState *cs = env_cpu(env);

    if (tlb_force_broadcast(env)) {
        tlb_flush_all_cpus_synced(cs);
    } else {
        tlb_flush(cs);
    }
}

static void tlbimva_write(CPUARMState *env, const ARMCPRegInfo *ri,
                          uint64_t value)
{
    /* Invalidate single TLB entry by MVA and ASID (TLBIMVA) */
    CPUState *cs = env_cpu(env);

    value &= TARGET_PAGE_MASK;
    if (tlb_force_broadcast(env)) {
        tlb_flush_page_all_cpus_synced(cs, value);
    } else {
        tlb_flush_page(cs, value);
    }
}

static void tlbiasid_write(CPUARMState *env, const ARMCPRegInfo *ri,
                           uint64_t value)
{
    /* Invalidate by ASID (TLBIASID) */
    CPUState *cs = env_cpu(env);

    if (tlb_force_broadcast(env)) {
        tlb_flush_all_cpus_synced(cs);
    } else {
        tlb_flush(cs);
    }
}

static void tlbimvaa_write(CPUARMState *env, const ARMCPRegInfo *ri,
                           uint64_t value)
{
    /* Invalidate single entry by MVA, all ASIDs (TLBIMVAA) */
    CPUState *cs = env_cpu(env);

    value &= TARGET_PAGE_MASK;
    if (tlb_force_broadcast(env)) {
        tlb_flush_page_all_cpus_synced(cs, value);
    } else {
        tlb_flush_page(cs, value);
    }
}

static void tlbimva_hyp_write(CPUARMState *env, const ARMCPRegInfo *ri,
                              uint64_t value)
{
    CPUState *cs = env_cpu(env);
    uint64_t pageaddr = value & ~MAKE_64BIT_MASK(0, 12);

    tlb_flush_page_by_mmuidx(cs, pageaddr, ARMMMUIdxBit_E2);
}

static void tlbimva_hyp_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                 uint64_t value)
{
    CPUState *cs = env_cpu(env);
    uint64_t pageaddr = value & ~MAKE_64BIT_MASK(0, 12);

    tlb_flush_page_by_mmuidx_all_cpus_synced(cs, pageaddr,
                                             ARMMMUIdxBit_E2);
}

static void tlbiipas2_hyp_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                uint64_t value)
{
    CPUState *cs = env_cpu(env);
    uint64_t pageaddr = (value & MAKE_64BIT_MASK(0, 28)) << 12;

    tlb_flush_page_by_mmuidx(cs, pageaddr, ARMMMUIdxBit_Stage2);
}

static void tlbiipas2is_hyp_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    CPUState *cs = env_cpu(env);
    uint64_t pageaddr = (value & MAKE_64BIT_MASK(0, 28)) << 12;

    tlb_flush_page_by_mmuidx_all_cpus_synced(cs, pageaddr, ARMMMUIdxBit_Stage2);
}

static void tlbiall_nsnh_write(CPUARMState *env, const ARMCPRegInfo *ri,
                               uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_by_mmuidx(cs, alle1_tlbmask(env));
}

static void tlbiall_nsnh_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, alle1_tlbmask(env));
}

static void tlbiall_hyp_write(CPUARMState *env, const ARMCPRegInfo *ri,
                              uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_by_mmuidx(cs, ARMMMUIdxBit_E2);
}

static void tlbiall_hyp_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                 uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, ARMMMUIdxBit_E2);
}

/*
 * See: D4.7.2 TLB maintenance requirements and the TLB maintenance instructions
 * Page D4-1736 (DDI0487A.b)
 */

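/*
 * Return the set of MMU index bits touched by a VMALLE1/VAE1-style
 * invalidation: the EL2&0 regime when HCR_EL2.{E2H,TGE} are both set
 * (i.e. EL0/EL1 accesses use the EL2&0 translation regime), otherwise
 * the EL1&0 regime.
 */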
static int vae1_tlbmask(CPUARMState *env)
{
    uint64_t hcr = arm_hcr_el2_eff(env);
    uint16_t mask;

    assert(arm_feature(env, ARM_FEATURE_AARCH64));

    if ((hcr & (HCR_E2H | HCR_TGE)) == (HCR_E2H | HCR_TGE)) {
        mask = ARMMMUIdxBit_E20_2 |
               ARMMMUIdxBit_E20_2_PAN |
               ARMMMUIdxBit_E20_0;
    } else {
        /* This is AArch64 only, so we don't need to touch the EL30_x TLBs */
        mask = ARMMMUIdxBit_E10_1 |
               ARMMMUIdxBit_E10_1_PAN |
               ARMMMUIdxBit_E10_0;
    }
    return mask;
}

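/*
 * Likewise for VAE2-style invalidation: with HCR_EL2.E2H set the EL2&0
 * regime is in use, otherwise plain EL2.
 */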
static int vae2_tlbmask(CPUARMState *env)
{
    uint64_t hcr = arm_hcr_el2_eff(env);
    uint16_t mask;

    if (hcr & HCR_E2H) {
        mask = ARMMMUIdxBit_E20_2 |
               ARMMMUIdxBit_E20_2_PAN |
               ARMMMUIdxBit_E20_0;
    } else {
        mask = ARMMMUIdxBit_E2;
    }
    return mask;
}

/* Return 56 if TBI is enabled, 64 otherwise. */
static int tlbbits_for_regime(CPUARMState *env, ARMMMUIdx mmu_idx,
                              uint64_t addr)
{
    uint64_t tcr = regime_tcr(env, mmu_idx);
    int tbi = aa64_va_parameter_tbi(tcr, mmu_idx);
    int select = extract64(addr, 55, 1);

    return (tbi >> select) & 1 ? 56 : 64;
}

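/*
 * The bit count returned here is passed to the tlb_flush_*_bits_*
 * functions so that, when TBI is enabled for the relevant regime, TLB
 * entries are matched on only the 56 significant address bits and the
 * tag byte is ignored.
 */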
static int vae1_tlbbits(CPUARMState *env, uint64_t addr)
{
    uint64_t hcr = arm_hcr_el2_eff(env);
    ARMMMUIdx mmu_idx;

    assert(arm_feature(env, ARM_FEATURE_AARCH64));

    /* Only the regime of the mmu_idx below is significant. */
    if ((hcr & (HCR_E2H | HCR_TGE)) == (HCR_E2H | HCR_TGE)) {
        mmu_idx = ARMMMUIdx_E20_0;
    } else {
        mmu_idx = ARMMMUIdx_E10_0;
    }

    return tlbbits_for_regime(env, mmu_idx, addr);
}

static int vae2_tlbbits(CPUARMState *env, uint64_t addr)
{
    uint64_t hcr = arm_hcr_el2_eff(env);
    ARMMMUIdx mmu_idx;

    /*
     * Only the regime of the mmu_idx below is significant.
     * Regime EL2&0 has two ranges with separate TBI configuration, while EL2
     * only has one.
     */
    if (hcr & HCR_E2H) {
        mmu_idx = ARMMMUIdx_E20_2;
    } else {
        mmu_idx = ARMMMUIdx_E2;
    }

    return tlbbits_for_regime(env, mmu_idx, addr);
}

static void tlbi_aa64_vmalle1is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                      uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = vae1_tlbmask(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, mask);
}

static void tlbi_aa64_vmalle1_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = vae1_tlbmask(env);

    if (tlb_force_broadcast(env)) {
        tlb_flush_by_mmuidx_all_cpus_synced(cs, mask);
    } else {
        tlb_flush_by_mmuidx(cs, mask);
    }
}

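/*
 * ALLE2 invalidation drops both the EL2 and EL2&0 regimes, since either
 * may hold entries depending on the current value of HCR_EL2.E2H.
 */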
static int e2_tlbmask(CPUARMState *env)
{
    return (ARMMMUIdxBit_E20_0 |
            ARMMMUIdxBit_E20_2 |
            ARMMMUIdxBit_E20_2_PAN |
            ARMMMUIdxBit_E2);
}

static void tlbi_aa64_alle1_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = alle1_tlbmask(env);

    tlb_flush_by_mmuidx(cs, mask);
}

static void tlbi_aa64_alle2_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = e2_tlbmask(env);

    tlb_flush_by_mmuidx(cs, mask);
}

static void tlbi_aa64_alle3_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    ARMCPU *cpu = env_archcpu(env);
    CPUState *cs = CPU(cpu);

    tlb_flush_by_mmuidx(cs, ARMMMUIdxBit_E3);
}

static void tlbi_aa64_alle1is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = alle1_tlbmask(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, mask);
}

static void tlbi_aa64_alle2is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = e2_tlbmask(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, mask);
}

static void tlbi_aa64_alle3is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, ARMMMUIdxBit_E3);
}

static void tlbi_aa64_vae2_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                 uint64_t value)
{
    /*
     * Invalidate by VA, EL2
     * Currently handles both VAE2 and VALE2, since we don't support
     * flush-last-level-only.
     */
    CPUState *cs = env_cpu(env);
    int mask = vae2_tlbmask(env);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);
    int bits = vae2_tlbbits(env, pageaddr);

    tlb_flush_page_bits_by_mmuidx(cs, pageaddr, mask, bits);
}

static void tlbi_aa64_vae3_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                 uint64_t value)
{
    /*
     * Invalidate by VA, EL3
     * Currently handles both VAE3 and VALE3, since we don't support
     * flush-last-level-only.
     */
    ARMCPU *cpu = env_archcpu(env);
    CPUState *cs = CPU(cpu);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);

    tlb_flush_page_by_mmuidx(cs, pageaddr, ARMMMUIdxBit_E3);
}

static void tlbi_aa64_vae1is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                   uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = vae1_tlbmask(env);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);
    int bits = vae1_tlbbits(env, pageaddr);

    tlb_flush_page_bits_by_mmuidx_all_cpus_synced(cs, pageaddr, mask, bits);
}

static void tlbi_aa64_vae1_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                 uint64_t value)
{
    /*
     * Invalidate by VA, EL1&0 (AArch64 version).
     * Currently handles all of VAE1, VAAE1, VAALE1 and VALE1,
     * since we don't support flush-for-specific-ASID-only or
     * flush-last-level-only.
     */
    CPUState *cs = env_cpu(env);
    int mask = vae1_tlbmask(env);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);
    int bits = vae1_tlbbits(env, pageaddr);

    if (tlb_force_broadcast(env)) {
        tlb_flush_page_bits_by_mmuidx_all_cpus_synced(cs, pageaddr, mask, bits);
    } else {
        tlb_flush_page_bits_by_mmuidx(cs, pageaddr, mask, bits);
    }
}

static void tlbi_aa64_vae2is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                   uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = vae2_tlbmask(env);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);
    int bits = vae2_tlbbits(env, pageaddr);

    tlb_flush_page_bits_by_mmuidx_all_cpus_synced(cs, pageaddr, mask, bits);
}

static void tlbi_aa64_vae3is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                   uint64_t value)
{
    CPUState *cs = env_cpu(env);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);
    int bits = tlbbits_for_regime(env, ARMMMUIdx_E3, pageaddr);

    tlb_flush_page_bits_by_mmuidx_all_cpus_synced(cs, pageaddr,
                                                  ARMMMUIdxBit_E3, bits);
}

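/*
 * IPAS2E1-style operations take an intermediate physical address and
 * invalidate stage 2 entries for the EL1&0 regime; pick the secure or
 * non-secure stage 2 MMU index accordingly.
 */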
static int ipas2e1_tlbmask(CPUARMState *env, int64_t value)
{
    /*
     * The MSB of value is the NS field, which only applies if SEL2
     * is implemented and SCR_EL3.NS is not set (i.e. in secure mode).
     */
    return (value >= 0
            && cpu_isar_feature(aa64_sel2, env_archcpu(env))
            && arm_is_secure_below_el3(env)
            ? ARMMMUIdxBit_Stage2_S
            : ARMMMUIdxBit_Stage2);
}

static void tlbi_aa64_ipas2e1_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = ipas2e1_tlbmask(env, value);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);

    if (tlb_force_broadcast(env)) {
        tlb_flush_page_by_mmuidx_all_cpus_synced(cs, pageaddr, mask);
    } else {
        tlb_flush_page_by_mmuidx(cs, pageaddr, mask);
    }
}

static void tlbi_aa64_ipas2e1is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                      uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = ipas2e1_tlbmask(env, value);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);

    tlb_flush_page_by_mmuidx_all_cpus_synced(cs, pageaddr, mask);
}

static const ARMCPRegInfo tlbi_not_v7_cp_reginfo[] = {
    /*
     * MMU TLB control. Note that the wildcarding means we cover not just
     * the unified TLB ops but also the dside/iside/inner-shareable variants.
     */
    { .name = "TLBIALL", .cp = 15, .crn = 8, .crm = CP_ANY,
      .opc1 = CP_ANY, .opc2 = 0, .access = PL1_W, .writefn = tlbiall_write,
      .type = ARM_CP_NO_RAW },
    { .name = "TLBIMVA", .cp = 15, .crn = 8, .crm = CP_ANY,
      .opc1 = CP_ANY, .opc2 = 1, .access = PL1_W, .writefn = tlbimva_write,
      .type = ARM_CP_NO_RAW },
    { .name = "TLBIASID", .cp = 15, .crn = 8, .crm = CP_ANY,
      .opc1 = CP_ANY, .opc2 = 2, .access = PL1_W, .writefn = tlbiasid_write,
      .type = ARM_CP_NO_RAW },
    { .name = "TLBIMVAA", .cp = 15, .crn = 8, .crm = CP_ANY,
      .opc1 = CP_ANY, .opc2 = 3, .access = PL1_W, .writefn = tlbimvaa_write,
      .type = ARM_CP_NO_RAW },
};

static const ARMCPRegInfo tlbi_v7_cp_reginfo[] = {
    /* 32 bit ITLB invalidates */
    { .name = "ITLBIALL", .cp = 15, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiall_write },
    { .name = "ITLBIMVA", .cp = 15, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimva_write },
    { .name = "ITLBIASID", .cp = 15, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 2,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiasid_write },
    /* 32 bit DTLB invalidates */
    { .name = "DTLBIALL", .cp = 15, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiall_write },
    { .name = "DTLBIMVA", .cp = 15, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimva_write },
    { .name = "DTLBIASID", .cp = 15, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 2,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiasid_write },
    /* 32 bit TLB invalidates */
    { .name = "TLBIALL", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiall_write },
    { .name = "TLBIMVA", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimva_write },
    { .name = "TLBIASID", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 2,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiasid_write },
    { .name = "TLBIMVAA", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 3,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimvaa_write },
};

static const ARMCPRegInfo tlbi_v7mp_cp_reginfo[] = {
    /* 32 bit TLB invalidates, Inner Shareable */
    { .name = "TLBIALLIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbiall_is_write },
    { .name = "TLBIMVAIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbimva_is_write },
    { .name = "TLBIASIDIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 2,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbiasid_is_write },
    { .name = "TLBIMVAAIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 3,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbimvaa_is_write },
};

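/*
 * v8 adds the AArch32 "invalidate by VA, last level" ops and the base
 * AArch64 TLBI instruction set. The last-level variants share writefns
 * with the ordinary ones because QEMU does not implement
 * last-level-only invalidation.
 */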
static const ARMCPRegInfo tlbi_v8_cp_reginfo[] = {
    /* AArch32 TLB invalidate last level of translation table walk */
    { .name = "TLBIMVALIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbimva_is_write },
    { .name = "TLBIMVAALIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 7,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbimvaa_is_write },
    { .name = "TLBIMVAL", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimva_write },
    { .name = "TLBIMVAAL", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 7,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimvaa_write },
    { .name = "TLBIMVALH", .cp = 15, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbimva_hyp_write },
    { .name = "TLBIMVALHIS",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbimva_hyp_is_write },
    { .name = "TLBIIPAS2",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiipas2_hyp_write },
    { .name = "TLBIIPAS2IS",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiipas2is_hyp_write },
    { .name = "TLBIIPAS2L",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiipas2_hyp_write },
    { .name = "TLBIIPAS2LIS",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiipas2is_hyp_write },
    /* AArch64 TLBI operations */
    { .name = "TLBI_VMALLE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 0,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVMALLE1IS,
      .writefn = tlbi_aa64_vmalle1is_write },
    { .name = "TLBI_VAE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 1,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAE1IS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_ASIDE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 2,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIASIDE1IS,
      .writefn = tlbi_aa64_vmalle1is_write },
    { .name = "TLBI_VAAE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAAE1IS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_VALE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVALE1IS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_VAALE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAALE1IS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_VMALLE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 0,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVMALLE1,
      .writefn = tlbi_aa64_vmalle1_write },
    { .name = "TLBI_VAE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 1,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAE1,
      .writefn = tlbi_aa64_vae1_write },
    { .name = "TLBI_ASIDE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 2,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIASIDE1,
      .writefn = tlbi_aa64_vmalle1_write },
    { .name = "TLBI_VAAE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAAE1,
      .writefn = tlbi_aa64_vae1_write },
    { .name = "TLBI_VALE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVALE1,
      .writefn = tlbi_aa64_vae1_write },
    { .name = "TLBI_VAALE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAALE1,
      .writefn = tlbi_aa64_vae1_write },
    { .name = "TLBI_IPAS2E1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 1,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ipas2e1is_write },
    { .name = "TLBI_IPAS2LE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 5,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ipas2e1is_write },
    { .name = "TLBI_ALLE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 4,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle1is_write },
    { .name = "TLBI_VMALLS12E1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 6,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle1is_write },
    { .name = "TLBI_IPAS2E1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 1,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ipas2e1_write },
    { .name = "TLBI_IPAS2LE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 5,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ipas2e1_write },
    { .name = "TLBI_ALLE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 4,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle1_write },
    { .name = "TLBI_VMALLS12E1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 6,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle1is_write },
};

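/*
 * TLBI ops that are only registered when EL2 is implemented: the AArch32
 * hyp-mode invalidations and the AArch64 *E2 operations.
 */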
static const ARMCPRegInfo tlbi_el2_cp_reginfo[] = {
    { .name = "TLBIALLNSNH",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 4,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiall_nsnh_write },
    { .name = "TLBIALLNSNHIS",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 4,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiall_nsnh_is_write },
    { .name = "TLBIALLH", .cp = 15, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiall_hyp_write },
    { .name = "TLBIALLHIS", .cp = 15, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiall_hyp_is_write },
    { .name = "TLBIMVAH", .cp = 15, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbimva_hyp_write },
    { .name = "TLBIMVAHIS", .cp = 15, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbimva_hyp_is_write },
    { .name = "TLBI_ALLE2", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 0,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_alle2_write },
    { .name = "TLBI_VAE2", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2_write },
    { .name = "TLBI_VALE2", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2_write },
    { .name = "TLBI_ALLE2IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 0,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_alle2is_write },
    { .name = "TLBI_VAE2IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2is_write },
    { .name = "TLBI_VALE2IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2is_write },
};

static const ARMCPRegInfo tlbi_el3_cp_reginfo[] = {
    { .name = "TLBI_ALLE3IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 3, .opc2 = 0,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle3is_write },
    { .name = "TLBI_VAE3IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 3, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3is_write },
    { .name = "TLBI_VALE3IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 3, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3is_write },
    { .name = "TLBI_ALLE3", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 7, .opc2 = 0,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle3_write },
    { .name = "TLBI_VAE3", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 7, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3_write },
    { .name = "TLBI_VALE3", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 7, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3_write },
};

typedef struct {
    uint64_t base;
    uint64_t length;
} TLBIRange;

static ARMGranuleSize tlbi_range_tg_to_gran_size(int tg)
{
    /*
     * Note that the TLBI range TG field encoding differs from both
     * TG0 and TG1 encodings.
     */
    switch (tg) {
    case 1:
        return Gran4K;
    case 2:
        return Gran16K;
    case 3:
        return Gran64K;
    default:
        return GranInvalid;
    }
}

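/*
 * Decode a FEAT_TLBIRANGE operand (as used by TLBI RVAE1 etc) into a
 * base address and length. The length works out as
 *   (NUM + 1) << (5 * SCALE + 1 + page_shift)
 * bytes, where page_shift comes from the TG granule field; the base is
 * BaseADDR shifted by the granule size (or by 16 when DS=1, see below).
 * For example, with a 4K granule, SCALE=0 and NUM=0 describe a
 * two-page (8KB) range.
 */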
static TLBIRange tlbi_aa64_get_range(CPUARMState *env, ARMMMUIdx mmuidx,
                                     uint64_t value)
{
    unsigned int page_size_granule, page_shift, num, scale, exponent;
    /* Extract one bit to represent the va selector in use. */
    uint64_t select = sextract64(value, 36, 1);
    ARMVAParameters param = aa64_va_parameters(env, select, mmuidx, true, false);
    TLBIRange ret = { };
    ARMGranuleSize gran;

    page_size_granule = extract64(value, 46, 2);
    gran = tlbi_range_tg_to_gran_size(page_size_granule);

    /* The granule encoded in value must match the granule in use. */
    if (gran != param.gran) {
        qemu_log_mask(LOG_GUEST_ERROR, "Invalid tlbi page size granule %d\n",
                      page_size_granule);
        return ret;
    }

    page_shift = arm_granule_bits(gran);
    num = extract64(value, 39, 5);
    scale = extract64(value, 44, 2);
    exponent = (5 * scale) + 1;

    ret.length = (num + 1) << (exponent + page_shift);

    if (param.select) {
        ret.base = sextract64(value, 0, 37);
    } else {
        ret.base = extract64(value, 0, 37);
    }
    if (param.ds) {
        /*
         * With DS=1, BaseADDR is always shifted 16 so that it is able
         * to address all 52 va bits. The input address is perforce
         * aligned on a 64k boundary regardless of translation granule.
         */
        page_shift = 16;
    }
    ret.base <<= page_shift;

    return ret;
}

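/*
 * Common handler for the range-invalidate writefns below: decode the
 * operand, then issue either a local or a broadcast range flush over
 * the MMU indexes in idxmap.
 */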
static void do_rvae_write(CPUARMState *env, uint64_t value,
                          int idxmap, bool synced)
{
    ARMMMUIdx one_idx = ARM_MMU_IDX_A | ctz32(idxmap);
    TLBIRange range;
    int bits;

    range = tlbi_aa64_get_range(env, one_idx, value);
    bits = tlbbits_for_regime(env, one_idx, range.base);

    if (synced) {
        tlb_flush_range_by_mmuidx_all_cpus_synced(env_cpu(env),
                                                  range.base,
                                                  range.length,
                                                  idxmap,
                                                  bits);
    } else {
        tlb_flush_range_by_mmuidx(env_cpu(env), range.base,
                                  range.length, idxmap, bits);
    }
}

static void tlbi_aa64_rvae1_write(CPUARMState *env,
                                  const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    /*
     * Invalidate by VA range, EL1&0.
     * Currently handles all of RVAE1, RVAAE1, RVAALE1 and RVALE1,
     * since we don't support flush-for-specific-ASID-only or
     * flush-last-level-only.
     */

    do_rvae_write(env, value, vae1_tlbmask(env),
                  tlb_force_broadcast(env));
}

static void tlbi_aa64_rvae1is_write(CPUARMState *env,
                                    const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    /*
     * Invalidate by VA range, Inner/Outer Shareable EL1&0.
     * Currently handles all of RVAE1IS, RVAE1OS, RVAAE1IS, RVAAE1OS,
     * RVAALE1IS, RVAALE1OS, RVALE1IS and RVALE1OS, since we don't support
     * flush-for-specific-ASID-only, flush-last-level-only or inner/outer
     * shareable specific flushes.
     */

    do_rvae_write(env, value, vae1_tlbmask(env), true);
}

static void tlbi_aa64_rvae2_write(CPUARMState *env,
                                  const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    /*
     * Invalidate by VA range, EL2.
     * Currently handles all of RVAE2 and RVALE2,
     * since we don't support flush-for-specific-ASID-only or
     * flush-last-level-only.
     */

    do_rvae_write(env, value, vae2_tlbmask(env),
                  tlb_force_broadcast(env));
}

static void tlbi_aa64_rvae2is_write(CPUARMState *env,
                                    const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    /*
     * Invalidate by VA range, Inner/Outer Shareable, EL2.
     * Currently handles all of RVAE2IS, RVAE2OS, RVALE2IS and RVALE2OS,
     * since we don't support flush-for-specific-ASID-only,
     * flush-last-level-only or inner/outer shareable specific flushes.
     */

    do_rvae_write(env, value, vae2_tlbmask(env), true);
}

static void tlbi_aa64_rvae3_write(CPUARMState *env,
                                  const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    /*
     * Invalidate by VA range, EL3.
     * Currently handles all of RVAE3 and RVALE3,
     * since we don't support flush-for-specific-ASID-only or
     * flush-last-level-only.
     */

    do_rvae_write(env, value, ARMMMUIdxBit_E3, tlb_force_broadcast(env));
}

static void tlbi_aa64_rvae3is_write(CPUARMState *env,
                                    const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    /*
     * Invalidate by VA range, EL3, Inner/Outer Shareable.
     * Currently handles all of RVAE3IS, RVAE3OS, RVALE3IS and RVALE3OS,
     * since we don't support flush-for-specific-ASID-only,
     * flush-last-level-only or inner/outer specific flushes.
     */

    do_rvae_write(env, value, ARMMMUIdxBit_E3, true);
}

static void tlbi_aa64_ripas2e1_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                     uint64_t value)
{
    do_rvae_write(env, value, ipas2e1_tlbmask(env, value),
                  tlb_force_broadcast(env));
}

static void tlbi_aa64_ripas2e1is_write(CPUARMState *env,
                                       const ARMCPRegInfo *ri,
                                       uint64_t value)
{
    do_rvae_write(env, value, ipas2e1_tlbmask(env, value), true);
}

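/*
 * FEAT_TLBIRANGE register definitions. The IS and OS range operations
 * share writefns because QEMU treats outer-shareable broadcasts the
 * same as inner-shareable ones.
 */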
static const ARMCPRegInfo tlbirange_reginfo[] = {
    { .name = "TLBI_RVAE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 2, .opc2 = 1,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAE1IS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAAE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 2, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAAE1IS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVALE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 2, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVALE1IS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAALE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 2, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAALE1IS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 1,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAE1OS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAAE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAAE1OS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVALE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVALE1OS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAALE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAALE1OS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 1,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAE1,
      .writefn = tlbi_aa64_rvae1_write },
    { .name = "TLBI_RVAAE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAAE1,
      .writefn = tlbi_aa64_rvae1_write },
    { .name = "TLBI_RVALE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVALE1,
      .writefn = tlbi_aa64_rvae1_write },
    { .name = "TLBI_RVAALE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAALE1,
      .writefn = tlbi_aa64_rvae1_write },
    { .name = "TLBI_RIPAS2E1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 2,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ripas2e1is_write },
    { .name = "TLBI_RIPAS2LE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 6,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ripas2e1is_write },
    { .name = "TLBI_RVAE2IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 2, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2is_write },
    { .name = "TLBI_RVALE2IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 2, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2is_write },
    { .name = "TLBI_RIPAS2E1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 2,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ripas2e1_write },
    { .name = "TLBI_RIPAS2LE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 6,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ripas2e1_write },
    { .name = "TLBI_RVAE2OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 5, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2is_write },
    { .name = "TLBI_RVALE2OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 5, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2is_write },
    { .name = "TLBI_RVAE2", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 6, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2_write },
    { .name = "TLBI_RVALE2", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 6, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2_write },
    { .name = "TLBI_RVAE3IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 2, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3is_write },
    { .name = "TLBI_RVALE3IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 2, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3is_write },
    { .name = "TLBI_RVAE3OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 5, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3is_write },
    { .name = "TLBI_RVALE3OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 5, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3is_write },
    { .name = "TLBI_RVAE3", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 6, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3_write },
    { .name = "TLBI_RVALE3", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 6, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3_write },
};

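/*
 * FEAT_TLBIOS register definitions. QEMU does not model the inner/outer
 * shareable distinction, so the OS forms reuse the IS writefns; the
 * IPAS2*OS and RIPAS2*OS forms are left as NOPs.
 */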
static const ARMCPRegInfo tlbios_reginfo[] = {
    { .name = "TLBI_VMALLE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 0,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVMALLE1OS,
      .writefn = tlbi_aa64_vmalle1is_write },
    { .name = "TLBI_VAE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 1,
      .fgt = FGT_TLBIVAE1OS,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_ASIDE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 2,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIASIDE1OS,
      .writefn = tlbi_aa64_vmalle1is_write },
    { .name = "TLBI_VAAE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAAE1OS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_VALE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVALE1OS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_VAALE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAALE1OS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_ALLE2OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 1, .opc2 = 0,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_alle2is_write },
    { .name = "TLBI_VAE2OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 1, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2is_write },
    { .name = "TLBI_ALLE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 1, .opc2 = 4,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle1is_write },
    { .name = "TLBI_VALE2OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 1, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2is_write },
    { .name = "TLBI_VMALLS12E1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 1, .opc2 = 6,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle1is_write },
    { .name = "TLBI_IPAS2E1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 0,
      .access = PL2_W, .type = ARM_CP_NOP | ARM_CP_ADD_TLBI_NXS },
    { .name = "TLBI_RIPAS2E1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 3,
      .access = PL2_W, .type = ARM_CP_NOP | ARM_CP_ADD_TLBI_NXS },
    { .name = "TLBI_IPAS2LE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 4,
      .access = PL2_W, .type = ARM_CP_NOP | ARM_CP_ADD_TLBI_NXS },
    { .name = "TLBI_RIPAS2LE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 7,
      .access = PL2_W, .type = ARM_CP_NOP | ARM_CP_ADD_TLBI_NXS },
    { .name = "TLBI_ALLE3OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 1, .opc2 = 0,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle3is_write },
    { .name = "TLBI_VAE3OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 1, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3is_write },
    { .name = "TLBI_VALE3OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 1, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3is_write },
};

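/*
 * FEAT_RME adds TLBI operations that invalidate by physical address.
 * QEMU's TLB is indexed by virtual address only, so these are handled
 * by flushing everything (locally for PAALL, on all CPUs for PAALLOS).
 */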
static void tlbi_aa64_paall_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush(cs);
}

static void tlbi_aa64_paallos_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_all_cpus_synced(cs);
}

static const ARMCPRegInfo tlbi_rme_reginfo[] = {
    { .name = "TLBI_PAALL", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 7, .opc2 = 4,
      .access = PL3_W, .type = ARM_CP_NO_RAW,
      .writefn = tlbi_aa64_paall_write },
    { .name = "TLBI_PAALLOS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 1, .opc2 = 4,
      .access = PL3_W, .type = ARM_CP_NO_RAW,
      .writefn = tlbi_aa64_paallos_write },
    /*
     * QEMU does not have a way to invalidate by physical address, thus
     * invalidating a range of physical addresses is accomplished by
     * flushing all tlb entries in the outer shareable domain,
     * just like PAALLOS.
     */
    { .name = "TLBI_RPALOS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 4, .opc2 = 7,
      .access = PL3_W, .type = ARM_CP_NO_RAW,
      .writefn = tlbi_aa64_paallos_write },
    { .name = "TLBI_RPAOS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 4, .opc2 = 3,
      .access = PL3_W, .type = ARM_CP_NO_RAW,
      .writefn = tlbi_aa64_paallos_write },
};

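/*
 * Register all of the TLBI insn cpregs appropriate for this CPU: the
 * pre-v7 wildcarded ops or the v7 ops, plus the v7MP, v8, EL2, EL3,
 * FEAT_TLBIRANGE, FEAT_TLBIOS and FEAT_RME groups as applicable.
 */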
void define_tlb_insn_regs(ARMCPU *cpu)
{
    CPUARMState *env = &cpu->env;

    if (!arm_feature(env, ARM_FEATURE_V7)) {
        define_arm_cp_regs(cpu, tlbi_not_v7_cp_reginfo);
    } else {
        define_arm_cp_regs(cpu, tlbi_v7_cp_reginfo);
    }
    if (arm_feature(env, ARM_FEATURE_V7MP) &&
        !arm_feature(env, ARM_FEATURE_PMSA)) {
        define_arm_cp_regs(cpu, tlbi_v7mp_cp_reginfo);
    }
    if (arm_feature(env, ARM_FEATURE_V8)) {
        define_arm_cp_regs(cpu, tlbi_v8_cp_reginfo);
    }
    /*
     * We retain the existing logic for when to register these TLBI
     * ops (i.e. matching the condition for el2_cp_reginfo[] in
     * helper.c), but we will be able to simplify this later.
     */
    if (arm_feature(env, ARM_FEATURE_EL2)) {
        define_arm_cp_regs(cpu, tlbi_el2_cp_reginfo);
    }
    if (arm_feature(env, ARM_FEATURE_EL3)) {
        define_arm_cp_regs(cpu, tlbi_el3_cp_reginfo);
    }
    if (cpu_isar_feature(aa64_tlbirange, cpu)) {
        define_arm_cp_regs(cpu, tlbirange_reginfo);
    }
    if (cpu_isar_feature(aa64_tlbios, cpu)) {
        define_arm_cp_regs(cpu, tlbios_reginfo);
    }
    if (cpu_isar_feature(aa64_rme, cpu)) {
        define_arm_cp_regs(cpu, tlbi_rme_reginfo);
    }
}