/*
 * Helpers for TLBI insns
 *
 * This code is licensed under the GNU GPL v2 or later.
 *
 * SPDX-License-Identifier: GPL-2.0-or-later
 */
#include "qemu/osdep.h"
#include "qemu/log.h"
#include "exec/cputlb.h"
#include "exec/target_page.h"
#include "cpu.h"
#include "internals.h"
#include "cpu-features.h"
#include "cpregs.h"

/* Check for traps from EL1 due to HCR_EL2.TTLB. */
static CPAccessResult access_ttlb(CPUARMState *env, const ARMCPRegInfo *ri,
                                  bool isread)
{
    if (arm_current_el(env) == 1 && (arm_hcr_el2_eff(env) & HCR_TTLB)) {
        return CP_ACCESS_TRAP_EL2;
    }
    return CP_ACCESS_OK;
}

/* Check for traps from EL1 due to HCR_EL2.TTLB or TTLBIS. */
static CPAccessResult access_ttlbis(CPUARMState *env, const ARMCPRegInfo *ri,
                                    bool isread)
{
    if (arm_current_el(env) == 1 &&
        (arm_hcr_el2_eff(env) & (HCR_TTLB | HCR_TTLBIS))) {
        return CP_ACCESS_TRAP_EL2;
    }
    return CP_ACCESS_OK;
}

#ifdef TARGET_AARCH64
/* Check for traps from EL1 due to HCR_EL2.TTLB or TTLBOS. */
static CPAccessResult access_ttlbos(CPUARMState *env, const ARMCPRegInfo *ri,
                                    bool isread)
{
    if (arm_current_el(env) == 1 &&
        (arm_hcr_el2_eff(env) & (HCR_TTLB | HCR_TTLBOS))) {
        return CP_ACCESS_TRAP_EL2;
    }
    return CP_ACCESS_OK;
}
#endif

/* IS variants of TLB operations must affect all cores */
static void tlbiall_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                             uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_all_cpus_synced(cs);
}

static void tlbiasid_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                              uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_all_cpus_synced(cs);
}

static void tlbimva_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                             uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_page_all_cpus_synced(cs, value & TARGET_PAGE_MASK);
}

static void tlbimvaa_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                              uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_page_all_cpus_synced(cs, value & TARGET_PAGE_MASK);
}

/*
 * Non-IS variants of TLB operations are upgraded to
 * IS versions if we are at EL1 and HCR_EL2.FB is effectively set to
 * force broadcast of these operations.
 */
static bool tlb_force_broadcast(CPUARMState *env)
{
    return arm_current_el(env) == 1 && (arm_hcr_el2_eff(env) & HCR_FB);
}

static void tlbiall_write(CPUARMState *env, const ARMCPRegInfo *ri,
                          uint64_t value)
{
    /* Invalidate all (TLBIALL) */
    CPUState *cs = env_cpu(env);

    if (tlb_force_broadcast(env)) {
        tlb_flush_all_cpus_synced(cs);
    } else {
        tlb_flush(cs);
    }
}

static void tlbimva_write(CPUARMState *env, const ARMCPRegInfo *ri,
                          uint64_t value)
{
    /* Invalidate single TLB entry by MVA and ASID (TLBIMVA) */
    CPUState *cs = env_cpu(env);

    value &= TARGET_PAGE_MASK;
    if (tlb_force_broadcast(env)) {
        tlb_flush_page_all_cpus_synced(cs, value);
    } else {
        tlb_flush_page(cs, value);
    }
}

static void tlbiasid_write(CPUARMState *env, const ARMCPRegInfo *ri,
                           uint64_t value)
{
    /* Invalidate by ASID (TLBIASID) */
    CPUState *cs = env_cpu(env);

    if (tlb_force_broadcast(env)) {
        tlb_flush_all_cpus_synced(cs);
    } else {
        tlb_flush(cs);
    }
}

static void tlbimvaa_write(CPUARMState *env, const ARMCPRegInfo *ri,
                           uint64_t value)
{
    /* Invalidate single entry by MVA, all ASIDs (TLBIMVAA) */
    CPUState *cs = env_cpu(env);

    value &= TARGET_PAGE_MASK;
    if (tlb_force_broadcast(env)) {
        tlb_flush_page_all_cpus_synced(cs, value);
    } else {
        tlb_flush_page(cs, value);
    }
}

static void tlbimva_hyp_write(CPUARMState *env, const ARMCPRegInfo *ri,
                              uint64_t value)
{
    CPUState *cs = env_cpu(env);
    uint64_t pageaddr = value & ~MAKE_64BIT_MASK(0, 12);

    tlb_flush_page_by_mmuidx(cs, pageaddr, ARMMMUIdxBit_E2);
}

static void tlbimva_hyp_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                 uint64_t value)
{
    CPUState *cs = env_cpu(env);
    uint64_t pageaddr = value & ~MAKE_64BIT_MASK(0, 12);

    tlb_flush_page_by_mmuidx_all_cpus_synced(cs, pageaddr,
                                             ARMMMUIdxBit_E2);
}

static void tlbiipas2_hyp_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                uint64_t value)
{
    CPUState *cs = env_cpu(env);
    uint64_t pageaddr = (value & MAKE_64BIT_MASK(0, 28)) << 12;

    tlb_flush_page_by_mmuidx(cs, pageaddr, ARMMMUIdxBit_Stage2);
}

static void tlbiipas2is_hyp_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    CPUState *cs = env_cpu(env);
    uint64_t pageaddr = (value & MAKE_64BIT_MASK(0, 28)) << 12;

    tlb_flush_page_by_mmuidx_all_cpus_synced(cs, pageaddr, ARMMMUIdxBit_Stage2);
}

static void tlbiall_nsnh_write(CPUARMState *env, const ARMCPRegInfo *ri,
                               uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_by_mmuidx(cs, alle1_tlbmask(env));
}

static void tlbiall_nsnh_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, alle1_tlbmask(env));
}

static void tlbiall_hyp_write(CPUARMState *env, const ARMCPRegInfo *ri,
                              uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_by_mmuidx(cs, ARMMMUIdxBit_E2);
}

static void tlbiall_hyp_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                 uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, ARMMMUIdxBit_E2);
}

/*
 * See: D4.7.2 TLB maintenance requirements and the TLB maintenance instructions
 * Page D4-1736 (DDI0487A.b)
 */

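/*
 * Return the set of MMU indexes that a TLBI ...E1* operation must flush.
 * When HCR_EL2.{E2H,TGE} are both set, the EL1 VA-space operations apply
 * to the EL2&0 regime; otherwise they apply to the EL1&0 regime.
 */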
static int vae1_tlbmask(CPUARMState *env)
{
    uint64_t hcr = arm_hcr_el2_eff(env);
    uint16_t mask;

    assert(arm_feature(env, ARM_FEATURE_AARCH64));

    if ((hcr & (HCR_E2H | HCR_TGE)) == (HCR_E2H | HCR_TGE)) {
        mask = ARMMMUIdxBit_E20_2 |
               ARMMMUIdxBit_E20_2_PAN |
               ARMMMUIdxBit_E20_0;
    } else {
        /* This is AArch64 only, so we don't need to touch the EL30_x TLBs */
        mask = ARMMMUIdxBit_E10_1 |
               ARMMMUIdxBit_E10_1_PAN |
               ARMMMUIdxBit_E10_0;
    }
    return mask;
}

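/*
 * Return the set of MMU indexes that a TLBI ...E2* operation must flush:
 * the EL2&0 regime when HCR_EL2.E2H is set, otherwise plain EL2.
 */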
static int vae2_tlbmask(CPUARMState *env)
{
    uint64_t hcr = arm_hcr_el2_eff(env);
    uint16_t mask;

    if (hcr & HCR_E2H) {
        mask = ARMMMUIdxBit_E20_2 |
               ARMMMUIdxBit_E20_2_PAN |
               ARMMMUIdxBit_E20_0;
    } else {
        mask = ARMMMUIdxBit_E2;
    }
    return mask;
}

/* Return 56 if TBI is enabled, 64 otherwise. */
static int tlbbits_for_regime(CPUARMState *env, ARMMMUIdx mmu_idx,
                              uint64_t addr)
{
    uint64_t tcr = regime_tcr(env, mmu_idx);
    int tbi = aa64_va_parameter_tbi(tcr, mmu_idx);
    int select = extract64(addr, 55, 1);

    return (tbi >> select) & 1 ? 56 : 64;
}

static int vae1_tlbbits(CPUARMState *env, uint64_t addr)
{
    uint64_t hcr = arm_hcr_el2_eff(env);
    ARMMMUIdx mmu_idx;

    assert(arm_feature(env, ARM_FEATURE_AARCH64));

    /* Only the regime of the mmu_idx below is significant. */
    if ((hcr & (HCR_E2H | HCR_TGE)) == (HCR_E2H | HCR_TGE)) {
        mmu_idx = ARMMMUIdx_E20_0;
    } else {
        mmu_idx = ARMMMUIdx_E10_0;
    }

    return tlbbits_for_regime(env, mmu_idx, addr);
}

static int vae2_tlbbits(CPUARMState *env, uint64_t addr)
{
    uint64_t hcr = arm_hcr_el2_eff(env);
    ARMMMUIdx mmu_idx;

    /*
     * Only the regime of the mmu_idx below is significant.
     * Regime EL2&0 has two ranges with separate TBI configuration, while EL2
     * only has one.
     */
    if (hcr & HCR_E2H) {
        mmu_idx = ARMMMUIdx_E20_2;
    } else {
        mmu_idx = ARMMMUIdx_E2;
    }

    return tlbbits_for_regime(env, mmu_idx, addr);
}

static void tlbi_aa64_vmalle1is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                      uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = vae1_tlbmask(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, mask);
}

static void tlbi_aa64_vmalle1_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = vae1_tlbmask(env);

    if (tlb_force_broadcast(env)) {
        tlb_flush_by_mmuidx_all_cpus_synced(cs, mask);
    } else {
        tlb_flush_by_mmuidx(cs, mask);
    }
}

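/*
 * TLBI ALLE2* must drop entries for both forms of the EL2 regime, so
 * flush the EL2 and EL2&0 MMU indexes regardless of HCR_EL2.E2H.
 */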
static int e2_tlbmask(CPUARMState *env)
{
    return (ARMMMUIdxBit_E20_0 |
            ARMMMUIdxBit_E20_2 |
            ARMMMUIdxBit_E20_2_PAN |
            ARMMMUIdxBit_E2);
}

static void tlbi_aa64_alle1_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = alle1_tlbmask(env);

    tlb_flush_by_mmuidx(cs, mask);
}

static void tlbi_aa64_alle2_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = e2_tlbmask(env);

    tlb_flush_by_mmuidx(cs, mask);
}

static void tlbi_aa64_alle3_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    ARMCPU *cpu = env_archcpu(env);
    CPUState *cs = CPU(cpu);

    tlb_flush_by_mmuidx(cs, ARMMMUIdxBit_E3);
}

static void tlbi_aa64_alle1is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = alle1_tlbmask(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, mask);
}

static void tlbi_aa64_alle2is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = e2_tlbmask(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, mask);
}

static void tlbi_aa64_alle3is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, ARMMMUIdxBit_E3);
}

static void tlbi_aa64_vae2_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                 uint64_t value)
{
    /*
     * Invalidate by VA, EL2
     * Currently handles both VAE2 and VALE2, since we don't support
     * flush-last-level-only.
     */
    CPUState *cs = env_cpu(env);
    int mask = vae2_tlbmask(env);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);
    int bits = vae2_tlbbits(env, pageaddr);

    tlb_flush_page_bits_by_mmuidx(cs, pageaddr, mask, bits);
}

static void tlbi_aa64_vae3_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                 uint64_t value)
{
    /*
     * Invalidate by VA, EL3
     * Currently handles both VAE3 and VALE3, since we don't support
     * flush-last-level-only.
     */
    ARMCPU *cpu = env_archcpu(env);
    CPUState *cs = CPU(cpu);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);

    tlb_flush_page_by_mmuidx(cs, pageaddr, ARMMMUIdxBit_E3);
}

static void tlbi_aa64_vae1is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                   uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = vae1_tlbmask(env);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);
    int bits = vae1_tlbbits(env, pageaddr);

    tlb_flush_page_bits_by_mmuidx_all_cpus_synced(cs, pageaddr, mask, bits);
}

static void tlbi_aa64_vae1_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                 uint64_t value)
{
    /*
     * Invalidate by VA, EL1&0 (AArch64 version).
     * Currently handles all of VAE1, VAAE1, VAALE1 and VALE1,
     * since we don't support flush-for-specific-ASID-only or
     * flush-last-level-only.
     */
    CPUState *cs = env_cpu(env);
    int mask = vae1_tlbmask(env);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);
    int bits = vae1_tlbbits(env, pageaddr);

    if (tlb_force_broadcast(env)) {
        tlb_flush_page_bits_by_mmuidx_all_cpus_synced(cs, pageaddr, mask, bits);
    } else {
        tlb_flush_page_bits_by_mmuidx(cs, pageaddr, mask, bits);
    }
}

static void tlbi_aa64_vae2is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                   uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = vae2_tlbmask(env);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);
    int bits = vae2_tlbbits(env, pageaddr);

    tlb_flush_page_bits_by_mmuidx_all_cpus_synced(cs, pageaddr, mask, bits);
}

static void tlbi_aa64_vae3is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                   uint64_t value)
{
    CPUState *cs = env_cpu(env);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);
    int bits = tlbbits_for_regime(env, ARMMMUIdx_E3, pageaddr);

    tlb_flush_page_bits_by_mmuidx_all_cpus_synced(cs, pageaddr,
                                                  ARMMMUIdxBit_E3, bits);
}

static int ipas2e1_tlbmask(CPUARMState *env, int64_t value)
{
    /*
     * The MSB of value is the NS field, which only applies if SEL2
     * is implemented and SCR_EL3.NS is not set (i.e. in secure mode).
     */
    return (value >= 0
            && cpu_isar_feature(aa64_sel2, env_archcpu(env))
            && arm_is_secure_below_el3(env)
            ? ARMMMUIdxBit_Stage2_S
            : ARMMMUIdxBit_Stage2);
}

static void tlbi_aa64_ipas2e1_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = ipas2e1_tlbmask(env, value);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);

    if (tlb_force_broadcast(env)) {
        tlb_flush_page_by_mmuidx_all_cpus_synced(cs, pageaddr, mask);
    } else {
        tlb_flush_page_by_mmuidx(cs, pageaddr, mask);
    }
}

static void tlbi_aa64_ipas2e1is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                      uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = ipas2e1_tlbmask(env, value);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);

    tlb_flush_page_by_mmuidx_all_cpus_synced(cs, pageaddr, mask);
}

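/*
 * Register description tables for the TLBI operations above.  They are
 * registered from define_tlb_insn_regs() at the bottom of this file,
 * according to which architecture features the CPU implements.
 */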
static const ARMCPRegInfo tlbi_not_v7_cp_reginfo[] = {
    /*
     * MMU TLB control. Note that the wildcarding means we cover not just
     * the unified TLB ops but also the dside/iside/inner-shareable variants.
     */
    { .name = "TLBIALL", .cp = 15, .crn = 8, .crm = CP_ANY,
      .opc1 = CP_ANY, .opc2 = 0, .access = PL1_W, .writefn = tlbiall_write,
      .type = ARM_CP_NO_RAW },
    { .name = "TLBIMVA", .cp = 15, .crn = 8, .crm = CP_ANY,
      .opc1 = CP_ANY, .opc2 = 1, .access = PL1_W, .writefn = tlbimva_write,
      .type = ARM_CP_NO_RAW },
    { .name = "TLBIASID", .cp = 15, .crn = 8, .crm = CP_ANY,
      .opc1 = CP_ANY, .opc2 = 2, .access = PL1_W, .writefn = tlbiasid_write,
      .type = ARM_CP_NO_RAW },
    { .name = "TLBIMVAA", .cp = 15, .crn = 8, .crm = CP_ANY,
      .opc1 = CP_ANY, .opc2 = 3, .access = PL1_W, .writefn = tlbimvaa_write,
      .type = ARM_CP_NO_RAW },
};

static const ARMCPRegInfo tlbi_v7_cp_reginfo[] = {
    /* 32 bit ITLB invalidates */
    { .name = "ITLBIALL", .cp = 15, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiall_write },
    { .name = "ITLBIMVA", .cp = 15, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimva_write },
    { .name = "ITLBIASID", .cp = 15, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 2,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiasid_write },
    /* 32 bit DTLB invalidates */
    { .name = "DTLBIALL", .cp = 15, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiall_write },
    { .name = "DTLBIMVA", .cp = 15, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimva_write },
    { .name = "DTLBIASID", .cp = 15, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 2,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiasid_write },
    /* 32 bit TLB invalidates */
    { .name = "TLBIALL", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiall_write },
    { .name = "TLBIMVA", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimva_write },
    { .name = "TLBIASID", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 2,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiasid_write },
    { .name = "TLBIMVAA", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 3,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimvaa_write },
};

static const ARMCPRegInfo tlbi_v7mp_cp_reginfo[] = {
    /* 32 bit TLB invalidates, Inner Shareable */
    { .name = "TLBIALLIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbiall_is_write },
    { .name = "TLBIMVAIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbimva_is_write },
    { .name = "TLBIASIDIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 2,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbiasid_is_write },
    { .name = "TLBIMVAAIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 3,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbimvaa_is_write },
};

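/*
 * TLBI ops added in ARMv8: the AArch32 last-level and IPAS2 forms,
 * plus the AArch64 TLBI instructions for the EL1 and stage-2 regimes.
 */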
static const ARMCPRegInfo tlbi_v8_cp_reginfo[] = {
    /* AArch32 TLB invalidate last level of translation table walk */
    { .name = "TLBIMVALIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbimva_is_write },
    { .name = "TLBIMVAALIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 7,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbimvaa_is_write },
    { .name = "TLBIMVAL", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimva_write },
    { .name = "TLBIMVAAL", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 7,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimvaa_write },
    { .name = "TLBIMVALH", .cp = 15, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbimva_hyp_write },
    { .name = "TLBIMVALHIS",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbimva_hyp_is_write },
    { .name = "TLBIIPAS2",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiipas2_hyp_write },
    { .name = "TLBIIPAS2IS",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiipas2is_hyp_write },
    { .name = "TLBIIPAS2L",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiipas2_hyp_write },
    { .name = "TLBIIPAS2LIS",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiipas2is_hyp_write },
    /* AArch64 TLBI operations */
    { .name = "TLBI_VMALLE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 0,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVMALLE1IS,
      .writefn = tlbi_aa64_vmalle1is_write },
    { .name = "TLBI_VAE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 1,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAE1IS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_ASIDE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 2,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIASIDE1IS,
      .writefn = tlbi_aa64_vmalle1is_write },
    { .name = "TLBI_VAAE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAAE1IS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_VALE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVALE1IS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_VAALE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAALE1IS,
      .writefn = tlbi_aa64_vae1is_write },
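    /*
     * Non-Inner-Shareable forms of the EL1 ops above; their write
     * functions still broadcast when HCR_EL2.FB forces it (see
     * tlb_force_broadcast()).
     */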
"TLBI_VMALLE1", .state = ARM_CP_STATE_AA64, 656 .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 0, 657 .access = PL1_W, .accessfn = access_ttlb, 658 .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS, 659 .fgt = FGT_TLBIVMALLE1, 660 .writefn = tlbi_aa64_vmalle1_write }, 661 { .name = "TLBI_VAE1", .state = ARM_CP_STATE_AA64, 662 .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 1, 663 .access = PL1_W, .accessfn = access_ttlb, 664 .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS, 665 .fgt = FGT_TLBIVAE1, 666 .writefn = tlbi_aa64_vae1_write }, 667 { .name = "TLBI_ASIDE1", .state = ARM_CP_STATE_AA64, 668 .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 2, 669 .access = PL1_W, .accessfn = access_ttlb, 670 .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS, 671 .fgt = FGT_TLBIASIDE1, 672 .writefn = tlbi_aa64_vmalle1_write }, 673 { .name = "TLBI_VAAE1", .state = ARM_CP_STATE_AA64, 674 .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 3, 675 .access = PL1_W, .accessfn = access_ttlb, 676 .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS, 677 .fgt = FGT_TLBIVAAE1, 678 .writefn = tlbi_aa64_vae1_write }, 679 { .name = "TLBI_VALE1", .state = ARM_CP_STATE_AA64, 680 .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 5, 681 .access = PL1_W, .accessfn = access_ttlb, 682 .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS, 683 .fgt = FGT_TLBIVALE1, 684 .writefn = tlbi_aa64_vae1_write }, 685 { .name = "TLBI_VAALE1", .state = ARM_CP_STATE_AA64, 686 .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 7, 687 .access = PL1_W, .accessfn = access_ttlb, 688 .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS, 689 .fgt = FGT_TLBIVAALE1, 690 .writefn = tlbi_aa64_vae1_write }, 691 { .name = "TLBI_IPAS2E1IS", .state = ARM_CP_STATE_AA64, 692 .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 1, 693 .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS, 694 .writefn = tlbi_aa64_ipas2e1is_write }, 695 { .name = "TLBI_IPAS2LE1IS", .state = ARM_CP_STATE_AA64, 696 .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 5, 697 .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS, 698 .writefn = tlbi_aa64_ipas2e1is_write }, 699 { .name = "TLBI_ALLE1IS", .state = ARM_CP_STATE_AA64, 700 .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 4, 701 .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS, 702 .writefn = tlbi_aa64_alle1is_write }, 703 { .name = "TLBI_VMALLS12E1IS", .state = ARM_CP_STATE_AA64, 704 .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 6, 705 .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS, 706 .writefn = tlbi_aa64_alle1is_write }, 707 { .name = "TLBI_IPAS2E1", .state = ARM_CP_STATE_AA64, 708 .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 1, 709 .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS, 710 .writefn = tlbi_aa64_ipas2e1_write }, 711 { .name = "TLBI_IPAS2LE1", .state = ARM_CP_STATE_AA64, 712 .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 5, 713 .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS, 714 .writefn = tlbi_aa64_ipas2e1_write }, 715 { .name = "TLBI_ALLE1", .state = ARM_CP_STATE_AA64, 716 .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 4, 717 .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS, 718 .writefn = tlbi_aa64_alle1_write }, 719 { .name = "TLBI_VMALLS12E1", .state = ARM_CP_STATE_AA64, 720 .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 6, 721 .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS, 722 .writefn = tlbi_aa64_alle1is_write }, 723 }; 724 725 static const ARMCPRegInfo tlbi_el2_cp_reginfo[] = { 726 { .name = "TLBIALLNSNH", 727 .cp = 15, .opc1 = 4, 
static const ARMCPRegInfo tlbi_el2_cp_reginfo[] = {
    { .name = "TLBIALLNSNH",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 4,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiall_nsnh_write },
    { .name = "TLBIALLNSNHIS",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 4,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiall_nsnh_is_write },
    { .name = "TLBIALLH", .cp = 15, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiall_hyp_write },
    { .name = "TLBIALLHIS", .cp = 15, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiall_hyp_is_write },
    { .name = "TLBIMVAH", .cp = 15, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbimva_hyp_write },
    { .name = "TLBIMVAHIS", .cp = 15, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbimva_hyp_is_write },
    { .name = "TLBI_ALLE2", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 0,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_alle2_write },
    { .name = "TLBI_VAE2", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2_write },
    { .name = "TLBI_VALE2", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2_write },
    { .name = "TLBI_ALLE2IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 0,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_alle2is_write },
    { .name = "TLBI_VAE2IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2is_write },
    { .name = "TLBI_VALE2IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2is_write },
};

static const ARMCPRegInfo tlbi_el3_cp_reginfo[] = {
    { .name = "TLBI_ALLE3IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 3, .opc2 = 0,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle3is_write },
    { .name = "TLBI_VAE3IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 3, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3is_write },
    { .name = "TLBI_VALE3IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 3, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3is_write },
    { .name = "TLBI_ALLE3", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 7, .opc2 = 0,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle3_write },
    { .name = "TLBI_VAE3", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 7, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3_write },
    { .name = "TLBI_VALE3", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 7, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3_write },
};

#ifdef TARGET_AARCH64
typedef struct {
    uint64_t base;
    uint64_t length;
} TLBIRange;

static ARMGranuleSize tlbi_range_tg_to_gran_size(int tg)
{
    /*
     * Note that the TLBI range TG field encoding differs from both
     * TG0 and TG1 encodings.
     */
    switch (tg) {
    case 1:
        return Gran4K;
    case 2:
        return Gran16K;
    case 3:
        return Gran64K;
    default:
        return GranInvalid;
    }
}

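/*
 * Decode the Xt payload of a TLBI range operation (TLBI RVAE1 etc.) into
 * a base address and length.  The payload layout is:
 *   ASID[63:48], TG[47:46], SCALE[45:44], NUM[43:39], TTL[38:37],
 *   BaseADDR[36:0]
 * and the range covers (NUM + 1) * 2^(5 * SCALE + 1) translation granules
 * starting at BaseADDR.  The ASID and TTL fields are ignored here because
 * QEMU does not do flush-by-ASID-only or flush-last-level-only; the
 * FEAT_LPA2 (DS) form, where BaseADDR is in units of 64KB, is handled
 * below.
 */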
static TLBIRange tlbi_aa64_get_range(CPUARMState *env, ARMMMUIdx mmuidx,
                                     uint64_t value)
{
    unsigned int page_size_granule, page_shift, num, scale, exponent;
    /* Extract one bit to represent the va selector in use. */
    uint64_t select = sextract64(value, 36, 1);
    ARMVAParameters param = aa64_va_parameters(env, select, mmuidx, true, false);
    TLBIRange ret = { };
    ARMGranuleSize gran;

    page_size_granule = extract64(value, 46, 2);
    gran = tlbi_range_tg_to_gran_size(page_size_granule);

    /* The granule encoded in value must match the granule in use. */
    if (gran != param.gran) {
        qemu_log_mask(LOG_GUEST_ERROR, "Invalid tlbi page size granule %d\n",
                      page_size_granule);
        return ret;
    }

    page_shift = arm_granule_bits(gran);
    num = extract64(value, 39, 5);
    scale = extract64(value, 44, 2);
    exponent = (5 * scale) + 1;

    ret.length = (num + 1) << (exponent + page_shift);

    if (param.select) {
        ret.base = sextract64(value, 0, 37);
    } else {
        ret.base = extract64(value, 0, 37);
    }
    if (param.ds) {
        /*
         * With DS=1, BaseADDR is always shifted 16 so that it is able
         * to address all 52 va bits.  The input address is perforce
         * aligned on a 64k boundary regardless of translation granule.
         */
        page_shift = 16;
    }
    ret.base <<= page_shift;

    return ret;
}

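/*
 * Flush the range described by value from the TLBs in idxmap.  All the
 * MMU indexes in idxmap belong to a single translation regime, so it is
 * enough to decode the range and the TBI bits against the lowest
 * numbered index in the map.
 */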
static void do_rvae_write(CPUARMState *env, uint64_t value,
                          int idxmap, bool synced)
{
    ARMMMUIdx one_idx = ARM_MMU_IDX_A | ctz32(idxmap);
    TLBIRange range;
    int bits;

    range = tlbi_aa64_get_range(env, one_idx, value);
    bits = tlbbits_for_regime(env, one_idx, range.base);

    if (synced) {
        tlb_flush_range_by_mmuidx_all_cpus_synced(env_cpu(env),
                                                  range.base,
                                                  range.length,
                                                  idxmap,
                                                  bits);
    } else {
        tlb_flush_range_by_mmuidx(env_cpu(env), range.base,
                                  range.length, idxmap, bits);
    }
}

static void tlbi_aa64_rvae1_write(CPUARMState *env,
                                  const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    /*
     * Invalidate by VA range, EL1&0.
     * Currently handles all of RVAE1, RVAAE1, RVAALE1 and RVALE1,
     * since we don't support flush-for-specific-ASID-only or
     * flush-last-level-only.
     */

    do_rvae_write(env, value, vae1_tlbmask(env),
                  tlb_force_broadcast(env));
}

static void tlbi_aa64_rvae1is_write(CPUARMState *env,
                                    const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    /*
     * Invalidate by VA range, Inner/Outer Shareable EL1&0.
     * Currently handles all of RVAE1IS, RVAE1OS, RVAAE1IS, RVAAE1OS,
     * RVAALE1IS, RVAALE1OS, RVALE1IS and RVALE1OS, since we don't support
     * flush-for-specific-ASID-only, flush-last-level-only or inner/outer
     * shareable specific flushes.
     */

    do_rvae_write(env, value, vae1_tlbmask(env), true);
}

static void tlbi_aa64_rvae2_write(CPUARMState *env,
                                  const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    /*
     * Invalidate by VA range, EL2.
     * Currently handles all of RVAE2 and RVALE2,
     * since we don't support flush-for-specific-ASID-only or
     * flush-last-level-only.
     */

    do_rvae_write(env, value, vae2_tlbmask(env),
                  tlb_force_broadcast(env));
}

static void tlbi_aa64_rvae2is_write(CPUARMState *env,
                                    const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    /*
     * Invalidate by VA range, Inner/Outer Shareable, EL2.
     * Currently handles all of RVAE2IS, RVAE2OS, RVALE2IS and RVALE2OS,
     * since we don't support flush-for-specific-ASID-only,
     * flush-last-level-only or inner/outer shareable specific flushes.
     */

    do_rvae_write(env, value, vae2_tlbmask(env), true);
}

static void tlbi_aa64_rvae3_write(CPUARMState *env,
                                  const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    /*
     * Invalidate by VA range, EL3.
     * Currently handles all of RVAE3 and RVALE3,
     * since we don't support flush-for-specific-ASID-only or
     * flush-last-level-only.
     */

    do_rvae_write(env, value, ARMMMUIdxBit_E3, tlb_force_broadcast(env));
}

static void tlbi_aa64_rvae3is_write(CPUARMState *env,
                                    const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    /*
     * Invalidate by VA range, EL3, Inner/Outer Shareable.
     * Currently handles all of RVAE3IS, RVAE3OS, RVALE3IS and RVALE3OS,
     * since we don't support flush-for-specific-ASID-only,
     * flush-last-level-only or inner/outer shareable specific flushes.
     */

    do_rvae_write(env, value, ARMMMUIdxBit_E3, true);
}

static void tlbi_aa64_ripas2e1_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                     uint64_t value)
{
    do_rvae_write(env, value, ipas2e1_tlbmask(env, value),
                  tlb_force_broadcast(env));
}

static void tlbi_aa64_ripas2e1is_write(CPUARMState *env,
                                       const ARMCPRegInfo *ri,
                                       uint64_t value)
{
    do_rvae_write(env, value, ipas2e1_tlbmask(env, value), true);
}

static const ARMCPRegInfo tlbirange_reginfo[] = {
    { .name = "TLBI_RVAE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 2, .opc2 = 1,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAE1IS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAAE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 2, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAAE1IS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVALE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 2, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVALE1IS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAALE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 2, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAALE1IS,
      .writefn = tlbi_aa64_rvae1is_write },
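    /*
     * Outer Shareable range ops: QEMU does not distinguish Inner from
     * Outer Shareable, so these reuse the Inner Shareable write functions.
     */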
    { .name = "TLBI_RVAE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 1,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAE1OS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAAE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAAE1OS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVALE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVALE1OS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAALE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAALE1OS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 1,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAE1,
      .writefn = tlbi_aa64_rvae1_write },
    { .name = "TLBI_RVAAE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAAE1,
      .writefn = tlbi_aa64_rvae1_write },
    { .name = "TLBI_RVALE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVALE1,
      .writefn = tlbi_aa64_rvae1_write },
    { .name = "TLBI_RVAALE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAALE1,
      .writefn = tlbi_aa64_rvae1_write },
    { .name = "TLBI_RIPAS2E1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 2,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ripas2e1is_write },
    { .name = "TLBI_RIPAS2LE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 6,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ripas2e1is_write },
    { .name = "TLBI_RVAE2IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 2, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2is_write },
    { .name = "TLBI_RVALE2IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 2, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2is_write },
    { .name = "TLBI_RIPAS2E1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 2,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ripas2e1_write },
    { .name = "TLBI_RIPAS2LE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 6,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ripas2e1_write },
    { .name = "TLBI_RVAE2OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 5, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2is_write },
    { .name = "TLBI_RVALE2OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 5, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2is_write },
    { .name = "TLBI_RVAE2", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 6, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2_write },
    { .name = "TLBI_RVALE2", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 6, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2_write },
    { .name = "TLBI_RVAE3IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 2, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3is_write },
    { .name = "TLBI_RVALE3IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 2, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3is_write },
    { .name = "TLBI_RVAE3OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 5, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3is_write },
    { .name = "TLBI_RVALE3OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 5, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3is_write },
    { .name = "TLBI_RVAE3", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 6, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3_write },
    { .name = "TLBI_RVALE3", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 6, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3_write },
};

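/*
 * FEAT_TLBIOS: Outer Shareable TLBI operations.  QEMU does not model a
 * distinction between Inner and Outer Shareable, so most of these reuse
 * the Inner Shareable write functions.
 */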
static const ARMCPRegInfo tlbios_reginfo[] = {
    { .name = "TLBI_VMALLE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 0,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVMALLE1OS,
      .writefn = tlbi_aa64_vmalle1is_write },
    { .name = "TLBI_VAE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 1,
      .fgt = FGT_TLBIVAE1OS,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_ASIDE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 2,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIASIDE1OS,
      .writefn = tlbi_aa64_vmalle1is_write },
    { .name = "TLBI_VAAE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAAE1OS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_VALE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVALE1OS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_VAALE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAALE1OS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_ALLE2OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 1, .opc2 = 0,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_alle2is_write },
    { .name = "TLBI_VAE2OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 1, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2is_write },
    { .name = "TLBI_ALLE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 1, .opc2 = 4,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle1is_write },
    { .name = "TLBI_VALE2OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 1, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2is_write },
    { .name = "TLBI_VMALLS12E1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 1, .opc2 = 6,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle1is_write },
    { .name = "TLBI_IPAS2E1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 0,
      .access = PL2_W, .type = ARM_CP_NOP | ARM_CP_ADD_TLBI_NXS },
    { .name = "TLBI_RIPAS2E1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 3,
      .access = PL2_W, .type = ARM_CP_NOP | ARM_CP_ADD_TLBI_NXS },
    { .name = "TLBI_IPAS2LE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 4,
      .access = PL2_W, .type = ARM_CP_NOP | ARM_CP_ADD_TLBI_NXS },
    { .name = "TLBI_RIPAS2LE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 7,
      .access = PL2_W, .type = ARM_CP_NOP | ARM_CP_ADD_TLBI_NXS },
    { .name = "TLBI_ALLE3OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 1, .opc2 = 0,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle3is_write },
    { .name = "TLBI_VAE3OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 1, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3is_write },
    { .name = "TLBI_VALE3OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 1, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3is_write },
};

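/*
 * FEAT_RME TLBI operations by physical address.  QEMU has no way to
 * invalidate TLB entries by physical address, so both of these simply
 * flush everything (locally for PAALL, across all CPUs for PAALLOS);
 * see also the comment in tlbi_rme_reginfo[] below.
 */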
static void tlbi_aa64_paall_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush(cs);
}

static void tlbi_aa64_paallos_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_all_cpus_synced(cs);
}

static const ARMCPRegInfo tlbi_rme_reginfo[] = {
    { .name = "TLBI_PAALL", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 7, .opc2 = 4,
      .access = PL3_W, .type = ARM_CP_NO_RAW,
      .writefn = tlbi_aa64_paall_write },
    { .name = "TLBI_PAALLOS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 1, .opc2 = 4,
      .access = PL3_W, .type = ARM_CP_NO_RAW,
      .writefn = tlbi_aa64_paallos_write },
    /*
     * QEMU does not have a way to invalidate by physical address, thus
     * invalidating a range of physical addresses is accomplished by
     * flushing all tlb entries in the outer shareable domain,
     * just like PAALLOS.
     */
    { .name = "TLBI_RPALOS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 4, .opc2 = 7,
      .access = PL3_W, .type = ARM_CP_NO_RAW,
      .writefn = tlbi_aa64_paallos_write },
    { .name = "TLBI_RPAOS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 4, .opc2 = 3,
      .access = PL3_W, .type = ARM_CP_NO_RAW,
      .writefn = tlbi_aa64_paallos_write },
};

#endif

void define_tlb_insn_regs(ARMCPU *cpu)
{
    CPUARMState *env = &cpu->env;

    if (!arm_feature(env, ARM_FEATURE_V7)) {
        define_arm_cp_regs(cpu, tlbi_not_v7_cp_reginfo);
    } else {
        define_arm_cp_regs(cpu, tlbi_v7_cp_reginfo);
    }
    if (arm_feature(env, ARM_FEATURE_V7MP) &&
        !arm_feature(env, ARM_FEATURE_PMSA)) {
        define_arm_cp_regs(cpu, tlbi_v7mp_cp_reginfo);
    }
    if (arm_feature(env, ARM_FEATURE_V8)) {
        define_arm_cp_regs(cpu, tlbi_v8_cp_reginfo);
    }
    /*
     * We retain the existing logic for when to register these TLBI
     * ops (i.e. matching the condition for el2_cp_reginfo[] in
     * helper.c), but we will be able to simplify this later.
     */
    if (arm_feature(env, ARM_FEATURE_EL2)) {
        define_arm_cp_regs(cpu, tlbi_el2_cp_reginfo);
    }
    if (arm_feature(env, ARM_FEATURE_EL3)) {
        define_arm_cp_regs(cpu, tlbi_el3_cp_reginfo);
    }
#ifdef TARGET_AARCH64
    if (cpu_isar_feature(aa64_tlbirange, cpu)) {
        define_arm_cp_regs(cpu, tlbirange_reginfo);
    }
    if (cpu_isar_feature(aa64_tlbios, cpu)) {
        define_arm_cp_regs(cpu, tlbios_reginfo);
    }
    if (cpu_isar_feature(aa64_rme, cpu)) {
        define_arm_cp_regs(cpu, tlbi_rme_reginfo);
    }
#endif
}