Lines Matching full:cpu
2 * QEMU ARM CPU
30 #include "cpu.h"
33 #include "accel/tcg/cpu-ops.h"
36 #include "cpu-features.h"
53 #include "target/arm/cpu-qom.h"
58 ARMCPU *cpu = ARM_CPU(cs); in arm_cpu_set_pc() local
59 CPUARMState *env = &cpu->env; in arm_cpu_set_pc()
72 ARMCPU *cpu = ARM_CPU(cs); in arm_cpu_get_pc() local
73 CPUARMState *env = &cpu->env; in arm_cpu_get_pc()
143 ARMCPU *cpu = ARM_CPU(cs); in arm_cpu_has_work() local
145 return (cpu->power_state != PSCI_OFF) in arm_cpu_has_work()
154 void arm_register_pre_el_change_hook(ARMCPU *cpu, ARMELChangeHookFn *hook, in arm_register_pre_el_change_hook() argument
162 QLIST_INSERT_HEAD(&cpu->pre_el_change_hooks, entry, node); in arm_register_pre_el_change_hook()
165 void arm_register_el_change_hook(ARMCPU *cpu, ARMELChangeHookFn *hook, in arm_register_el_change_hook() argument
173 QLIST_INSERT_HEAD(&cpu->el_change_hooks, entry, node); in arm_register_el_change_hook()
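(Aside: the two registration helpers above take an ARMELChangeHookFn plus an opaque pointer. A minimal sketch of how a subsystem hooks exception-level changes; the callback name and body are hypothetical, only the registration calls mirror the listing:)

    /* Hypothetical callback matching the ARMELChangeHookFn signature. */
    static void my_el_change_cb(ARMCPU *cpu, void *opaque)
    {
        /* runs just before / just after each EL change, e.g. to resync state */
    }

    static void my_subsystem_attach(ARMCPU *cpu)
    {
        arm_register_pre_el_change_hook(cpu, &my_el_change_cb, NULL);
        arm_register_el_change_hook(cpu, &my_el_change_cb, NULL);
    }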
180 ARMCPU *cpu = opaque; in cp_reg_reset() local
187 ri->resetfn(&cpu->env, ri); in cp_reg_reset()
201 CPREG_FIELD64(&cpu->env, ri) = ri->resetvalue; in cp_reg_reset()
203 CPREG_FIELD32(&cpu->env, ri) = ri->resetvalue; in cp_reg_reset()
215 ARMCPU *cpu = opaque; in cp_reg_check_reset() local
222 oldvalue = read_raw_cp_reg(&cpu->env, ri); in cp_reg_check_reset()
224 newvalue = read_raw_cp_reg(&cpu->env, ri); in cp_reg_check_reset()
230 CPUState *cs = CPU(obj); in arm_cpu_reset_hold()
231 ARMCPU *cpu = ARM_CPU(cs); in arm_cpu_reset_hold() local
233 CPUARMState *env = &cpu->env; in arm_cpu_reset_hold()
241 g_hash_table_foreach(cpu->cp_regs, cp_reg_reset, cpu); in arm_cpu_reset_hold()
242 g_hash_table_foreach(cpu->cp_regs, cp_reg_check_reset, cpu); in arm_cpu_reset_hold()
244 env->vfp.xregs[ARM_VFP_FPSID] = cpu->reset_fpsid; in arm_cpu_reset_hold()
245 env->vfp.xregs[ARM_VFP_MVFR0] = cpu->isar.mvfr0; in arm_cpu_reset_hold()
246 env->vfp.xregs[ARM_VFP_MVFR1] = cpu->isar.mvfr1; in arm_cpu_reset_hold()
247 env->vfp.xregs[ARM_VFP_MVFR2] = cpu->isar.mvfr2; in arm_cpu_reset_hold()
249 cpu->power_state = cs->start_powered_off ? PSCI_OFF : PSCI_ON; in arm_cpu_reset_hold()
268 if (cpu_isar_feature(aa64_tidcp1, cpu)) { in arm_cpu_reset_hold()
275 if (cpu_isar_feature(aa64_sve, cpu)) { in arm_cpu_reset_hold()
278 env->vfp.zcr_el[1] = cpu->sve_default_vq - 1; in arm_cpu_reset_hold()
281 if (cpu_isar_feature(aa64_sme, cpu)) { in arm_cpu_reset_hold()
285 env->vfp.smcr_el[1] = cpu->sme_default_vq - 1; in arm_cpu_reset_hold()
286 if (cpu_isar_feature(aa64_sme_fa64, cpu)) { in arm_cpu_reset_hold()
299 if (cpu_isar_feature(aa64_mte, cpu)) { in arm_cpu_reset_hold()
332 env->cp15.rvbar = cpu->rvbar_prop; in arm_cpu_reset_hold()
344 env->cp15.rvbar = cpu->rvbar_prop; in arm_cpu_reset_hold()
345 env->regs[15] = cpu->rvbar_prop; in arm_cpu_reset_hold()
392 if (cpu_isar_feature(aa32_lob, cpu)) { in arm_cpu_reset_hold()
417 * v8.1M the guest-visible value of NSACR in a CPU without the in arm_cpu_reset_hold()
425 * it dependent on CPU model. In v8M it is RES1. in arm_cpu_reset_hold()
439 if (cpu_isar_feature(aa32_vfp_simd, cpu)) { in arm_cpu_reset_hold()
449 env->v7m.vecbase[M_REG_S] = cpu->init_svtor & 0xffffff80; in arm_cpu_reset_hold()
450 env->v7m.vecbase[M_REG_NS] = cpu->init_nsvtor & 0xffffff80; in arm_cpu_reset_hold()
500 if (cpu->pmsav7_dregion > 0) { in arm_cpu_reset_hold()
504 * cpu->pmsav7_dregion); in arm_cpu_reset_hold()
507 * cpu->pmsav7_dregion); in arm_cpu_reset_hold()
511 * cpu->pmsav7_dregion); in arm_cpu_reset_hold()
514 * cpu->pmsav7_dregion); in arm_cpu_reset_hold()
518 sizeof(*env->pmsav7.drbar) * cpu->pmsav7_dregion); in arm_cpu_reset_hold()
520 sizeof(*env->pmsav7.drsr) * cpu->pmsav7_dregion); in arm_cpu_reset_hold()
522 sizeof(*env->pmsav7.dracr) * cpu->pmsav7_dregion); in arm_cpu_reset_hold()
526 if (cpu->pmsav8r_hdregion > 0) { in arm_cpu_reset_hold()
528 sizeof(*env->pmsav8.hprbar) * cpu->pmsav8r_hdregion); in arm_cpu_reset_hold()
530 sizeof(*env->pmsav8.hprlar) * cpu->pmsav8r_hdregion); in arm_cpu_reset_hold()
542 if (cpu->sau_sregion > 0) { in arm_cpu_reset_hold()
543 memset(env->sau.rbar, 0, sizeof(*env->sau.rbar) * cpu->sau_sregion); in arm_cpu_reset_hold()
544 memset(env->sau.rlar, 0, sizeof(*env->sau.rlar) * cpu->sau_sregion); in arm_cpu_reset_hold()
570 kvm_arm_reset_vcpu(cpu); in arm_cpu_reset_hold()
575 hw_breakpoint_update_all(cpu); in arm_cpu_reset_hold()
576 hw_watchpoint_update_all(cpu); in arm_cpu_reset_hold()
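(Aside: arm_cpu_reset_hold() above is the Resettable hold-phase handler and is not called directly; a minimal sketch of how it gets triggered, assuming a cpu pointer is in scope:)

    /* Either entry point ends up running arm_cpu_reset_hold() for the hold phase. */
    device_cold_reset(DEVICE(cpu));   /* cold reset via the qdev/Resettable API */
    cpu_reset(CPU(cpu));              /* equivalent reset via the CPUState API */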
584 ARMCPU *cpu = ARM_CPU(cpustate); in arm_emulate_firmware_reset() local
585 CPUARMState *env = &cpu->env; in arm_emulate_firmware_reset()
620 if (cpu_isar_feature(aa64_pauth, cpu)) { in arm_emulate_firmware_reset()
623 if (cpu_isar_feature(aa64_mte, cpu)) { in arm_emulate_firmware_reset()
626 if (cpu_isar_feature(aa64_sve, cpu)) { in arm_emulate_firmware_reset()
630 if (cpu_isar_feature(aa64_sme, cpu)) { in arm_emulate_firmware_reset()
635 if (cpu_isar_feature(aa64_hcx, cpu)) { in arm_emulate_firmware_reset()
638 if (cpu_isar_feature(aa64_fgt, cpu)) { in arm_emulate_firmware_reset()
648 /* Put CPU into non-secure state */ in arm_emulate_firmware_reset()
661 /* Set the CPU to the desired state */ in arm_emulate_firmware_reset()
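(Aside: arm_emulate_firmware_reset() is used when QEMU itself stands in for firmware, e.g. when a kernel is loaded directly; a minimal sketch of the call, with the target EL chosen by the caller:)

    /* Sketch: drop the CPU from its reset EL to the EL the payload expects. */
    int boot_el = 1;                          /* or 2 for a kernel entered at EL2 */
    arm_emulate_firmware_reset(CPU(cpu), boot_el);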
942 void arm_cpu_update_virq(ARMCPU *cpu) in arm_cpu_update_virq() argument
948 CPUARMState *env = &cpu->env; in arm_cpu_update_virq()
949 CPUState *cs = CPU(cpu); in arm_cpu_update_virq()
964 void arm_cpu_update_vfiq(ARMCPU *cpu) in arm_cpu_update_vfiq() argument
970 CPUARMState *env = &cpu->env; in arm_cpu_update_vfiq()
971 CPUState *cs = CPU(cpu); in arm_cpu_update_vfiq()
986 void arm_cpu_update_vinmi(ARMCPU *cpu) in arm_cpu_update_vinmi() argument
992 CPUARMState *env = &cpu->env; in arm_cpu_update_vinmi()
993 CPUState *cs = CPU(cpu); in arm_cpu_update_vinmi()
1008 void arm_cpu_update_vfnmi(ARMCPU *cpu) in arm_cpu_update_vfnmi() argument
1013 CPUARMState *env = &cpu->env; in arm_cpu_update_vfnmi()
1014 CPUState *cs = CPU(cpu); in arm_cpu_update_vfnmi()
1028 void arm_cpu_update_vserr(ARMCPU *cpu) in arm_cpu_update_vserr() argument
1033 CPUARMState *env = &cpu->env; in arm_cpu_update_vserr()
1034 CPUState *cs = CPU(cpu); in arm_cpu_update_vserr()
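(Aside: all of the arm_cpu_update_v*() helpers above share one pattern; a minimal sketch of that pattern for the VIRQ case, simplified from the real helper:)

    /* Sketch: derive the new virtual-IRQ state from HCR_EL2 plus any externally
     * driven line, and raise or lower CPU_INTERRUPT_VIRQ only on a change. */
    static void update_virq_sketch(ARMCPU *cpu)
    {
        CPUARMState *env = &cpu->env;
        CPUState *cs = CPU(cpu);
        bool new_state = (arm_hcr_el2_eff(env) & HCR_VI) ||
                         (env->irq_line_state & CPU_INTERRUPT_VIRQ);

        if (new_state != ((cs->interrupt_request & CPU_INTERRUPT_VIRQ) != 0)) {
            if (new_state) {
                cpu_interrupt(cs, CPU_INTERRUPT_VIRQ);
            } else {
                cpu_reset_interrupt(cs, CPU_INTERRUPT_VIRQ);
            }
        }
    }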
1050 ARMCPU *cpu = opaque; in arm_cpu_set_irq() local
1051 CPUARMState *env = &cpu->env; in arm_cpu_set_irq()
1052 CPUState *cs = CPU(cpu); in arm_cpu_set_irq()
1080 arm_cpu_update_virq(cpu); in arm_cpu_set_irq()
1083 arm_cpu_update_vfiq(cpu); in arm_cpu_set_irq()
1086 arm_cpu_update_vinmi(cpu); in arm_cpu_set_irq()
1104 ARMCPU *cpu = ARM_CPU(cs); in arm_cpu_virtio_is_big_endian() local
1105 CPUARMState *env = &cpu->env; in arm_cpu_virtio_is_big_endian()
1118 ARMCPU *cpu = ARM_CPU(cs); in arm_cpu_exec_halt() local
1119 if (cpu->wfxt_timer) { in arm_cpu_exec_halt()
1120 timer_del(cpu->wfxt_timer); in arm_cpu_exec_halt()
1129 ARMCPU *cpu = opaque; in arm_wfxt_timer_cb() local
1130 CPUState *cs = CPU(cpu); in arm_wfxt_timer_cb()
1133      * We expect the CPU to be halted; this will cause arm_cpu_has_work() in arm_wfxt_timer_cb()
1142 static void arm_disas_set_info(CPUState *cpu, disassemble_info *info) in arm_disas_set_info() argument
1144 ARMCPU *ac = ARM_CPU(cpu); in arm_disas_set_info()
1187 ARMCPU *cpu = ARM_CPU(cs); in aarch64_cpu_dump_state() local
1188 CPUARMState *env = &cpu->env; in aarch64_cpu_dump_state()
1221 if (cpu_isar_feature(aa64_sme, cpu)) { in aarch64_cpu_dump_state()
1227 if (cpu_isar_feature(aa64_bti, cpu)) { in aarch64_cpu_dump_state()
1245 if (cpu_isar_feature(aa64_sme, cpu) && FIELD_EX64(env->svcr, SVCR, SM)) { in aarch64_cpu_dump_state()
1247 } else if (cpu_isar_feature(aa64_sve, cpu)) { in aarch64_cpu_dump_state()
1323 if (cpu_isar_feature(aa64_sme, cpu) && in aarch64_cpu_dump_state()
1344 ARMCPU *cpu = ARM_CPU(cs); in arm_cpu_dump_state() local
1345 CPUARMState *env = &cpu->env; in arm_cpu_dump_state()
1412 if (cpu_isar_feature(aa32_simd_r32, cpu)) { in arm_cpu_dump_state()
1414 } else if (cpu_isar_feature(aa32_vfp_simd, cpu)) { in arm_cpu_dump_state()
1425 if (cpu_isar_feature(aa32_mve, cpu)) { in arm_cpu_dump_state()
1438 uint64_t arm_cpu_mp_affinity(ARMCPU *cpu) in arm_cpu_mp_affinity() argument
1440 return cpu->mp_affinity; in arm_cpu_mp_affinity()
1445 ARMCPU *cpu = ARM_CPU(obj); in arm_cpu_initfn() local
1447 cpu->cp_regs = g_hash_table_new_full(g_direct_hash, g_direct_equal, in arm_cpu_initfn()
1450 QLIST_INIT(&cpu->pre_el_change_hooks); in arm_cpu_initfn()
1451 QLIST_INIT(&cpu->el_change_hooks); in arm_cpu_initfn()
1459 * and our corresponding cpu property. in arm_cpu_initfn()
1461 cpu->sve_default_vq = 4; in arm_cpu_initfn()
1462 cpu->sme_default_vq = 2; in arm_cpu_initfn()
1471 qdev_init_gpio_in(DEVICE(cpu), arm_cpu_kvm_set_irq, 6); in arm_cpu_initfn()
1473 qdev_init_gpio_in(DEVICE(cpu), arm_cpu_set_irq, 6); in arm_cpu_initfn()
1476 qdev_init_gpio_out(DEVICE(cpu), cpu->gt_timer_outputs, in arm_cpu_initfn()
1477 ARRAY_SIZE(cpu->gt_timer_outputs)); in arm_cpu_initfn()
1479 qdev_init_gpio_out_named(DEVICE(cpu), &cpu->gicv3_maintenance_interrupt, in arm_cpu_initfn()
1481 qdev_init_gpio_out_named(DEVICE(cpu), &cpu->pmu_interrupt, in arm_cpu_initfn()
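(Aside: the GPIO lines created above are what board code wires up; a minimal sketch, where cpudev, gicdev and ppi_phys_timer are hypothetical board-level variables:)

    /* Connect the physical timer output to a GIC PPI, and feed the GIC's IRQ
     * output back into the CPU's IRQ input line (ARM_CPU_IRQ). */
    qdev_connect_gpio_out(cpudev, GTIMER_PHYS,
                          qdev_get_gpio_in(gicdev, ppi_phys_timer));
    sysbus_connect_irq(SYS_BUS_DEVICE(gicdev), 0,
                       qdev_get_gpio_in(cpudev, ARM_CPU_IRQ));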
1489 cpu->dtb_compatible = "qemu,unknown"; in arm_cpu_initfn()
1490 cpu->psci_version = QEMU_PSCI_VERSION_0_1; /* By default assume PSCI v0.1 */ in arm_cpu_initfn()
1491 cpu->kvm_target = QEMU_KVM_ARM_TARGET_NONE; in arm_cpu_initfn()
1495 cpu->psci_version = QEMU_PSCI_VERSION_1_1; in arm_cpu_initfn()
1501 * on the CPU type, and is set in the realize fn.
1539 * because the CPU initfn will have already set cpu->pmsav7_dregion to
1540 * the right value for that particular CPU type, and we don't want
1550 ARMCPU *cpu = ARM_CPU(obj); in arm_get_pmu() local
1552 return cpu->has_pmu; in arm_get_pmu()
1557 ARMCPU *cpu = ARM_CPU(obj); in arm_set_pmu() local
1564 set_feature(&cpu->env, ARM_FEATURE_PMU); in arm_set_pmu()
1566 unset_feature(&cpu->env, ARM_FEATURE_PMU); in arm_set_pmu()
1568 cpu->has_pmu = value; in arm_set_pmu()
1573 ARMCPU *cpu = ARM_CPU(obj); in aarch64_cpu_get_aarch64() local
1575 return arm_feature(&cpu->env, ARM_FEATURE_AARCH64); in aarch64_cpu_get_aarch64()
1580 ARMCPU *cpu = ARM_CPU(obj); in aarch64_cpu_set_aarch64() local
1594 unset_feature(&cpu->env, ARM_FEATURE_AARCH64); in aarch64_cpu_set_aarch64()
1596 set_feature(&cpu->env, ARM_FEATURE_AARCH64); in aarch64_cpu_set_aarch64()
1600 unsigned int gt_cntfrq_period_ns(ARMCPU *cpu) in gt_cntfrq_period_ns() argument
1605 * muldiv64(qemu_clock_get_ns(QEMU_CLOCK_VIRTUAL), cpu->gt_cntfrq_hz, in gt_cntfrq_period_ns()
1613 * `(NANOSECONDS_PER_SECOND % cpu->gt_cntfrq) > 0` holds. Failing to in gt_cntfrq_period_ns()
1620 return NANOSECONDS_PER_SECOND > cpu->gt_cntfrq_hz ? in gt_cntfrq_period_ns()
1621 NANOSECONDS_PER_SECOND / cpu->gt_cntfrq_hz : 1; in gt_cntfrq_period_ns()
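(Aside: the clamped division above is easy to sanity-check against the two frequencies the realize code chooses between; the concrete Hz values below are assumed from the usual defaults, not quoted from this file:)

    /* Worked example of gt_cntfrq_period_ns():
     *   cntfrq = 1 GHz    -> 1000000000 / 1000000000 = 1 ns per tick
     *   cntfrq = 62.5 MHz -> 1000000000 / 62500000   = 16 ns per tick
     *   cntfrq > 1 GHz    -> the division would truncate to 0, so it clamps to 1
     * The result is used as the QEMUTimer scale for the generic timers. */
    uint64_t scale = gt_cntfrq_period_ns(cpu);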
1624 static void arm_cpu_propagate_feature_implications(ARMCPU *cpu) in arm_cpu_propagate_feature_implications() argument
1626 CPUARMState *env = &cpu->env; in arm_cpu_propagate_feature_implications()
1654 if (arm_feature(&cpu->env, ARM_FEATURE_AARCH64)) { in arm_cpu_propagate_feature_implications()
1655 no_aa32 = !cpu_isar_feature(aa64_aa32, cpu); in arm_cpu_propagate_feature_implications()
1663 * CPUs or CPU configs which have no actual EL2 or EL3 but do in arm_cpu_propagate_feature_implications()
1669 cpu_isar_feature(aa32_arm_div, cpu)); in arm_cpu_propagate_feature_implications()
1697 cpu_isar_feature(aa32_jazelle, cpu)); in arm_cpu_propagate_feature_implications()
1718 ARMCPU *cpu = ARM_CPU(obj); in arm_cpu_post_init() local
1725 arm_cpu_propagate_feature_implications(cpu); in arm_cpu_post_init()
1727 if (arm_feature(&cpu->env, ARM_FEATURE_AARCH64)) { in arm_cpu_post_init()
1734 if (arm_feature(&cpu->env, ARM_FEATURE_CBAR) || in arm_cpu_post_init()
1735 arm_feature(&cpu->env, ARM_FEATURE_CBAR_RO)) { in arm_cpu_post_init()
1739 if (!arm_feature(&cpu->env, ARM_FEATURE_M)) { in arm_cpu_post_init()
1743 if (arm_feature(&cpu->env, ARM_FEATURE_V8)) { in arm_cpu_post_init()
1745 &cpu->rvbar_prop, in arm_cpu_post_init()
1750 if (arm_feature(&cpu->env, ARM_FEATURE_EL3)) { in arm_cpu_post_init()
1751 /* Add the has_el3 state CPU property only if EL3 is allowed. This will in arm_cpu_post_init()
1758 (Object **)&cpu->secure_memory, in arm_cpu_post_init()
1763 if (arm_feature(&cpu->env, ARM_FEATURE_EL2)) { in arm_cpu_post_init()
1768 if (arm_feature(&cpu->env, ARM_FEATURE_PMU)) { in arm_cpu_post_init()
1769 cpu->has_pmu = true; in arm_cpu_post_init()
1778 if (arm_feature(&cpu->env, ARM_FEATURE_AARCH64)) { in arm_cpu_post_init()
1779 if (cpu_isar_feature(aa64_fp_simd, cpu)) { in arm_cpu_post_init()
1780 cpu->has_vfp = true; in arm_cpu_post_init()
1781 cpu->has_vfp_d32 = true; in arm_cpu_post_init()
1787 } else if (cpu_isar_feature(aa32_vfp, cpu)) { in arm_cpu_post_init()
1788 cpu->has_vfp = true; in arm_cpu_post_init()
1793 if (cpu_isar_feature(aa32_simd_r32, cpu)) { in arm_cpu_post_init()
1794 cpu->has_vfp_d32 = true; in arm_cpu_post_init()
1801 && !(arm_feature(&cpu->env, ARM_FEATURE_V8) in arm_cpu_post_init()
1802 && !arm_feature(&cpu->env, ARM_FEATURE_M))) { in arm_cpu_post_init()
1809 if (arm_feature(&cpu->env, ARM_FEATURE_NEON)) { in arm_cpu_post_init()
1810 cpu->has_neon = true; in arm_cpu_post_init()
1816 if (arm_feature(&cpu->env, ARM_FEATURE_M) && in arm_cpu_post_init()
1817 arm_feature(&cpu->env, ARM_FEATURE_THUMB_DSP)) { in arm_cpu_post_init()
1821 if (arm_feature(&cpu->env, ARM_FEATURE_PMSA)) { in arm_cpu_post_init()
1823 if (arm_feature(&cpu->env, ARM_FEATURE_V7)) { in arm_cpu_post_init()
1829 if (arm_feature(&cpu->env, ARM_FEATURE_M_SECURITY)) { in arm_cpu_post_init()
1830 object_property_add_link(obj, "idau", TYPE_IDAU_INTERFACE, &cpu->idau, in arm_cpu_post_init()
1839 &cpu->init_svtor, in arm_cpu_post_init()
1842 if (arm_feature(&cpu->env, ARM_FEATURE_M)) { in arm_cpu_post_init()
1848 &cpu->init_nsvtor, in arm_cpu_post_init()
1854 &cpu->psci_conduit, in arm_cpu_post_init()
1859 if (arm_feature(&cpu->env, ARM_FEATURE_GENERIC_TIMER)) { in arm_cpu_post_init()
1860 qdev_property_add_static(DEVICE(cpu), &arm_cpu_gt_cntfrq_property); in arm_cpu_post_init()
1864 kvm_arm_add_vcpu_properties(cpu); in arm_cpu_post_init()
1868 if (arm_feature(&cpu->env, ARM_FEATURE_AARCH64) && in arm_cpu_post_init()
1869 cpu_isar_feature(aa64_mte, cpu)) { in arm_cpu_post_init()
1872 (Object **)&cpu->tag_memory, in arm_cpu_post_init()
1876 if (arm_feature(&cpu->env, ARM_FEATURE_EL3)) { in arm_cpu_post_init()
1879 (Object **)&cpu->secure_tag_memory, in arm_cpu_post_init()
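(Aside: the properties and object links registered above are meant to be set by board code before realize; a minimal sketch, where cpuobj and secure_sysmem are hypothetical board variables and the property names are the ones added in this function:)

    /* Configure an A-profile CPU with EL3 and a distinct Secure memory view,
     * then realize it so arm_cpu_realizefn() picks the settings up. */
    object_property_set_bool(cpuobj, "has_el3", true, &error_abort);
    object_property_set_link(cpuobj, "secure-memory",
                             OBJECT(secure_sysmem), &error_abort);
    qdev_realize(DEVICE(cpuobj), NULL, &error_fatal);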
1889 ARMCPU *cpu = ARM_CPU(obj); in arm_cpu_finalizefn() local
1892 g_hash_table_destroy(cpu->cp_regs); in arm_cpu_finalizefn()
1894 QLIST_FOREACH_SAFE(hook, &cpu->pre_el_change_hooks, node, next) { in arm_cpu_finalizefn()
1898 QLIST_FOREACH_SAFE(hook, &cpu->el_change_hooks, node, next) { in arm_cpu_finalizefn()
1903 if (cpu->pmu_timer) { in arm_cpu_finalizefn()
1904 timer_free(cpu->pmu_timer); in arm_cpu_finalizefn()
1906 if (cpu->wfxt_timer) { in arm_cpu_finalizefn()
1907 timer_free(cpu->wfxt_timer); in arm_cpu_finalizefn()
1912 void arm_cpu_finalize_features(ARMCPU *cpu, Error **errp) in arm_cpu_finalize_features() argument
1916 if (arm_feature(&cpu->env, ARM_FEATURE_AARCH64)) { in arm_cpu_finalize_features()
1917 arm_cpu_sve_finalize(cpu, &local_err); in arm_cpu_finalize_features()
1929 if (cpu_isar_feature(aa64_sme, cpu) && !cpu_isar_feature(aa64_sve, cpu)) { in arm_cpu_finalize_features()
1930 object_property_set_bool(OBJECT(cpu), "sme", false, &error_abort); in arm_cpu_finalize_features()
1933 arm_cpu_sme_finalize(cpu, &local_err); in arm_cpu_finalize_features()
1939 arm_cpu_pauth_finalize(cpu, &local_err); in arm_cpu_finalize_features()
1945 arm_cpu_lpa2_finalize(cpu, &local_err); in arm_cpu_finalize_features()
1953 kvm_arm_steal_time_finalize(cpu, &local_err); in arm_cpu_finalize_features()
1963 CPUState *cs = CPU(dev); in arm_cpu_realizefn()
1964 ARMCPU *cpu = ARM_CPU(dev); in arm_cpu_realizefn() local
1966 CPUARMState *env = &cpu->env; in arm_cpu_realizefn()
1974 /* If we needed to query the host kernel for the CPU features in arm_cpu_realizefn()
1978 if (cpu->host_cpu_probe_failed) { in arm_cpu_realizefn()
1980 error_setg(errp, "The 'host' CPU type can only be used with KVM or HVF"); in arm_cpu_realizefn()
1982 error_setg(errp, "Failed to retrieve host CPU features"); in arm_cpu_realizefn()
1987 if (!cpu->gt_cntfrq_hz) { in arm_cpu_realizefn()
1996 * - for QEMU CPU types added before we standardized on 1GHz in arm_cpu_realizefn()
2000 cpu->backcompat_cntfrq) { in arm_cpu_realizefn()
2001 cpu->gt_cntfrq_hz = GTIMER_BACKCOMPAT_HZ; in arm_cpu_realizefn()
2003 cpu->gt_cntfrq_hz = GTIMER_DEFAULT_HZ; in arm_cpu_realizefn()
2008 /* The NVIC and M-profile CPU are two halves of a single piece of in arm_cpu_realizefn()
2031 * address space for the CPU (otherwise we will assert() later in in arm_cpu_realizefn()
2036 "Cannot enable %s when using an M-profile guest CPU", in arm_cpu_realizefn()
2040 if (cpu->has_el3) { in arm_cpu_realizefn()
2042 "Cannot enable %s when guest CPU has EL3 enabled", in arm_cpu_realizefn()
2046 if (cpu->tag_memory) { in arm_cpu_realizefn()
2055 uint64_t scale = gt_cntfrq_period_ns(cpu); in arm_cpu_realizefn()
2057 cpu->gt_timer[GTIMER_PHYS] = timer_new(QEMU_CLOCK_VIRTUAL, scale, in arm_cpu_realizefn()
2058 arm_gt_ptimer_cb, cpu); in arm_cpu_realizefn()
2059 cpu->gt_timer[GTIMER_VIRT] = timer_new(QEMU_CLOCK_VIRTUAL, scale, in arm_cpu_realizefn()
2060 arm_gt_vtimer_cb, cpu); in arm_cpu_realizefn()
2061 cpu->gt_timer[GTIMER_HYP] = timer_new(QEMU_CLOCK_VIRTUAL, scale, in arm_cpu_realizefn()
2062 arm_gt_htimer_cb, cpu); in arm_cpu_realizefn()
2063 cpu->gt_timer[GTIMER_SEC] = timer_new(QEMU_CLOCK_VIRTUAL, scale, in arm_cpu_realizefn()
2064 arm_gt_stimer_cb, cpu); in arm_cpu_realizefn()
2065 cpu->gt_timer[GTIMER_HYPVIRT] = timer_new(QEMU_CLOCK_VIRTUAL, scale, in arm_cpu_realizefn()
2066 arm_gt_hvtimer_cb, cpu); in arm_cpu_realizefn()
2067 cpu->gt_timer[GTIMER_S_EL2_PHYS] = timer_new(QEMU_CLOCK_VIRTUAL, scale, in arm_cpu_realizefn()
2068 arm_gt_sel2timer_cb, cpu); in arm_cpu_realizefn()
2069 cpu->gt_timer[GTIMER_S_EL2_VIRT] = timer_new(QEMU_CLOCK_VIRTUAL, scale, in arm_cpu_realizefn()
2070 arm_gt_sel2vtimer_cb, cpu); in arm_cpu_realizefn()
2080 arm_cpu_finalize_features(cpu, &local_err); in arm_cpu_realizefn()
2094 cpu->ctr = FIELD_DP64(cpu->ctr, CTR_EL0, DIC, 0); in arm_cpu_realizefn()
2098 cpu->has_vfp != cpu->has_neon) { in arm_cpu_realizefn()
2108 if (cpu->has_vfp_d32 != cpu->has_neon) { in arm_cpu_realizefn()
2113 if (!cpu->has_vfp_d32) { in arm_cpu_realizefn()
2116 u = cpu->isar.mvfr0; in arm_cpu_realizefn()
2118 cpu->isar.mvfr0 = u; in arm_cpu_realizefn()
2121 if (!cpu->has_vfp) { in arm_cpu_realizefn()
2125 t = cpu->isar.id_aa64isar1; in arm_cpu_realizefn()
2127 cpu->isar.id_aa64isar1 = t; in arm_cpu_realizefn()
2129 t = cpu->isar.id_aa64pfr0; in arm_cpu_realizefn()
2131 cpu->isar.id_aa64pfr0 = t; in arm_cpu_realizefn()
2133 u = cpu->isar.id_isar6; in arm_cpu_realizefn()
2136 cpu->isar.id_isar6 = u; in arm_cpu_realizefn()
2138 u = cpu->isar.mvfr0; in arm_cpu_realizefn()
2148 cpu->isar.mvfr0 = u; in arm_cpu_realizefn()
2150 u = cpu->isar.mvfr1; in arm_cpu_realizefn()
2157 cpu->isar.mvfr1 = u; in arm_cpu_realizefn()
2159 u = cpu->isar.mvfr2; in arm_cpu_realizefn()
2161 cpu->isar.mvfr2 = u; in arm_cpu_realizefn()
2164 if (!cpu->has_neon) { in arm_cpu_realizefn()
2170 t = cpu->isar.id_aa64isar0; in arm_cpu_realizefn()
2178 cpu->isar.id_aa64isar0 = t; in arm_cpu_realizefn()
2180 t = cpu->isar.id_aa64isar1; in arm_cpu_realizefn()
2184 cpu->isar.id_aa64isar1 = t; in arm_cpu_realizefn()
2186 t = cpu->isar.id_aa64pfr0; in arm_cpu_realizefn()
2188 cpu->isar.id_aa64pfr0 = t; in arm_cpu_realizefn()
2190 u = cpu->isar.id_isar5; in arm_cpu_realizefn()
2196 cpu->isar.id_isar5 = u; in arm_cpu_realizefn()
2198 u = cpu->isar.id_isar6; in arm_cpu_realizefn()
2203 cpu->isar.id_isar6 = u; in arm_cpu_realizefn()
2206 u = cpu->isar.mvfr1; in arm_cpu_realizefn()
2211 cpu->isar.mvfr1 = u; in arm_cpu_realizefn()
2213 u = cpu->isar.mvfr2; in arm_cpu_realizefn()
2215 cpu->isar.mvfr2 = u; in arm_cpu_realizefn()
2219 if (!cpu->has_neon && !cpu->has_vfp) { in arm_cpu_realizefn()
2223 t = cpu->isar.id_aa64isar0; in arm_cpu_realizefn()
2225 cpu->isar.id_aa64isar0 = t; in arm_cpu_realizefn()
2227 t = cpu->isar.id_aa64isar1; in arm_cpu_realizefn()
2229 cpu->isar.id_aa64isar1 = t; in arm_cpu_realizefn()
2231 u = cpu->isar.mvfr0; in arm_cpu_realizefn()
2233 cpu->isar.mvfr0 = u; in arm_cpu_realizefn()
2236 u = cpu->isar.mvfr1; in arm_cpu_realizefn()
2238 cpu->isar.mvfr1 = u; in arm_cpu_realizefn()
2241 if (arm_feature(env, ARM_FEATURE_M) && !cpu->has_dsp) { in arm_cpu_realizefn()
2246 u = cpu->isar.id_isar1; in arm_cpu_realizefn()
2248 cpu->isar.id_isar1 = u; in arm_cpu_realizefn()
2250 u = cpu->isar.id_isar2; in arm_cpu_realizefn()
2253 cpu->isar.id_isar2 = u; in arm_cpu_realizefn()
2255 u = cpu->isar.id_isar3; in arm_cpu_realizefn()
2258 cpu->isar.id_isar3 = u; in arm_cpu_realizefn()
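(Aside: every squashing step above uses the same read-modify-write idiom on the ID registers; a minimal sketch of the idiom, with the particular field chosen purely for illustration:)

    /* FIELD_DP64/FIELD_DP32 deposit a value into a named field, so hiding a
     * feature is: read the ID register, zero its field, write it back. */
    uint64_t t = cpu->isar.id_aa64isar1;
    t = FIELD_DP64(t, ID_AA64ISAR1, JSCVT, 0);   /* e.g. advertise no FEAT_JSCVT */
    cpu->isar.id_aa64isar1 = t;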
2263 * We rely on no XScale CPU having VFP so we can use the same bits in the in arm_cpu_realizefn()
2267 !cpu_isar_feature(aa32_vfp_simd, cpu) || in arm_cpu_realizefn()
2290 * This can only ever happen for hotplugging a CPU, or if in arm_cpu_realizefn()
2291 * the board code incorrectly creates a CPU which it has in arm_cpu_realizefn()
2294 error_setg(errp, "This CPU requires a smaller page size " in arm_cpu_realizefn()
2301 /* This cpu-id-to-MPIDR affinity is used only for TCG; KVM will override it. in arm_cpu_realizefn()
2306 if (cpu->mp_affinity == ARM64_AFFINITY_INVALID) { in arm_cpu_realizefn()
2307 cpu->mp_affinity = arm_build_mp_affinity(cs->cpu_index, in arm_cpu_realizefn()
2311 if (cpu->reset_hivecs) { in arm_cpu_realizefn()
2312 cpu->reset_sctlr |= (1 << 13); in arm_cpu_realizefn()
2315 if (cpu->cfgend) { in arm_cpu_realizefn()
2317 cpu->reset_sctlr |= SCTLR_EE; in arm_cpu_realizefn()
2319 cpu->reset_sctlr |= SCTLR_B; in arm_cpu_realizefn()
2323 if (!arm_feature(env, ARM_FEATURE_M) && !cpu->has_el3) { in arm_cpu_realizefn()
2324 /* If the has_el3 CPU property is disabled then we need to disable the in arm_cpu_realizefn()
2333 cpu->isar.id_pfr1 = FIELD_DP32(cpu->isar.id_pfr1, ID_PFR1, SECURITY, 0); in arm_cpu_realizefn()
2334 cpu->isar.id_dfr0 = FIELD_DP32(cpu->isar.id_dfr0, ID_DFR0, COPSDBG, 0); in arm_cpu_realizefn()
2335 cpu->isar.id_aa64pfr0 = FIELD_DP64(cpu->isar.id_aa64pfr0, in arm_cpu_realizefn()
2339 cpu->isar.id_aa64pfr0 = FIELD_DP64(cpu->isar.id_aa64pfr0, in arm_cpu_realizefn()
2343 if (!cpu->has_el2) { in arm_cpu_realizefn()
2347 if (!cpu->has_pmu) { in arm_cpu_realizefn()
2351 pmu_init(cpu); in arm_cpu_realizefn()
2354 arm_register_pre_el_change_hook(cpu, &pmu_pre_el_change, 0); in arm_cpu_realizefn()
2355 arm_register_el_change_hook(cpu, &pmu_post_el_change, 0); in arm_cpu_realizefn()
2359 cpu->pmu_timer = timer_new_ns(QEMU_CLOCK_VIRTUAL, arm_pmu_timer_cb, in arm_cpu_realizefn()
2360 cpu); in arm_cpu_realizefn()
2363 cpu->isar.id_aa64dfr0 = in arm_cpu_realizefn()
2364 FIELD_DP64(cpu->isar.id_aa64dfr0, ID_AA64DFR0, PMUVER, 0); in arm_cpu_realizefn()
2365 cpu->isar.id_dfr0 = FIELD_DP32(cpu->isar.id_dfr0, ID_DFR0, PERFMON, 0); in arm_cpu_realizefn()
2366 cpu->pmceid0 = 0; in arm_cpu_realizefn()
2367 cpu->pmceid1 = 0; in arm_cpu_realizefn()
2375 cpu->isar.id_aa64pfr0 = FIELD_DP64(cpu->isar.id_aa64pfr0, in arm_cpu_realizefn()
2377 cpu->isar.id_pfr1 = FIELD_DP32(cpu->isar.id_pfr1, in arm_cpu_realizefn()
2381 if (cpu_isar_feature(aa64_mte, cpu)) { in arm_cpu_realizefn()
2387 assert(cpu->gm_blocksize >= 3 && cpu->gm_blocksize <= 6); in arm_cpu_realizefn()
2396 if (tcg_enabled() && cpu->tag_memory == NULL) { in arm_cpu_realizefn()
2397 cpu->isar.id_aa64pfr1 = in arm_cpu_realizefn()
2398 FIELD_DP64(cpu->isar.id_aa64pfr1, ID_AA64PFR1, MTE, 1); in arm_cpu_realizefn()
2405 if (kvm_enabled() && !cpu->kvm_mte) { in arm_cpu_realizefn()
2406 FIELD_DP64(cpu->isar.id_aa64pfr1, ID_AA64PFR1, MTE, 0); in arm_cpu_realizefn()
2412 if (tcg_enabled() && cpu_isar_feature(aa64_wfxt, cpu)) { in arm_cpu_realizefn()
2413 cpu->wfxt_timer = timer_new_ns(QEMU_CLOCK_VIRTUAL, in arm_cpu_realizefn()
2414 arm_wfxt_timer_cb, cpu); in arm_cpu_realizefn()
2426 cpu->isar.id_aa64dfr0 = in arm_cpu_realizefn()
2427 FIELD_DP64(cpu->isar.id_aa64dfr0, ID_AA64DFR0, PMSVER, 0); in arm_cpu_realizefn()
2429 cpu->isar.id_aa64dfr0 = in arm_cpu_realizefn()
2430 FIELD_DP64(cpu->isar.id_aa64dfr0, ID_AA64DFR0, TRACEBUFFER, 0); in arm_cpu_realizefn()
2432 cpu->isar.id_aa64dfr0 = in arm_cpu_realizefn()
2433 FIELD_DP64(cpu->isar.id_aa64dfr0, ID_AA64DFR0, TRACEFILT, 0); in arm_cpu_realizefn()
2434 cpu->isar.id_dfr0 = in arm_cpu_realizefn()
2435 FIELD_DP32(cpu->isar.id_dfr0, ID_DFR0, TRACEFILT, 0); in arm_cpu_realizefn()
2437 cpu->isar.id_aa64dfr0 = in arm_cpu_realizefn()
2438 FIELD_DP64(cpu->isar.id_aa64dfr0, ID_AA64DFR0, TRACEVER, 0); in arm_cpu_realizefn()
2439 cpu->isar.id_dfr0 = in arm_cpu_realizefn()
2440 FIELD_DP32(cpu->isar.id_dfr0, ID_DFR0, COPTRC, 0); in arm_cpu_realizefn()
2442 cpu->isar.id_dfr0 = in arm_cpu_realizefn()
2443 FIELD_DP32(cpu->isar.id_dfr0, ID_DFR0, MMAPTRC, 0); in arm_cpu_realizefn()
2445 cpu->isar.id_aa64pfr0 = in arm_cpu_realizefn()
2446 FIELD_DP64(cpu->isar.id_aa64pfr0, ID_AA64PFR0, AMU, 0); in arm_cpu_realizefn()
2447 cpu->isar.id_pfr0 = in arm_cpu_realizefn()
2448 FIELD_DP32(cpu->isar.id_pfr0, ID_PFR0, AMU, 0); in arm_cpu_realizefn()
2450 cpu->isar.id_aa64pfr0 = in arm_cpu_realizefn()
2451 FIELD_DP64(cpu->isar.id_aa64pfr0, ID_AA64PFR0, MPAM, 0); in arm_cpu_realizefn()
2454 /* MPU can be configured out of a PMSA CPU either by setting has-mpu in arm_cpu_realizefn()
2457 if (!cpu->has_mpu || cpu->pmsav7_dregion == 0) { in arm_cpu_realizefn()
2458 cpu->has_mpu = false; in arm_cpu_realizefn()
2459 cpu->pmsav7_dregion = 0; in arm_cpu_realizefn()
2460 cpu->pmsav8r_hdregion = 0; in arm_cpu_realizefn()
2465 uint32_t nr = cpu->pmsav7_dregion; in arm_cpu_realizefn()
2488 if (cpu->pmsav8r_hdregion > 0xff) { in arm_cpu_realizefn()
2490 cpu->pmsav8r_hdregion); in arm_cpu_realizefn()
2494 if (cpu->pmsav8r_hdregion) { in arm_cpu_realizefn()
2496 cpu->pmsav8r_hdregion); in arm_cpu_realizefn()
2498 cpu->pmsav8r_hdregion); in arm_cpu_realizefn()
2503 uint32_t nr = cpu->sau_sregion; in arm_cpu_realizefn()
2521 if (tcg_enabled() && cpu_isar_feature(aa64_rme, cpu)) { in arm_cpu_realizefn()
2522                 arm_register_el_change_hook(cpu, &gt_rme_post_el_change, 0); in arm_cpu_realizefn()
2526 register_cp_regs_for_features(cpu); in arm_cpu_realizefn()
2527 arm_cpu_register_gdb_regs_for_features(cpu); in arm_cpu_realizefn()
2528 arm_cpu_register_gdb_commands(cpu); in arm_cpu_realizefn()
2530 init_cpreg_list(cpu); in arm_cpu_realizefn()
2535 bool has_secure = cpu->has_el3 || arm_feature(env, ARM_FEATURE_M_SECURITY); in arm_cpu_realizefn()
2541 if (cpu->tag_memory != NULL) { in arm_cpu_realizefn()
2548 if (!cpu->secure_memory) { in arm_cpu_realizefn()
2549 cpu->secure_memory = cs->memory; in arm_cpu_realizefn()
2551 cpu_address_space_init(cs, ARMASIdx_S, "cpu-secure-memory", in arm_cpu_realizefn()
2552 cpu->secure_memory); in arm_cpu_realizefn()
2555 if (cpu->tag_memory != NULL) { in arm_cpu_realizefn()
2556 cpu_address_space_init(cs, ARMASIdx_TagNS, "cpu-tag-memory", in arm_cpu_realizefn()
2557 cpu->tag_memory); in arm_cpu_realizefn()
2559 cpu_address_space_init(cs, ARMASIdx_TagS, "cpu-tag-memory", in arm_cpu_realizefn()
2560 cpu->secure_tag_memory); in arm_cpu_realizefn()
2564 cpu_address_space_init(cs, ARMASIdx_NS, "cpu-memory", cs->memory); in arm_cpu_realizefn()
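(Aside: the tag address spaces above only exist if the board linked MTE tag RAM to the CPU before realize; a minimal sketch of that board-side step, with the region variables hypothetical:)

    /* These links become the ARMASIdx_TagNS / ARMASIdx_TagS address spaces. */
    object_property_set_link(cpuobj, "tag-memory",
                             OBJECT(tag_sysmem), &error_abort);
    object_property_set_link(cpuobj, "secure-tag-memory",
                             OBJECT(secure_tag_sysmem), &error_abort);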
2567 if (cpu->core_count == -1) { in arm_cpu_realizefn()
2568 cpu->core_count = smp_cpus; in arm_cpu_realizefn()
2573 int dcz_blocklen = 4 << cpu->dcz_blocksize; in arm_cpu_realizefn()
2582 * is 2KiB, no cpu actually uses such a large blocklen. in arm_cpu_realizefn()
2591 if (cpu_isar_feature(aa64_mte, cpu)) { in arm_cpu_realizefn()
2612 /* For backwards compatibility usermode emulation allows "-cpu any", in arm_cpu_class_by_name()
2613 * which has the same semantics as "-cpu max". in arm_cpu_class_by_name()
2641 ARMCPU *cpu = ARM_CPU(cs); in arm_gdb_arch_name() local
2642 CPUARMState *env = &cpu->env; in arm_gdb_arch_name()
2644 if (arm_gdbstub_is_aarch64(cpu)) { in arm_gdb_arch_name()
2655 ARMCPU *cpu = ARM_CPU(cs); in arm_gdb_get_core_xml_file() local
2656 CPUARMState *env = &cpu->env; in arm_gdb_get_core_xml_file()
2658 if (arm_gdbstub_is_aarch64(cpu)) { in arm_gdb_get_core_xml_file()
2692 #include "hw/core/sysemu-cpu-ops.h"