Lines Matching full:if

22 * Assuming fine-grained-traps are active, return true if we in fgt_svc()
25 * because if this is AArch32 EL1 then arm_fgt_active() is false. in fgt_svc()
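The two fgt_svc() matches above describe a simple selection: with fine-grained traps already known to be active, pick the SVC trap bit that applies to the current EL. A minimal standalone sketch of that decision, using hypothetical stand-in types rather than the real CPUARMState/FIELD_EX64 machinery:

    #include <stdbool.h>

    /* Hypothetical stand-ins for the two HFGITR_EL2 SVC trap bits. */
    typedef struct {
        bool hfgitr_svc_el0;    /* HFGITR_EL2.SVC_EL0 */
        bool hfgitr_svc_el1;    /* HFGITR_EL2.SVC_EL1 */
    } FgtState;

    /*
     * Assuming fine-grained traps are active, return true if SVC should
     * trap at this EL.  AArch32 EL1 never reaches this point because the
     * caller's arm_fgt_active() check is false there.
     */
    static bool fgt_svc_sketch(const FgtState *fgt, int el)
    {
        return el == 0 ? fgt->hfgitr_svc_el0 : fgt->hfgitr_svc_el1;
    }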
33 /* Return true if memory alignment should be enforced. */
40 if (sctlr & SCTLR_A) { in aprofile_require_alignment()
48 if (arm_feature(env, ARM_FEATURE_PMSA)) { in aprofile_require_alignment()
53 * With VMSA, if translation is disabled, then the default memory type in aprofile_require_alignment()
58 if (sctlr & SCTLR_M) { in aprofile_require_alignment()
62 if (el < 2 && (arm_hcr_el2_eff(env) & (HCR_DC | HCR_VM))) { in aprofile_require_alignment()
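Taken together, the aprofile_require_alignment() matches describe when alignment must be enforced: always when SCTLR.A is set, and otherwise only when a VMSA CPU has translation disabled (so the default memory type is Device) and nothing at EL2 overrides that. A hedged restatement with invented, flattened inputs:

    #include <stdbool.h>

    /* Hypothetical flattening of the state the matched conditions test. */
    typedef struct {
        bool sctlr_a;       /* SCTLR_ELx.A: alignment checking enabled */
        bool sctlr_m;       /* SCTLR_ELx.M: stage 1 translation enabled */
        bool is_pmsa;       /* CPU implements PMSA rather than VMSA */
        bool hcr_dc_or_vm;  /* effective HCR_EL2.DC or HCR_EL2.VM is set */
    } AlignCtx;

    static bool require_alignment_sketch(const AlignCtx *c, int el)
    {
        if (c->sctlr_a) {
            return true;    /* explicit alignment checking requested */
        }
        if (c->is_pmsa) {
            return false;   /* no Device-by-default case to worry about */
        }
        if (c->sctlr_m) {
            return false;   /* stage 1 on: memory type comes from the PTEs */
        }
        if (el < 2 && c->hcr_dc_or_vm) {
            return false;   /* EL2 cacheability/stage-2 override applies */
        }
        /* Translation disabled: default type is Device, which must be aligned. */
        return true;
    }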
86 if (arm_singlestep_active(env)) { in rebuild_hflags_common()
99 if (sctlr_b) { in rebuild_hflags_common_32()
102 if (arm_cpu_data_is_big_endian_a32(env, sctlr_b)) { in rebuild_hflags_common_32()
117 if (ccr & R_V7M_CCR_UNALIGN_TRP_MASK) { in rebuild_hflags_m32()
121 if (arm_v7m_is_handler_mode(env)) { in rebuild_hflags_m32()
130 if (arm_feature(env, ARM_FEATURE_V8) && in rebuild_hflags_m32()
136 if (arm_feature(env, ARM_FEATURE_M_SECURITY) && env->v7m.secure) { in rebuild_hflags_m32()
146 if (!cpu_isar_feature(aa64_sme_fa64, env_archcpu(env))) { in sme_fa64()
150 if (el <= 1 && !el_is_in_host(env, el)) { in sme_fa64()
151 if (!FIELD_EX64(env->vfp.smcr_el[1], SMCR, FA64)) { in sme_fa64()
155 if (el <= 2 && arm_is_el2_enabled(env)) { in sme_fa64()
156 if (!FIELD_EX64(env->vfp.smcr_el[2], SMCR, FA64)) { in sme_fa64()
160 if (arm_feature(env, ARM_FEATURE_EL3)) { in sme_fa64()
161 if (!FIELD_EX64(env->vfp.smcr_el[3], SMCR, FA64)) { in sme_fa64()
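The sme_fa64() matches read as a chain of vetoes: FA64 is only usable when the feature exists and every SMCR_ELx.FA64 bit that applies to the current EL is set. A sketch under that reading, with hypothetical flattened inputs:

    #include <stdbool.h>

    /* Hypothetical summary of the controls consulted by the matches. */
    typedef struct {
        bool feat_sme_fa64;   /* FEAT_SME_FA64 implemented at all */
        bool el_is_in_host;   /* EL0/EL1 "in host", so the EL1 control is skipped */
        bool el2_enabled;
        bool have_el3;
        bool smcr_el1_fa64;   /* SMCR_EL1.FA64 */
        bool smcr_el2_fa64;   /* SMCR_EL2.FA64 */
        bool smcr_el3_fa64;   /* SMCR_EL3.FA64 */
    } Fa64Ctx;

    static bool sme_fa64_sketch(const Fa64Ctx *c, int el)
    {
        if (!c->feat_sme_fa64) {
            return false;
        }
        if (el <= 1 && !c->el_is_in_host && !c->smcr_el1_fa64) {
            return false;
        }
        if (el <= 2 && c->el2_enabled && !c->smcr_el2_fa64) {
            return false;
        }
        if (c->have_el3 && !c->smcr_el3_fa64) {
            return false;
        }
        return true;    /* no level vetoed FA64 */
    }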
176 if (aprofile_require_alignment(env, el, sctlr)) { in rebuild_hflags_a32()
180 if (arm_el_is_aa64(env, 1)) { in rebuild_hflags_a32()
184 if (el < 2 && env->cp15.hstr_el2 && arm_is_el2_enabled(env) && in rebuild_hflags_a32()
189 if (arm_fgt_active(env, el)) { in rebuild_hflags_a32()
191 if (fgt_svc(env, el)) { in rebuild_hflags_a32()
196 if (env->uncached_cpsr & CPSR_IL) { in rebuild_hflags_a32()
205 if (el == 0 in rebuild_hflags_a32()
236 if (cpu_isar_feature(aa64_sve, env_archcpu(env))) { in rebuild_hflags_a64()
240 * If either FP or SVE are disabled, translator does not need len. in rebuild_hflags_a64()
241 * If SVE EL > FP EL, FP exception has precedence, and translator in rebuild_hflags_a64()
245 if (fp_el != 0) { in rebuild_hflags_a64()
246 if (sve_el > fp_el) { in rebuild_hflags_a64()
249 } else if (sve_el == 0) { in rebuild_hflags_a64()
254 if (cpu_isar_feature(aa64_sme, env_archcpu(env))) { in rebuild_hflags_a64()
259 if (sme_el == 0) { in rebuild_hflags_a64()
260 /* Similarly, do not compute SVL if SME is disabled. */ in rebuild_hflags_a64()
263 if (sm) { in rebuild_hflags_a64()
264 /* If SVE is disabled, we will not have set VL above. */ in rebuild_hflags_a64()
268 if (sm) { in rebuild_hflags_a64()
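The comments in the preceding rebuild_hflags_a64() matches say the translator only needs a vector length when the relevant extension is actually usable: VL when both FP and SVE are enabled, SVL (which also stands in for VL) when SME is enabled and PSTATE.SM is set. A rough, hypothetical condensation of that rule; the field names are invented and the real code writes the result into the TB flags rather than returning it:

    #include <stdbool.h>
    #include <stdint.h>

    typedef struct {
        int fp_el;      /* 0 means FP accesses do not trap at this EL */
        int sve_el;     /* 0 means SVE accesses do not trap */
        int sme_el;     /* 0 means SME accesses do not trap */
        bool sm;        /* PSTATE.SM: streaming SVE mode */
        uint32_t vl;    /* normal SVE vector length for this EL */
        uint32_t svl;   /* streaming vector length for this EL */
    } VecLenCtx;

    static uint32_t effective_vl_sketch(const VecLenCtx *c)
    {
        /* Streaming mode with SME enabled: the streaming length wins. */
        if (c->sm && c->sme_el == 0) {
            return c->svl;
        }
        /* Otherwise VL only matters if both FP and SVE are enabled. */
        if (c->fp_el == 0 && c->sve_el == 0) {
            return c->vl;
        }
        return 0;   /* translator does not need a length here */
    }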
277 if (aprofile_require_alignment(env, el, sctlr)) { in rebuild_hflags_a64()
281 if (arm_cpu_data_is_big_endian_a64(el, sctlr)) { in rebuild_hflags_a64()
285 if (cpu_isar_feature(aa64_pauth, env_archcpu(env))) { in rebuild_hflags_a64()
292 if (sctlr & (SCTLR_EnIA | SCTLR_EnIB | SCTLR_EnDA | SCTLR_EnDB)) { in rebuild_hflags_a64()
297 if (cpu_isar_feature(aa64_bti, env_archcpu(env))) { in rebuild_hflags_a64()
299 if (sctlr & (el == 0 ? SCTLR_BT0 : SCTLR_BT1)) { in rebuild_hflags_a64()
304 if (cpu_isar_feature(aa64_lse2, env_archcpu(env))) { in rebuild_hflags_a64()
305 if (sctlr & SCTLR_nAA) { in rebuild_hflags_a64()
311 if (!(env->pstate & PSTATE_UAO)) { in rebuild_hflags_a64()
316 if ((hcr & (HCR_NV | HCR_NV1)) != (HCR_NV | HCR_NV1)) { in rebuild_hflags_a64()
326 if (env->cp15.hcr_el2 & HCR_TGE) { in rebuild_hflags_a64()
335 if (env->pstate & PSTATE_IL) { in rebuild_hflags_a64()
339 if (arm_fgt_active(env, el)) { in rebuild_hflags_a64()
341 if (FIELD_EX64(env->cp15.fgt_exec[FGTREG_HFGITR], HFGITR_EL2, ERET)) { in rebuild_hflags_a64()
344 if (fgt_svc(env, el)) { in rebuild_hflags_a64()
351 * of "is EL2 enabled" and the NV bit can only be set if FEAT_NV is present. in rebuild_hflags_a64()
353 if (el == 1 && (hcr & HCR_NV)) { in rebuild_hflags_a64()
356 if (hcr & HCR_NV1) { in rebuild_hflags_a64()
359 if (hcr & HCR_NV2) { in rebuild_hflags_a64()
361 if (hcr & HCR_E2H) { in rebuild_hflags_a64()
364 if (env->cp15.sctlr_el[2] & SCTLR_EE) { in rebuild_hflags_a64()
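The HCR_NV/NV1/NV2/E2H/SCTLR_EE matches above are the FEAT_NV bookkeeping at EL1; per the comment, the effective HCR_EL2 value already accounts for whether EL2 is enabled, and NV can only be set when FEAT_NV exists, so plain bit tests suffice. A sketch of how those bits might fold into flag bits; the nesting of the checks is an assumption here, since the matches only show the individual conditions, and the flag names are invented:

    #include <stdbool.h>
    #include <stdint.h>

    /* Hypothetical flag bits for the nested-virt state tracked per TB. */
    enum {
        TBF_NV          = 1u << 0,
        TBF_NV1         = 1u << 1,
        TBF_NV2         = 1u << 2,
        TBF_NV2_MEM_E2H = 1u << 3,  /* NV2 memory redirection under E2H */
        TBF_NV2_MEM_BE  = 1u << 4,  /* NV2 memory accesses are big-endian */
    };

    static uint32_t nv_flags_sketch(int el, bool nv, bool nv1, bool nv2,
                                    bool e2h, bool sctlr_el2_ee)
    {
        uint32_t flags = 0;

        if (el == 1 && nv) {
            flags |= TBF_NV;
            if (nv1) {
                flags |= TBF_NV1;
            }
            if (nv2) {
                flags |= TBF_NV2;
                if (e2h) {
                    flags |= TBF_NV2_MEM_E2H;
                }
                if (sctlr_el2_ee) {
                    flags |= TBF_NV2_MEM_BE;
                }
            }
        }
        return flags;
    }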
370 if (cpu_isar_feature(aa64_mte, env_archcpu(env))) { in rebuild_hflags_a64()
372 * Set MTE_ACTIVE if any access may be Checked, and leave clear in rebuild_hflags_a64()
373 * if all accesses must be Unchecked: in rebuild_hflags_a64()
374 * 1) If no TBI, then there are no tags in the address to check, in rebuild_hflags_a64()
375 * 2) If Tag Check Override, then all accesses are Unchecked, in rebuild_hflags_a64()
376 * 3) If Tag Check Fail == 0, then Checked access have no effect, in rebuild_hflags_a64()
377 * 4) If no Allocation Tag Access, then all accesses are Unchecked. in rebuild_hflags_a64()
379 if (allocation_tag_access_enabled(env, el, sctlr)) { in rebuild_hflags_a64()
381 if (tbid in rebuild_hflags_a64()
385 if (!EX_TBFLAG_A64(flags, UNPRIV)) { in rebuild_hflags_a64()
396 /* And again for unprivileged accesses, if required. */ in rebuild_hflags_a64()
397 if (EX_TBFLAG_A64(flags, UNPRIV) in rebuild_hflags_a64()
409 if (EX_TBFLAG_A64(flags, UNPRIV)) { in rebuild_hflags_a64()
410 if (allocation_tag_access_enabled(env, 0, sctlr)) { in rebuild_hflags_a64()
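The MTE comment matched above enumerates exactly when tag checking can be ignored: MTE_ACTIVE is set only if none of the four conditions forces every access to be Unchecked. That reduces to a conjunction, sketched here with hypothetical inputs:

    #include <stdbool.h>

    typedef struct {
        bool tbi;       /* top-byte-ignore: tags are present in the address */
        bool tco;       /* PSTATE.TCO: tag check override */
        unsigned tcf;   /* SCTLR_ELx.TCF: 0 means check failures do nothing */
        bool ata;       /* allocation tag access enabled */
    } MteCtx;

    static bool mte_active_sketch(const MteCtx *c)
    {
        return c->tbi       /* 1) tags exist in the address to check   */
            && !c->tco      /* 2) checking is not overridden           */
            && c->tcf != 0  /* 3) a check failure actually has effect  */
            && c->ata;      /* 4) allocation tag access is permitted   */
    }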
420 if (env->vfp.fpcr & FPCR_AH) { in rebuild_hflags_a64()
423 if (env->vfp.fpcr & FPCR_NEP) { in rebuild_hflags_a64()
425 * In streaming-SVE without FA64, NEP behaves as if zero; in rebuild_hflags_a64()
428 if (!(EX_TBFLAG_A64(flags, PSTATE_SM) && !sme_fa64(env, el))) { in rebuild_hflags_a64()
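The last few matches cover FPCR.AH and FPCR.NEP: NEP is honoured except in streaming SVE mode without FA64, where it behaves as if zero. A tiny hedged sketch of that one rule:

    #include <stdbool.h>

    /* Returns whether FPCR.NEP should take effect (hypothetical helper). */
    static bool nep_effective_sketch(bool fpcr_nep, bool streaming_sm, bool fa64)
    {
        if (!fpcr_nep) {
            return false;
        }
        /* In streaming-SVE without FA64, NEP behaves as if it were zero. */
        return !(streaming_sm && !fa64);
    }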
442 if (is_a64(env)) { in rebuild_hflags_internal()
444 } else if (arm_feature(env, ARM_FEATURE_M)) { in rebuild_hflags_internal()
457 * If we have triggered a EL state change we can't rely on the
478 * If we have triggered a EL state change we can't rely on the
511 if (unlikely(c.flags != r.flags || c.flags2 != r.flags2)) { in assert_hflags_rebuild_correctly()
524 * Return true if there is definitely no predication of MVE in mve_no_pred()
525 * instructions by VPR or LTPSIZE. (Returning false even if there in mve_no_pred()
528 * If the CPU does not implement MVE then this TB flag is always 0. in mve_no_pred()
530 * NOTE: if you change this logic, the "recalculate s->mve_no_pred" in mve_no_pred()
538 if (cpu_isar_feature(aa32_mve, env_archcpu(env))) { in mve_no_pred()
541 if (env->v7m.vpr) { in mve_no_pred()
544 if (env->v7m.ltpsize < 4) { in mve_no_pred()
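The mve_no_pred() matches and their comment promise only a conservative answer: return true only when predication is definitely absent, since a spurious false merely costs some generated-code quality. A sketch with the state flattened into plain parameters (names hypothetical):

    #include <stdbool.h>
    #include <stdint.h>

    static bool mve_no_pred_sketch(bool have_mve, uint32_t vpr, unsigned ltpsize)
    {
        if (!have_mve) {
            return false;   /* without MVE the TB flag simply stays 0 */
        }
        if (vpr != 0) {
            return false;   /* VPR may predicate beats */
        }
        if (ltpsize < 4) {
            return false;   /* a low-overhead loop tail may predicate */
        }
        return true;        /* definitely no predication */
    }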
559 if (EX_TBFLAG_ANY(flags, AARCH64_STATE)) { in arm_get_tb_cpu_state()
561 if (cpu_isar_feature(aa64_bti, env_archcpu(env))) { in arm_get_tb_cpu_state()
567 if (arm_feature(env, ARM_FEATURE_M)) { in arm_get_tb_cpu_state()
568 if (arm_feature(env, ARM_FEATURE_M_SECURITY) && in arm_get_tb_cpu_state()
574 if ((env->v7m.fpccr[env->v7m.secure] & R_V7M_FPCCR_ASPEN_MASK) && in arm_get_tb_cpu_state()
587 if (env->v7m.fpccr[is_secure] & R_V7M_FPCCR_LSPACT_MASK) { in arm_get_tb_cpu_state()
591 if (mve_no_pred(env)) { in arm_get_tb_cpu_state()
599 if (arm_feature(env, ARM_FEATURE_XSCALE)) { in arm_get_tb_cpu_state()
605 if (env->vfp.xregs[ARM_VFP_FPEXC] & (1 << 30)) { in arm_get_tb_cpu_state()
623 if (EX_TBFLAG_ANY(flags, SS_ACTIVE) && (env->pstate & PSTATE_SS)) { in arm_get_tb_cpu_state()
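The final match combines the SS_ACTIVE hflag with the live PSTATE.SS bit, which is the ARM software single-step state machine: Inactive, Active-pending, or Active-not-pending. A small sketch of that classification (enum names chosen for illustration):

    #include <stdbool.h>

    typedef enum {
        SS_INACTIVE,            /* SS_ACTIVE == 0: the per-TB SS flag stays 0 */
        SS_ACTIVE_PENDING,      /* SS_ACTIVE == 1, PSTATE.SS == 0 */
        SS_ACTIVE_NOT_PENDING,  /* SS_ACTIVE == 1, PSTATE.SS == 1 */
    } SingleStepState;

    static SingleStepState ss_state_sketch(bool ss_active, bool pstate_ss)
    {
        if (!ss_active) {
            return SS_INACTIVE;
        }
        return pstate_ss ? SS_ACTIVE_NOT_PENDING : SS_ACTIVE_PENDING;
    }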