Lines matching full:if (QEMU, target/arm/translate-vfp.c)

Each entry below is: source line number, the matched line (truncated where the construct continues on the next source line), then the enclosing function.
20 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
92 * register. If top is true, returns the top 16 bits; otherwise the bottom
98 #if HOST_BIG_ENDIAN in vfp_f16_offset()
99 if (!top) { in vfp_f16_offset()
103 if (top) { in vfp_f16_offset()
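
The vfp_f16_offset() hits above implement the half-register addressing the comment describes. A minimal sketch of how they compose (not the verbatim source; vfp_reg_offset() is the existing helper that locates the S register):

    static long vfp_f16_offset(unsigned reg, bool top)
    {
        long offs = vfp_reg_offset(false, reg);  /* base of the S register */
    #if HOST_BIG_ENDIAN
        if (!top) {
            offs += 2;   /* big-endian host: the low half sits at the higher address */
        }
    #else
        if (top) {
            offs += 2;   /* little-endian host: the high half sits at the higher address */
        }
    #endif
        return offs;
    }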
111 * Generate code for M-profile lazy FP state preservation if needed;
116 if (s->v7m_lspact) { in gen_preserve_fp_state()
122 if (translator_io_start(&s->base)) { in gen_preserve_fp_state()
127 * If the preserve_fp_state helper doesn't throw an exception in gen_preserve_fp_state()
135 * If we're about to create a new fp context then that in gen_preserve_fp_state()
142 if (skip_context_update || !s->v7m_new_fp_ctxt_needed) { in gen_preserve_fp_state()
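
All of the gen_preserve_fp_state() hits sit inside one gated block: nothing happens unless lazy state preservation is pending. A hedged sketch of the control flow, assuming current translator API names (translator_io_start(), the DISAS_UPDATE_* codes, and tcg_env, which older trees spell cpu_env):

    if (s->v7m_lspact) {
        /* Lazy stacking touches memory and the NVIC, so for icount it
         * must count as an I/O operation and end the TB. */
        if (translator_io_start(&s->base)) {
            s->base.is_jmp = DISAS_UPDATE_EXIT;
        }
        gen_helper_v7m_preserve_fp_state(tcg_env);
        /* If the helper returns without an exception, LSPACT is now
         * clear, so later FP insns in this TB can skip the check. */
        s->v7m_lspact = false;
        if (skip_context_update || !s->v7m_new_fp_ctxt_needed) {
            s->base.is_jmp = DISAS_UPDATE_NOCHAIN;
        }
    }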
151 * ownership of the FP context, and create a new context if
158 if (s->v8m_fpccr_s_wrong) { in gen_update_fp_context()
162 if (s->v8m_secure) { in gen_update_fp_context()
172 if (s->v7m_new_fp_ctxt_needed) { in gen_update_fp_context()
182 if (dc_isar_feature(aa32_mve, s)) { in gen_update_fp_context()
192 * if and only if we have MVE. in gen_update_fp_context()
199 if (s->v8m_secure) { in gen_update_fp_context()
213 * If VFP is enabled, return true. If not, emit code to generate an
221 if (s->fp_excp_el) { in vfp_access_check_a()
242 if (s->sme_trap_nonstreaming) { in vfp_access_check_a()
249 if (!s->vfp_enabled && !ignore_vfp_enabled) { in vfp_access_check_a()
260 * If VFP is enabled, do the necessary M-profile lazy-FP handling and then
261 * return true. If not, emit code to generate an appropriate exception and
267 if (s->fp_excp_el) { in vfp_access_check_m()
282 /* Trigger lazy-state preservation if necessary */ in vfp_access_check_m()
285 if (!skip_context_update) { in vfp_access_check_m()
286 /* Update ownership of FP context and create new FP context if needed */ in vfp_access_check_m()
299 if (arm_dc_feature(s, ARM_FEATURE_M)) { in vfp_access_check()
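
The three vfp_access_check* groups above form a small hierarchy: an A-profile checker, an M-profile checker that also drives the lazy-FP machinery, and a dispatcher that picks between them. A hedged sketch of the shape, following the hits (the exception-emitting details are elided):

    static bool vfp_access_check_m(DisasContext *s, bool skip_context_update)
    {
        if (s->fp_excp_el) {
            /* FP access disabled: the real code emits a NOCP UsageFault
             * here (M-profile reports "no coprocessor", not UNDEF);
             * the sketch only needs the early exit. */
            return false;
        }
        /* Trigger lazy-state preservation if necessary */
        gen_preserve_fp_state(s, skip_context_update);
        if (!skip_context_update) {
            /* Update ownership of FP context and create new FP context if needed */
            gen_update_fp_context(s);
        }
        return true;
    }

    bool vfp_access_check(DisasContext *s)
    {
        if (arm_dc_feature(s, ARM_FEATURE_M)) {
            return vfp_access_check_m(s, false);
        } else {
            return vfp_access_check_a(s, false);
        }
    }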
311 if (!dc_isar_feature(aa32_vsel, s)) { in trans_VSEL()
315 if (sz == 3 && !dc_isar_feature(aa32_fpdp_v2, s)) { in trans_VSEL()
319 if (sz == 1 && !dc_isar_feature(aa32_fp16_arith, s)) { in trans_VSEL()
323 /* UNDEF accesses to D16-D31 if they don't exist */ in trans_VSEL()
324 if (sz == 3 && !dc_isar_feature(aa32_simd_r32, s) && in trans_VSEL()
333 if (!vfp_access_check(s)) { in trans_VSEL()
337 if (sz == 3) { in trans_VSEL()
408 if (sz == 1) { in trans_VSEL()
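
trans_VSEL() is the first of many trans_* functions in this listing, and its hits show the gating pattern nearly every function below repeats: ISA feature checks, the D16-D31 UNDEF check, then vfp_access_check() before any code is emitted. A hedged template (trans_XXX and aa32_feature are placeholders, not real identifiers):

    static bool trans_XXX(DisasContext *s, arg_XXX *a)
    {
        if (!dc_isar_feature(aa32_feature, s)) {
            return false;    /* insn absent: decoder falls through to UNDEF */
        }
        /* UNDEF accesses to D16-D31 if they don't exist */
        if (sz == 3 && !dc_isar_feature(aa32_simd_r32, s) &&
            ((a->vd | a->vn | a->vm) & 0x10)) {
            return false;
        }
        if (!vfp_access_check(s)) {
            return true;     /* "handled": the exception is already emitted */
        }
        /* ...emit the TCG ops for the operation itself... */
        return true;
    }

Note the asymmetry: returning false means "decode as UNDEF", while returning true after a failed access check means "insn consumed, exception code already generated".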
437 if (!dc_isar_feature(aa32_vrint, s)) { in trans_VRINT()
441 if (sz == 3 && !dc_isar_feature(aa32_fpdp_v2, s)) { in trans_VRINT()
445 if (sz == 1 && !dc_isar_feature(aa32_fp16_arith, s)) { in trans_VRINT()
449 /* UNDEF accesses to D16-D31 if they don't exist */ in trans_VRINT()
450 if (sz == 3 && !dc_isar_feature(aa32_simd_r32, s) && in trans_VRINT()
458 if (!vfp_access_check(s)) { in trans_VRINT()
462 if (sz == 1) { in trans_VRINT()
470 if (sz == 3) { in trans_VRINT()
484 if (sz == 1) { in trans_VRINT()
505 if (!dc_isar_feature(aa32_vcvt_dr, s)) { in trans_VCVT()
509 if (sz == 3 && !dc_isar_feature(aa32_fpdp_v2, s)) { in trans_VCVT()
513 if (sz == 1 && !dc_isar_feature(aa32_fp16_arith, s)) { in trans_VCVT()
517 /* UNDEF accesses to D16-D31 if they don't exist */ in trans_VCVT()
518 if (sz == 3 && !dc_isar_feature(aa32_simd_r32, s) && (a->vm & 0x10)) { in trans_VCVT()
525 if (!vfp_access_check(s)) { in trans_VCVT()
529 if (sz == 1) { in trans_VCVT()
538 if (sz == 3) { in trans_VCVT()
545 if (is_signed) { in trans_VCVT()
557 if (sz == 1) { in trans_VCVT()
558 if (is_signed) { in trans_VCVT()
564 if (is_signed) { in trans_VCVT()
582 * predicated, but they are subject to beatwise execution if they are in mve_skip_vmov()
586 * this means only that if PSR.ECI says we should not be executing in mve_skip_vmov()
591 * Note that if PSR.ECI is non-zero then we cannot be in an IT block. in mve_skip_vmov()
593 * Return true if this VMOV scalar <-> gpreg should be skipped because in mve_skip_vmov()
600 if (!dc_isar_feature(aa32_mve, s)) { in mve_skip_vmov()
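
The comment fragments above state the rule; a hedged sketch of how the listed check fits in (ECI_NONE is the translator's "no partial execution" state; the actual beat mapping is elided):

    if (!dc_isar_feature(aa32_mve, s)) {
        return false;    /* no MVE: VMOV is never beatwise, never skipped */
    }
    if (s->eci == ECI_NONE) {
        return false;    /* not resuming mid-insn: all beats execute */
    }
    /* Otherwise map (vn, index, size) to the beat that touches this lane
     * and return true when PSR.ECI says that beat is already done. */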
628 if (!dc_isar_feature(aa32_mve, s)) { in trans_VMOV_to_gp()
629 if (a->size == MO_32 in trans_VMOV_to_gp()
636 /* UNDEF accesses to D16-D31 if they don't exist */ in trans_VMOV_to_gp()
637 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vn & 0x10)) { in trans_VMOV_to_gp()
641 if (dc_isar_feature(aa32_mve, s)) { in trans_VMOV_to_gp()
642 if (!mve_eci_check(s)) { in trans_VMOV_to_gp()
647 if (!vfp_access_check(s)) { in trans_VMOV_to_gp()
651 if (!mve_skip_vmov(s, a->vn, a->index, a->size)) { in trans_VMOV_to_gp()
658 if (dc_isar_feature(aa32_mve, s)) { in trans_VMOV_to_gp()
673 if (!dc_isar_feature(aa32_mve, s)) { in trans_VMOV_from_gp()
674 if (a->size == MO_32 in trans_VMOV_from_gp()
681 /* UNDEF accesses to D16-D31 if they don't exist */ in trans_VMOV_from_gp()
682 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vn & 0x10)) { in trans_VMOV_from_gp()
686 if (dc_isar_feature(aa32_mve, s)) { in trans_VMOV_from_gp()
687 if (!mve_eci_check(s)) { in trans_VMOV_from_gp()
692 if (!vfp_access_check(s)) { in trans_VMOV_from_gp()
696 if (!mve_skip_vmov(s, a->vn, a->index, a->size)) { in trans_VMOV_from_gp()
701 if (dc_isar_feature(aa32_mve, s)) { in trans_VMOV_from_gp()
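
trans_VMOV_to_gp() and trans_VMOV_from_gp() share the same MVE bracketing, visible in their hits: an ECI check before the access check, a possible skip of the move itself, and an ECI advance afterwards. A hedged sketch (mve_update_and_store_eci() is assumed to be the advance helper in the surrounding source):

    if (dc_isar_feature(aa32_mve, s)) {
        if (!mve_eci_check(s)) {
            return true;    /* bad ECI value: exception already emitted */
        }
    }
    if (!vfp_access_check(s)) {
        return true;
    }
    if (!mve_skip_vmov(s, a->vn, a->index, a->size)) {
        /* ...perform the scalar <-> gpreg move... */
    }
    if (dc_isar_feature(aa32_mve, s)) {
        mve_update_and_store_eci(s);
    }
    return true;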
713 if (!arm_dc_feature(s, ARM_FEATURE_NEON)) { in trans_VDUP()
717 /* UNDEF accesses to D16-D31 if they don't exist */ in trans_VDUP()
718 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vn & 0x10)) { in trans_VDUP()
722 if (a->b && a->e) { in trans_VDUP()
726 if (a->q && (a->vn & 1)) { in trans_VDUP()
731 if (a->b) { in trans_VDUP()
733 } else if (a->e) { in trans_VDUP()
739 if (!vfp_access_check(s)) { in trans_VDUP()
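
In trans_VDUP(), b and e select the element size (both set is UNDEF, hence the a->b && a->e check). A hedged sketch of the selection, using the MO_* size constants:

    if (a->b) {
        size = MO_8;     /* duplicate a byte across the vector */
    } else if (a->e) {
        size = MO_16;    /* duplicate a halfword */
    } else {
        size = MO_32;    /* duplicate a word */
    }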
754 if (arm_dc_feature(s, ARM_FEATURE_M)) { in trans_VMSR_VMRS()
759 if (!dc_isar_feature(aa32_fpsp_v2, s)) { in trans_VMSR_VMRS()
769 if (IS_USER(s) && dc_isar_feature(aa32_fpsp_v3, s)) { in trans_VMSR_VMRS()
776 if (IS_USER(s) || !arm_dc_feature(s, ARM_FEATURE_MVFR)) { in trans_VMSR_VMRS()
782 if (IS_USER(s) || !arm_dc_feature(s, ARM_FEATURE_V8)) { in trans_VMSR_VMRS()
790 if (IS_USER(s)) { in trans_VMSR_VMRS()
798 if (IS_USER(s) || dc_isar_feature(aa32_fpsp_v3, s)) { in trans_VMSR_VMRS()
810 if (!vfp_access_check_a(s, ignore_vfp_enabled)) { in trans_VMSR_VMRS()
814 if (a->l) { in trans_VMSR_VMRS()
821 if (s->current_el == 1) { in trans_VMSR_VMRS()
835 if (a->rt == 15) { in trans_VMSR_VMRS()
847 if (a->rt == 15) { in trans_VMSR_VMRS()
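
The two a->rt == 15 hits in trans_VMSR_VMRS() are the classic special case: VMRS with Rt=15 transfers FPSCR.NZCV into the CPSR flags instead of writing a core register. A hedged sketch of the VMRS side (gen_set_nzcv() is assumed to be the existing flag-setting helper):

    if (a->l) {                    /* VMRS: special register -> Rt */
        /* ...read the selected FP system register into tmp... */
        if (a->rt == 15) {
            gen_set_nzcv(tmp);     /* Rt == 15: set the CPSR NZCV flags */
        } else {
            store_reg(s, a->rt, tmp);
        }
    }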
895 if (!dc_isar_feature(aa32_fp16_arith, s)) { in trans_VMOV_half()
899 if (a->rt == 15) { in trans_VMOV_half()
904 if (!vfp_access_check(s)) { in trans_VMOV_half()
908 if (a->l) { in trans_VMOV_half()
927 if (!dc_isar_feature(aa32_fpsp_v2, s) && !dc_isar_feature(aa32_mve, s)) { in trans_VMOV_single()
931 if (!vfp_access_check(s)) { in trans_VMOV_single()
935 if (a->l) { in trans_VMOV_single()
939 if (a->rt == 15) { in trans_VMOV_single()
958 if (!dc_isar_feature(aa32_fpsp_v2, s) && !dc_isar_feature(aa32_mve, s)) { in trans_VMOV_64_sp()
966 if (!vfp_access_check(s)) { in trans_VMOV_64_sp()
970 if (a->op) { in trans_VMOV_64_sp()
998 if (!dc_isar_feature(aa32_fpsp_v2, s) && !dc_isar_feature(aa32_mve, s)) { in trans_VMOV_64_dp()
1002 /* UNDEF accesses to D16-D31 if they don't exist */ in trans_VMOV_64_dp()
1003 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vm & 0x10)) { in trans_VMOV_64_dp()
1007 if (!vfp_access_check(s)) { in trans_VMOV_64_dp()
1011 if (a->op) { in trans_VMOV_64_dp()
1035 if (!dc_isar_feature(aa32_fpsp_v2, s) && !dc_isar_feature(aa32_mve, s)) { in trans_VLDR_VSTR_hp()
1039 if (!vfp_access_check(s)) { in trans_VLDR_VSTR_hp()
1045 if (!a->u) { in trans_VLDR_VSTR_hp()
1052 if (a->l) { in trans_VLDR_VSTR_hp()
1067 if (!dc_isar_feature(aa32_fpsp_v2, s) && !dc_isar_feature(aa32_mve, s)) { in trans_VLDR_VSTR_sp()
1071 if (!vfp_access_check(s)) { in trans_VLDR_VSTR_sp()
1076 if (!a->u) { in trans_VLDR_VSTR_sp()
1083 if (a->l) { in trans_VLDR_VSTR_sp()
1100 if (!dc_isar_feature(aa32_fpsp_v2, s) && !dc_isar_feature(aa32_mve, s)) { in trans_VLDR_VSTR_dp()
1104 /* UNDEF accesses to D16-D31 if they don't exist */ in trans_VLDR_VSTR_dp()
1105 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vd & 0x10)) { in trans_VLDR_VSTR_dp()
1109 if (!vfp_access_check(s)) { in trans_VLDR_VSTR_dp()
1114 if (!a->u) { in trans_VLDR_VSTR_dp()
1121 if (a->l) { in trans_VLDR_VSTR_dp()
1137 if (!dc_isar_feature(aa32_fpsp_v2, s) && !dc_isar_feature(aa32_mve, s)) { in trans_VLDM_VSTM_sp()
1143 if (n == 0 || (a->vd + n) > 32) { in trans_VLDM_VSTM_sp()
1150 if (a->rn == 15 && a->w) { in trans_VLDM_VSTM_sp()
1157 if (!vfp_access_check(s)) { in trans_VLDM_VSTM_sp()
1163 if (a->p) { in trans_VLDM_VSTM_sp()
1168 if (s->v8m_stackcheck && a->rn == 13 && a->w) { in trans_VLDM_VSTM_sp()
1171 * and is either the old SP (if post-increment) or in trans_VLDM_VSTM_sp()
1172 * the new SP (if pre-decrement). For post-increment in trans_VLDM_VSTM_sp()
1183 if (a->l) { in trans_VLDM_VSTM_sp()
1194 if (a->w) { in trans_VLDM_VSTM_sp()
1196 if (a->p) { in trans_VLDM_VSTM_sp()
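
The p/w hits in trans_VLDM_VSTM_sp() implement the two addressing forms described by the comment around source line 1171. A hedged sketch of the address setup and writeback (load_reg()/store_reg() are the existing gp-register accessors, n the register count):

    addr = load_reg(s, a->rn);
    if (a->p) {
        /* pre-decrement: the transfer area sits below Rn */
        tcg_gen_addi_i32(addr, addr, -(a->imm << 2));
    }
    /* ...transfer n S registers, advancing addr by 4 each time... */
    if (a->w) {
        if (a->p) {
            /* undo the loop's advance so Rn gets the decremented base */
            tcg_gen_addi_i32(addr, addr, -(n * 4));
        }
        store_reg(s, a->rn, addr);   /* post-increment leaves the advanced addr */
    }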
1215 if (!dc_isar_feature(aa32_fpsp_v2, s) && !dc_isar_feature(aa32_mve, s)) { in trans_VLDM_VSTM_dp()
1221 if (n == 0 || (a->vd + n) > 32 || n > 16) { in trans_VLDM_VSTM_dp()
1228 if (a->rn == 15 && a->w) { in trans_VLDM_VSTM_dp()
1233 /* UNDEF accesses to D16-D31 if they don't exist */ in trans_VLDM_VSTM_dp()
1234 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vd + n) > 16) { in trans_VLDM_VSTM_dp()
1240 if (!vfp_access_check(s)) { in trans_VLDM_VSTM_dp()
1246 if (a->p) { in trans_VLDM_VSTM_dp()
1251 if (s->v8m_stackcheck && a->rn == 13 && a->w) { in trans_VLDM_VSTM_dp()
1254 * and is either the old SP (if post-increment) or in trans_VLDM_VSTM_dp()
1255 * the new SP (if pre-decrement). For post-increment in trans_VLDM_VSTM_dp()
1266 if (a->l) { in trans_VLDM_VSTM_dp()
1277 if (a->w) { in trans_VLDM_VSTM_dp()
1279 if (a->p) { in trans_VLDM_VSTM_dp()
1281 } else if (a->imm & 1) { in trans_VLDM_VSTM_dp()
1287 if (offset != 0) { in trans_VLDM_VSTM_dp()
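
trans_VLDM_VSTM_dp() adds one twist over the sp version: the a->imm & 1 test. An odd imm encodes the legacy FLDMX/FSTMX forms, which transfer one extra word, so post-increment writeback must step 4 bytes further. A hedged sketch of the writeback offset selection, mirroring the hits above:

    if (a->w) {
        if (a->p) {
            offset = -offset * n;    /* pre-decrement: back to the base */
        } else if (a->imm & 1) {
            offset = 4;              /* FLDMX/FSTMX: one extra word */
        } else {
            offset = 0;
        }
        if (offset != 0) {
            tcg_gen_addi_i32(addr, addr, offset);
        }
        store_reg(s, a->rn, addr);
    }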
1299 * The callback should emit code to write a value to vd. If
1318 * Return true if the specified S reg is in a scalar bank
1319 * (ie if it is s0..s7)
1327 * Return true if the specified D reg is in a scalar bank
1328 * (ie if it is d0..d3 or d16..d19)
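
The two bank comments pin down exactly which registers sit in the "scalar" bank for the legacy VFP short-vector feature. A minimal sketch of predicates matching those ranges (the real helpers may be written differently):

    static bool vfp_sreg_is_scalar(int reg)
    {
        return (reg & 0x18) == 0;    /* s0..s7: bits [4:3] clear */
    }

    static bool vfp_dreg_is_scalar(int reg)
    {
        return (reg & 0xc) == 0;     /* d0..d3, d16..d19: bits [3:2] clear */
    }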
1367 if (!dc_isar_feature(aa32_fpsp_v2, s)) { in do_vfp_3op_sp()
1371 if (!dc_isar_feature(aa32_fpshvec, s) && in do_vfp_3op_sp()
1376 if (!vfp_access_check(s)) { in do_vfp_3op_sp()
1380 if (veclen > 0) { in do_vfp_3op_sp()
1382 if (vfp_sreg_is_scalar(vd)) { in do_vfp_3op_sp()
1388 if (vfp_sreg_is_scalar(vm)) { in do_vfp_3op_sp()
1407 if (reads_vd) { in do_vfp_3op_sp()
1413 if (veclen == 0) { in do_vfp_3op_sp()
1422 if (delta_m) { in do_vfp_3op_sp()
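
Inside do_vfp_3op_sp(), the veclen/delta hits implement short-vector iteration: a scalar destination forces a single operation, otherwise the op repeats across the vector bank, with delta_m == 0 meaning "mixed scalar/vector" so Vm stays fixed. A hedged sketch of the loop tail (vfp_advance_sreg() is the bank-wrapping step helper in this file):

    for (;;) {
        fn(fd, f0, f1, fpst);        /* one element of the operation */
        vfp_store_reg32(fd, vd);
        if (veclen == 0) {
            break;                   /* scalar, or vector exhausted */
        }
        veclen--;
        vd = vfp_advance_sreg(vd, delta_d);
        vn = vfp_advance_sreg(vn, delta_d);
        vfp_load_reg32(f0, vn);
        if (delta_m) {
            vm = vfp_advance_sreg(vm, delta_m);
            vfp_load_reg32(f1, vm);
        }
    }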
1444 if (!dc_isar_feature(aa32_fp16_arith, s)) { in do_vfp_3op_hp()
1448 if (s->vec_len != 0 || s->vec_stride != 0) { in do_vfp_3op_hp()
1452 if (!vfp_access_check(s)) { in do_vfp_3op_hp()
1464 if (reads_vd) { in do_vfp_3op_hp()
1481 if (!dc_isar_feature(aa32_fpdp_v2, s)) { in do_vfp_3op_dp()
1485 /* UNDEF accesses to D16-D31 if they don't exist */ in do_vfp_3op_dp()
1486 if (!dc_isar_feature(aa32_simd_r32, s) && ((vd | vn | vm) & 0x10)) { in do_vfp_3op_dp()
1490 if (!dc_isar_feature(aa32_fpshvec, s) && in do_vfp_3op_dp()
1495 if (!vfp_access_check(s)) { in do_vfp_3op_dp()
1499 if (veclen > 0) { in do_vfp_3op_dp()
1501 if (vfp_dreg_is_scalar(vd)) { in do_vfp_3op_dp()
1507 if (vfp_dreg_is_scalar(vm)) { in do_vfp_3op_dp()
1526 if (reads_vd) { in do_vfp_3op_dp()
1532 if (veclen == 0) { in do_vfp_3op_dp()
1540 if (delta_m) { in do_vfp_3op_dp()
1557 if (!dc_isar_feature(aa32_fpshvec, s) && in do_vfp_2op_sp()
1562 if (!vfp_access_check(s)) { in do_vfp_2op_sp()
1566 if (veclen > 0) { in do_vfp_2op_sp()
1568 if (vfp_sreg_is_scalar(vd)) { in do_vfp_2op_sp()
1574 if (vfp_sreg_is_scalar(vm)) { in do_vfp_2op_sp()
1593 if (veclen == 0) { in do_vfp_2op_sp()
1597 if (delta_m == 0) { in do_vfp_2op_sp()
1628 if (!dc_isar_feature(aa32_fp16_arith, s)) { in do_vfp_2op_hp()
1632 if (s->vec_len != 0 || s->vec_stride != 0) { in do_vfp_2op_hp()
1636 if (!vfp_access_check(s)) { in do_vfp_2op_hp()
1657 /* UNDEF accesses to D16-D31 if they don't exist */ in do_vfp_2op_dp()
1658 if (!dc_isar_feature(aa32_simd_r32, s) && ((vd | vm) & 0x10)) { in do_vfp_2op_dp()
1662 if (!dc_isar_feature(aa32_fpshvec, s) && in do_vfp_2op_dp()
1667 if (!vfp_access_check(s)) { in do_vfp_2op_dp()
1671 if (veclen > 0) { in do_vfp_2op_dp()
1673 if (vfp_dreg_is_scalar(vd)) { in do_vfp_2op_dp()
1679 if (vfp_dreg_is_scalar(vm)) { in do_vfp_2op_dp()
1698 if (veclen == 0) { in do_vfp_2op_dp()
1702 if (delta_m == 0) { in do_vfp_2op_dp()
2022 if (!dc_isar_feature(aa32_vminmaxnm, s)) { in trans_VMINNM_hp()
2031 if (!dc_isar_feature(aa32_vminmaxnm, s)) { in trans_VMAXNM_hp()
2040 if (!dc_isar_feature(aa32_vminmaxnm, s)) { in trans_VMINNM_sp()
2049 if (!dc_isar_feature(aa32_vminmaxnm, s)) { in trans_VMAXNM_sp()
2058 if (!dc_isar_feature(aa32_vminmaxnm, s)) { in trans_VMINNM_dp()
2067 if (!dc_isar_feature(aa32_vminmaxnm, s)) { in trans_VMAXNM_dp()
2086 * bit flipped if it is a negated-input. in do_vfm_hp()
2096 if (!dc_isar_feature(aa32_fp16_arith, s) || in do_vfm_hp()
2102 if (s->vec_len != 0 || s->vec_stride != 0) { in do_vfm_hp()
2106 if (!vfp_access_check(s)) { in do_vfm_hp()
2116 if (neg_n) { in do_vfm_hp()
2121 if (neg_d) { in do_vfm_hp()
2143 * bit flipped if it is a negated-input. in do_vfm_sp()
2153 if (!dc_isar_feature(aa32_simdfmac, s) || in do_vfm_sp()
2161 if (s->vec_len != 0 || s->vec_stride != 0) { in do_vfm_sp()
2165 if (!vfp_access_check(s)) { in do_vfm_sp()
2175 if (neg_n) { in do_vfm_sp()
2180 if (neg_d) { in do_vfm_sp()
2202 * bit flipped if it is a negated-input. in do_vfm_dp()
2212 if (!dc_isar_feature(aa32_simdfmac, s) || in do_vfm_dp()
2220 if (s->vec_len != 0 || s->vec_stride != 0) { in do_vfm_dp()
2224 /* UNDEF accesses to D16-D31 if they don't exist. */ in do_vfm_dp()
2225 if (!dc_isar_feature(aa32_simd_r32, s) && in do_vfm_dp()
2230 if (!vfp_access_check(s)) { in do_vfm_dp()
2240 if (neg_n) { in do_vfm_dp()
2245 if (neg_d) { in do_vfm_dp()
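
The three do_vfm_* variants differ only in width; each uses neg_n/neg_d to fold the four VFMA/VFMS/VFNMA/VFNMS encodings into one fused multiply-add call, as the "bit flipped if it is a negated-input" comments say. A hedged sketch of the double-precision core (helper names per QEMU's vfp helper convention):

    if (neg_n) {
        gen_helper_vfp_negd(vn, vn);    /* negated-multiplicand forms */
    }
    if (neg_d) {
        gen_helper_vfp_negd(vd, vd);    /* negated-addend forms */
    }
    fpst = fpstatus_ptr(FPST_FPCR);
    gen_helper_vfp_muladdd(vd, vn, vm, vd, fpst);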
2274 if (!dc_isar_feature(aa32_fp16_arith, s)) { in MAKE_VFM_TRANS_FNS()
2278 if (s->vec_len != 0 || s->vec_stride != 0) { in MAKE_VFM_TRANS_FNS()
2282 if (!vfp_access_check(s)) { in MAKE_VFM_TRANS_FNS()
2299 if (!dc_isar_feature(aa32_fpsp_v3, s)) { in trans_VMOV_imm_sp()
2303 if (!dc_isar_feature(aa32_fpshvec, s) && in trans_VMOV_imm_sp()
2308 if (!vfp_access_check(s)) { in trans_VMOV_imm_sp()
2312 if (veclen > 0) { in trans_VMOV_imm_sp()
2314 if (vfp_sreg_is_scalar(vd)) { in trans_VMOV_imm_sp()
2327 if (veclen == 0) { in trans_VMOV_imm_sp()
2348 if (!dc_isar_feature(aa32_fpdp_v3, s)) { in trans_VMOV_imm_dp()
2352 /* UNDEF accesses to D16-D31 if they don't exist. */ in trans_VMOV_imm_dp()
2353 if (!dc_isar_feature(aa32_simd_r32, s) && (vd & 0x10)) { in trans_VMOV_imm_dp()
2357 if (!dc_isar_feature(aa32_fpshvec, s) && in trans_VMOV_imm_dp()
2362 if (!vfp_access_check(s)) { in trans_VMOV_imm_dp()
2366 if (veclen > 0) { in trans_VMOV_imm_dp()
2368 if (vfp_dreg_is_scalar(vd)) { in trans_VMOV_imm_dp()
2381 if (veclen == 0) { in trans_VMOV_imm_dp()
2397 if (!dc_isar_feature(CHECK, s)) { \
2407 if (!dc_isar_feature(aa32_fp##PREC##_v2, s) && \
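
The two backslash-continued hits belong to trans-function factory macros: rather than writing each near-identical trans_* body out, the file stamps them from a template. A hedged sketch of the shape (the real macros take more parameters):

    #define DO_VFP_2OP(INSN, PREC, FN, CHECK)                          \
        static bool trans_##INSN##_##PREC(DisasContext *s,             \
                                          arg_##INSN##_##PREC *a)      \
        {                                                              \
            if (!dc_isar_feature(CHECK, s)) {                          \
                return false;                                          \
            }                                                          \
            return do_vfp_2op_##PREC(s, FN, a->vd, a->vm);             \
        }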
2448 if (!dc_isar_feature(aa32_fp16_arith, s)) { in DO_VFP_2OP()
2453 if (a->z && a->vm != 0) { in DO_VFP_2OP()
2457 if (!vfp_access_check(s)) { in DO_VFP_2OP()
2465 if (a->z) { in DO_VFP_2OP()
2471 if (a->e) { in DO_VFP_2OP()
2483 if (!dc_isar_feature(aa32_fpsp_v2, s)) { in trans_VCMP_sp()
2488 if (a->z && a->vm != 0) { in trans_VCMP_sp()
2492 if (!vfp_access_check(s)) { in trans_VCMP_sp()
2500 if (a->z) { in trans_VCMP_sp()
2506 if (a->e) { in trans_VCMP_sp()
2518 if (!dc_isar_feature(aa32_fpdp_v2, s)) { in trans_VCMP_dp()
2523 if (a->z && a->vm != 0) { in trans_VCMP_dp()
2527 /* UNDEF accesses to D16-D31 if they don't exist. */ in trans_VCMP_dp()
2528 if (!dc_isar_feature(aa32_simd_r32, s) && ((a->vd | a->vm) & 0x10)) { in trans_VCMP_dp()
2532 if (!vfp_access_check(s)) { in trans_VCMP_dp()
2540 if (a->z) { in trans_VCMP_dp()
2546 if (a->e) { in trans_VCMP_dp()
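
All three VCMP translators share the z/e pair seen above: z compares against zero (so Vm must be 0 in the encoding, hence the a->z && a->vm != 0 UNDEF check) and e selects the signalling compare. A hedged sketch of the single-precision tail (helper names follow QEMU's vfp_cmps/vfp_cmpes convention):

    vfp_load_reg32(vd, a->vd);
    if (a->z) {
        tcg_gen_movi_i32(vm, 0);    /* VCMP{E} with zero */
    } else {
        vfp_load_reg32(vm, a->vm);
    }
    if (a->e) {
        gen_helper_vfp_cmpes(vd, vm, tcg_env);  /* signalling: quiet NaNs trap too */
    } else {
        gen_helper_vfp_cmps(vd, vm, tcg_env);
    }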
2560 if (!dc_isar_feature(aa32_fp16_spconv, s)) { in trans_VCVT_f32_f16()
2564 if (!vfp_access_check(s)) { in trans_VCVT_f32_f16()
2571 /* The T bit tells us if we want the low or high 16 bits of Vm */ in trans_VCVT_f32_f16()
2585 if (!dc_isar_feature(aa32_fpdp_v2, s)) { in trans_VCVT_f64_f16()
2589 if (!dc_isar_feature(aa32_fp16_dpconv, s)) { in trans_VCVT_f64_f16()
2593 /* UNDEF accesses to D16-D31 if they don't exist. */ in trans_VCVT_f64_f16()
2594 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vd & 0x10)) { in trans_VCVT_f64_f16()
2598 if (!vfp_access_check(s)) { in trans_VCVT_f64_f16()
2605 /* The T bit tells us if we want the low or high 16 bits of Vm */ in trans_VCVT_f64_f16()
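
Both f16 widening conversions use the T bit the same way, and it feeds straight into vfp_f16_offset() from the top of the file. A hedged sketch of the load it controls:

    /* The T bit tells us if we want the low or high 16 bits of Vm */
    tmp = tcg_temp_new_i32();
    tcg_gen_ld16u_i32(tmp, tcg_env, vfp_f16_offset(a->vm, a->t));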
2618 if (!dc_isar_feature(aa32_bf16, s)) { in trans_VCVT_b16_f32()
2622 if (!vfp_access_check(s)) { in trans_VCVT_b16_f32()
2641 if (!dc_isar_feature(aa32_fp16_spconv, s)) { in trans_VCVT_f16_f32()
2645 if (!vfp_access_check(s)) { in trans_VCVT_f16_f32()
2666 if (!dc_isar_feature(aa32_fpdp_v2, s)) { in trans_VCVT_f16_f64()
2670 if (!dc_isar_feature(aa32_fp16_dpconv, s)) { in trans_VCVT_f16_f64()
2674 /* UNDEF accesses to D16-D31 if they don't exist. */ in trans_VCVT_f16_f64()
2675 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vm & 0x10)) { in trans_VCVT_f16_f64()
2679 if (!vfp_access_check(s)) { in trans_VCVT_f16_f64()
2699 if (!dc_isar_feature(aa32_fp16_arith, s)) { in trans_VRINTR_hp()
2703 if (!vfp_access_check(s)) { in trans_VRINTR_hp()
2720 if (!dc_isar_feature(aa32_vrint, s)) { in trans_VRINTR_sp()
2724 if (!vfp_access_check(s)) { in trans_VRINTR_sp()
2741 if (!dc_isar_feature(aa32_fpdp_v2, s)) { in trans_VRINTR_dp()
2745 if (!dc_isar_feature(aa32_vrint, s)) { in trans_VRINTR_dp()
2749 /* UNDEF accesses to D16-D31 if they don't exist. */ in trans_VRINTR_dp()
2750 if (!dc_isar_feature(aa32_simd_r32, s) && ((a->vd | a->vm) & 0x10)) { in trans_VRINTR_dp()
2754 if (!vfp_access_check(s)) { in trans_VRINTR_dp()
2772 if (!dc_isar_feature(aa32_fp16_arith, s)) { in trans_VRINTZ_hp()
2776 if (!vfp_access_check(s)) { in trans_VRINTZ_hp()
2796 if (!dc_isar_feature(aa32_vrint, s)) { in trans_VRINTZ_sp()
2800 if (!vfp_access_check(s)) { in trans_VRINTZ_sp()
2820 if (!dc_isar_feature(aa32_fpdp_v2, s)) { in trans_VRINTZ_dp()
2824 if (!dc_isar_feature(aa32_vrint, s)) { in trans_VRINTZ_dp()
2828 /* UNDEF accesses to D16-D31 if they don't exist. */ in trans_VRINTZ_dp()
2829 if (!dc_isar_feature(aa32_simd_r32, s) && ((a->vd | a->vm) & 0x10)) { in trans_VRINTZ_dp()
2833 if (!vfp_access_check(s)) { in trans_VRINTZ_dp()
2852 if (!dc_isar_feature(aa32_fp16_arith, s)) { in trans_VRINTX_hp()
2856 if (!vfp_access_check(s)) { in trans_VRINTX_hp()
2873 if (!dc_isar_feature(aa32_vrint, s)) { in trans_VRINTX_sp()
2877 if (!vfp_access_check(s)) { in trans_VRINTX_sp()
2894 if (!dc_isar_feature(aa32_fpdp_v2, s)) { in trans_VRINTX_dp()
2898 if (!dc_isar_feature(aa32_vrint, s)) { in trans_VRINTX_dp()
2902 /* UNDEF accesses to D16-D31 if they don't exist. */ in trans_VRINTX_dp()
2903 if (!dc_isar_feature(aa32_simd_r32, s) && ((a->vd | a->vm) & 0x10)) { in trans_VRINTX_dp()
2907 if (!vfp_access_check(s)) { in trans_VRINTX_dp()
2924 if (!dc_isar_feature(aa32_fpdp_v2, s)) { in trans_VCVT_sp()
2928 /* UNDEF accesses to D16-D31 if they don't exist. */ in trans_VCVT_sp()
2929 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vd & 0x10)) { in trans_VCVT_sp()
2933 if (!vfp_access_check(s)) { in trans_VCVT_sp()
2950 if (!dc_isar_feature(aa32_fpdp_v2, s)) { in trans_VCVT_dp()
2954 /* UNDEF accesses to D16-D31 if they don't exist. */ in trans_VCVT_dp()
2955 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vm & 0x10)) { in trans_VCVT_dp()
2959 if (!vfp_access_check(s)) { in trans_VCVT_dp()
2976 if (!dc_isar_feature(aa32_fp16_arith, s)) { in trans_VCVT_int_hp()
2980 if (!vfp_access_check(s)) { in trans_VCVT_int_hp()
2987 if (a->s) { in trans_VCVT_int_hp()
3003 if (!dc_isar_feature(aa32_fpsp_v2, s)) { in trans_VCVT_int_sp()
3007 if (!vfp_access_check(s)) { in trans_VCVT_int_sp()
3014 if (a->s) { in trans_VCVT_int_sp()
3031 if (!dc_isar_feature(aa32_fpdp_v2, s)) { in trans_VCVT_int_dp()
3035 /* UNDEF accesses to D16-D31 if they don't exist. */ in trans_VCVT_int_dp()
3036 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vd & 0x10)) { in trans_VCVT_int_dp()
3040 if (!vfp_access_check(s)) { in trans_VCVT_int_dp()
3048 if (a->s) { in trans_VCVT_int_dp()
3064 if (!dc_isar_feature(aa32_fpdp_v2, s)) { in trans_VJCVT()
3068 if (!dc_isar_feature(aa32_jscvt, s)) { in trans_VJCVT()
3072 /* UNDEF accesses to D16-D31 if they don't exist. */ in trans_VJCVT()
3073 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vm & 0x10)) { in trans_VJCVT()
3077 if (!vfp_access_check(s)) { in trans_VJCVT()
3095 if (!dc_isar_feature(aa32_fp16_arith, s)) { in trans_VCVT_fix_hp()
3099 if (!vfp_access_check(s)) { in trans_VCVT_fix_hp()
3151 if (!dc_isar_feature(aa32_fpsp_v3, s)) { in trans_VCVT_fix_sp()
3155 if (!vfp_access_check(s)) { in trans_VCVT_fix_sp()
3208 if (!dc_isar_feature(aa32_fpdp_v3, s)) { in trans_VCVT_fix_dp()
3212 /* UNDEF accesses to D16-D31 if they don't exist. */ in trans_VCVT_fix_dp()
3213 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vd & 0x10)) { in trans_VCVT_fix_dp()
3217 if (!vfp_access_check(s)) { in trans_VCVT_fix_dp()
3268 if (!dc_isar_feature(aa32_fp16_arith, s)) { in trans_VCVT_hp_int()
3272 if (!vfp_access_check(s)) { in trans_VCVT_hp_int()
3280 if (a->s) { in trans_VCVT_hp_int()
3281 if (a->rz) { in trans_VCVT_hp_int()
3287 if (a->rz) { in trans_VCVT_hp_int()
3302 if (!dc_isar_feature(aa32_fpsp_v2, s)) { in trans_VCVT_sp_int()
3306 if (!vfp_access_check(s)) { in trans_VCVT_sp_int()
3314 if (a->s) { in trans_VCVT_sp_int()
3315 if (a->rz) { in trans_VCVT_sp_int()
3321 if (a->rz) { in trans_VCVT_sp_int()
3337 if (!dc_isar_feature(aa32_fpdp_v2, s)) { in trans_VCVT_dp_int()
3341 /* UNDEF accesses to D16-D31 if they don't exist. */ in trans_VCVT_dp_int()
3342 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vm & 0x10)) { in trans_VCVT_dp_int()
3346 if (!vfp_access_check(s)) { in trans_VCVT_dp_int()
3355 if (a->s) { in trans_VCVT_dp_int()
3356 if (a->rz) { in trans_VCVT_dp_int()
3362 if (a->rz) { in trans_VCVT_dp_int()
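
The float-to-integer translators all end with the same four-way dispatch: s picks signed vs unsigned, rz picks round-towards-zero vs the FPSCR rounding mode. A hedged sketch of the double-precision version (the *z helper names are QEMU's round-to-zero variants):

    if (a->s) {
        if (a->rz) {
            gen_helper_vfp_tosizd(vd, vm, fpst);    /* signed, round to zero */
        } else {
            gen_helper_vfp_tosid(vd, vm, fpst);     /* signed, FPSCR rounding */
        }
    } else {
        if (a->rz) {
            gen_helper_vfp_touizd(vd, vm, fpst);    /* unsigned, round to zero */
        } else {
            gen_helper_vfp_touid(vd, vm, fpst);     /* unsigned, FPSCR rounding */
        }
    }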
3376 if (!dc_isar_feature(aa32_fp16_arith, s)) { in trans_VINS()
3380 if (s->vec_len != 0 || s->vec_stride != 0) { in trans_VINS()
3384 if (!vfp_access_check(s)) { in trans_VINS()
3402 if (!dc_isar_feature(aa32_fp16_arith, s)) { in trans_VMOVX()
3406 if (s->vec_len != 0 || s->vec_stride != 0) { in trans_VMOVX()
3410 if (!vfp_access_check(s)) { in trans_VMOVX()