Lines Matching refs:vd

325 ((a->vm | a->vn | a->vd) & 0x10)) { in trans_VSEL()
329 rd = a->vd; in trans_VSEL()
451 ((a->vm | a->vd) & 0x10)) { in trans_VRINT()
455 rd = a->vd; in trans_VRINT()
522 rd = a->vd; in trans_VCVT()
1054 vfp_store_reg32(tmp, a->vd); in trans_VLDR_VSTR_hp()
1056 vfp_load_reg32(tmp, a->vd); in trans_VLDR_VSTR_hp()
1085 vfp_store_reg32(tmp, a->vd); in trans_VLDR_VSTR_sp()
1087 vfp_load_reg32(tmp, a->vd); in trans_VLDR_VSTR_sp()
1105 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vd & 0x10)) { in trans_VLDR_VSTR_dp()
1123 vfp_store_reg64(tmp, a->vd); in trans_VLDR_VSTR_dp()
1125 vfp_load_reg64(tmp, a->vd); in trans_VLDR_VSTR_dp()
1143 if (n == 0 || (a->vd + n) > 32) { in trans_VLDM_VSTM_sp()
1186 vfp_store_reg32(tmp, a->vd + i); in trans_VLDM_VSTM_sp()
1189 vfp_load_reg32(tmp, a->vd + i); in trans_VLDM_VSTM_sp()
1221 if (n == 0 || (a->vd + n) > 32 || n > 16) { in trans_VLDM_VSTM_dp()
1234 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vd + n) > 16) { in trans_VLDM_VSTM_dp()
1269 vfp_store_reg64(tmp, a->vd + i); in trans_VLDM_VSTM_dp()
1272 vfp_load_reg64(tmp, a->vd + i); in trans_VLDM_VSTM_dp()
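
The (a->vd & 0x10) tests above reject encodings that name D16-D31 on cores with only sixteen double-precision registers, while the VLDM/VSTM checks additionally bound the whole register list. A minimal sketch of the list-form check (hypothetical standalone helper, not QEMU's code; have_d32 stands in for dc_isar_feature(aa32_simd_r32, s)):

    #include <stdbool.h>

    /* Sketch of the trans_VLDM_VSTM_dp() UNDEF checks seen above. */
    static bool vldm_dp_list_valid(bool have_d32, int vd, int n)
    {
        if (n == 0 || vd + n > 32 || n > 16) {
            return false;            /* UNPREDICTABLE register list */
        }
        if (!have_d32 && vd + n > 16) {
            return false;            /* list reaches into D16-D31 */
        }
        return true;
    }

The single-register VLDR/VSTR form needs only the first kind of test, which is the (a->vd & 0x10) check at line 1105.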
1304 typedef void VFPGen3OpSPFn(TCGv_i32 vd,
1306 typedef void VFPGen3OpDPFn(TCGv_i64 vd,
1314 typedef void VFPGen2OpSPFn(TCGv_i32 vd, TCGv_i32 vm);
1315 typedef void VFPGen2OpDPFn(TCGv_i64 vd, TCGv_i64 vm);
1359 int vd, int vn, int vm, bool reads_vd) in do_vfp_3op_sp() argument
1382 if (vfp_sreg_is_scalar(vd)) { in do_vfp_3op_sp()
1408 vfp_load_reg32(fd, vd); in do_vfp_3op_sp()
1411 vfp_store_reg32(fd, vd); in do_vfp_3op_sp()
1419 vd = vfp_advance_sreg(vd, delta_d); in do_vfp_3op_sp()
1431 int vd, int vn, int vm, bool reads_vd) in do_vfp_3op_hp() argument
1465 vfp_load_reg16(fd, vd); in do_vfp_3op_hp()
1468 vfp_store_reg32(fd, vd); in do_vfp_3op_hp()
1473 int vd, int vn, int vm, bool reads_vd) in do_vfp_3op_dp() argument
1486 if (!dc_isar_feature(aa32_simd_r32, s) && ((vd | vn | vm) & 0x10)) { in do_vfp_3op_dp()
1501 if (vfp_dreg_is_scalar(vd)) { in do_vfp_3op_dp()
1527 vfp_load_reg64(fd, vd); in do_vfp_3op_dp()
1530 vfp_store_reg64(fd, vd); in do_vfp_3op_dp()
1537 vd = vfp_advance_dreg(vd, delta_d); in do_vfp_3op_dp()
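
do_vfp_3op_sp()/do_vfp_3op_dp() implement the legacy VFP short-vector behaviour: a destination in a scalar bank runs the operation once, anything else loops FPSCR.LEN times, stepping each register number by the stride and wrapping inside its bank. A self-contained scalar model of that loop (a sketch assuming 8-register single-precision banks; names are illustrative, not QEMU's):

    /* Step reg by delta, wrapping within its 8-register bank. */
    static int advance_sreg(int reg, int delta)
    {
        return ((reg + delta) & 0x7) | (reg & ~0x7);
    }

    /* Model of the do_vfp_3op_sp() iteration over a plain register file. */
    static void vfp_3op_sp_model(float regs[32], int vd, int vn, int vm,
                                 int len, int stride,
                                 float (*fn)(float, float))
    {
        /* a destination in bank 0 (S0-S7) forces scalar operation */
        int veclen = (vd & 0x18) ? len - 1 : 0;

        for (;;) {
            regs[vd] = fn(regs[vn], regs[vm]);
            if (veclen-- == 0) {
                break;
            }
            vd = advance_sreg(vd, stride);
            vn = advance_sreg(vn, stride);
            vm = advance_sreg(vm, stride);
        }
    }

The real helper additionally special-cases a scalar Vm (zero source stride), which is where the one-many store loops in the 2-op helpers below come from.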
1548 static bool do_vfp_2op_sp(DisasContext *s, VFPGen2OpSPFn *fn, int vd, int vm) in do_vfp_2op_sp() argument
1568 if (vfp_sreg_is_scalar(vd)) { in do_vfp_2op_sp()
1591 vfp_store_reg32(fd, vd); in do_vfp_2op_sp()
1600 vd = vfp_advance_sreg(vd, delta_d); in do_vfp_2op_sp()
1601 vfp_store_reg32(fd, vd); in do_vfp_2op_sp()
1608 vd = vfp_advance_sreg(vd, delta_d); in do_vfp_2op_sp()
1615 static bool do_vfp_2op_hp(DisasContext *s, VFPGen2OpSPFn *fn, int vd, int vm) in do_vfp_2op_hp() argument
1643 vfp_store_reg32(f0, vd); in do_vfp_2op_hp()
1648 static bool do_vfp_2op_dp(DisasContext *s, VFPGen2OpDPFn *fn, int vd, int vm) in do_vfp_2op_dp() argument
1658 if (!dc_isar_feature(aa32_simd_r32, s) && ((vd | vm) & 0x10)) { in do_vfp_2op_dp()
1673 if (vfp_dreg_is_scalar(vd)) { in do_vfp_2op_dp()
1696 vfp_store_reg64(fd, vd); in do_vfp_2op_dp()
1705 vd = vfp_advance_dreg(vd, delta_d); in do_vfp_2op_dp()
1706 vfp_store_reg64(fd, vd); in do_vfp_2op_dp()
1713 vd = vfp_advance_dreg(vd, delta_d); in do_vfp_2op_dp()
1714 vm = vfp_advance_dreg(vm, delta_m); in do_vfp_2op_dp()
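
The paired store/advance lines above come from the "single source one-many" tail, where a zero source stride fans one result out across the destination vector. Whether any vector machinery engages at all is decided by the scalar-bank predicates; a hedged sketch of vfp_sreg_is_scalar()/vfp_dreg_is_scalar() as suggested by the calls above:

    #include <stdbool.h>

    /* Bank 0 (S0-S7, or D0-D3 for doubles) always behaves as scalar. */
    static bool sreg_is_scalar(int reg) { return (reg & 0x18) == 0; }
    static bool dreg_is_scalar(int reg) { return (reg & 0x0c) == 0; }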
1720 static void gen_VMLA_hp(TCGv_i32 vd, TCGv_i32 vn, TCGv_i32 vm, TCGv_ptr fpst) in gen_VMLA_hp() argument
1726 gen_helper_vfp_addh(vd, vd, tmp, fpst); in gen_VMLA_hp()
1731 return do_vfp_3op_hp(s, gen_VMLA_hp, a->vd, a->vn, a->vm, true); in trans_VMLA_hp()
1734 static void gen_VMLA_sp(TCGv_i32 vd, TCGv_i32 vn, TCGv_i32 vm, TCGv_ptr fpst) in gen_VMLA_sp() argument
1740 gen_helper_vfp_adds(vd, vd, tmp, fpst); in gen_VMLA_sp()
1745 return do_vfp_3op_sp(s, gen_VMLA_sp, a->vd, a->vn, a->vm, true); in trans_VMLA_sp()
1748 static void gen_VMLA_dp(TCGv_i64 vd, TCGv_i64 vn, TCGv_i64 vm, TCGv_ptr fpst) in gen_VMLA_dp() argument
1754 gen_helper_vfp_addd(vd, vd, tmp, fpst); in gen_VMLA_dp()
1759 return do_vfp_3op_dp(s, gen_VMLA_dp, a->vd, a->vn, a->vm, true); in trans_VMLA_dp()
1762 static void gen_VMLS_hp(TCGv_i32 vd, TCGv_i32 vn, TCGv_i32 vm, TCGv_ptr fpst) in gen_VMLS_hp() argument
1772 gen_helper_vfp_addh(vd, vd, tmp, fpst); in gen_VMLS_hp()
1777 return do_vfp_3op_hp(s, gen_VMLS_hp, a->vd, a->vn, a->vm, true); in trans_VMLS_hp()
1780 static void gen_VMLS_sp(TCGv_i32 vd, TCGv_i32 vn, TCGv_i32 vm, TCGv_ptr fpst) in gen_VMLS_sp() argument
1790 gen_helper_vfp_adds(vd, vd, tmp, fpst); in gen_VMLS_sp()
1795 return do_vfp_3op_sp(s, gen_VMLS_sp, a->vd, a->vn, a->vm, true); in trans_VMLS_sp()
1798 static void gen_VMLS_dp(TCGv_i64 vd, TCGv_i64 vn, TCGv_i64 vm, TCGv_ptr fpst) in gen_VMLS_dp() argument
1808 gen_helper_vfp_addd(vd, vd, tmp, fpst); in gen_VMLS_dp()
1813 return do_vfp_3op_dp(s, gen_VMLS_dp, a->vd, a->vn, a->vm, true); in trans_VMLS_dp()
1816 static void gen_VNMLS_hp(TCGv_i32 vd, TCGv_i32 vn, TCGv_i32 vm, TCGv_ptr fpst) in gen_VNMLS_hp() argument
1827 gen_vfp_negh(vd, vd); in gen_VNMLS_hp()
1828 gen_helper_vfp_addh(vd, vd, tmp, fpst); in gen_VNMLS_hp()
1833 return do_vfp_3op_hp(s, gen_VNMLS_hp, a->vd, a->vn, a->vm, true); in trans_VNMLS_hp()
1836 static void gen_VNMLS_sp(TCGv_i32 vd, TCGv_i32 vn, TCGv_i32 vm, TCGv_ptr fpst) in gen_VNMLS_sp() argument
1847 gen_vfp_negs(vd, vd); in gen_VNMLS_sp()
1848 gen_helper_vfp_adds(vd, vd, tmp, fpst); in gen_VNMLS_sp()
1853 return do_vfp_3op_sp(s, gen_VNMLS_sp, a->vd, a->vn, a->vm, true); in trans_VNMLS_sp()
1856 static void gen_VNMLS_dp(TCGv_i64 vd, TCGv_i64 vn, TCGv_i64 vm, TCGv_ptr fpst) in gen_VNMLS_dp() argument
1867 gen_vfp_negd(vd, vd); in gen_VNMLS_dp()
1868 gen_helper_vfp_addd(vd, vd, tmp, fpst); in gen_VNMLS_dp()
1873 return do_vfp_3op_dp(s, gen_VNMLS_dp, a->vd, a->vn, a->vm, true); in trans_VNMLS_dp()
1876 static void gen_VNMLA_hp(TCGv_i32 vd, TCGv_i32 vn, TCGv_i32 vm, TCGv_ptr fpst) in gen_VNMLA_hp() argument
1883 gen_vfp_negh(vd, vd); in gen_VNMLA_hp()
1884 gen_helper_vfp_addh(vd, vd, tmp, fpst); in gen_VNMLA_hp()
1889 return do_vfp_3op_hp(s, gen_VNMLA_hp, a->vd, a->vn, a->vm, true); in trans_VNMLA_hp()
1892 static void gen_VNMLA_sp(TCGv_i32 vd, TCGv_i32 vn, TCGv_i32 vm, TCGv_ptr fpst) in gen_VNMLA_sp() argument
1899 gen_vfp_negs(vd, vd); in gen_VNMLA_sp()
1900 gen_helper_vfp_adds(vd, vd, tmp, fpst); in gen_VNMLA_sp()
1905 return do_vfp_3op_sp(s, gen_VNMLA_sp, a->vd, a->vn, a->vm, true); in trans_VNMLA_sp()
1908 static void gen_VNMLA_dp(TCGv_i64 vd, TCGv_i64 vn, TCGv_i64 vm, TCGv_ptr fpst) in gen_VNMLA_dp() argument
1915 gen_vfp_negd(vd, vd); in gen_VNMLA_dp()
1916 gen_helper_vfp_addd(vd, vd, tmp, fpst); in gen_VNMLA_dp()
1921 return do_vfp_3op_dp(s, gen_VNMLA_dp, a->vd, a->vn, a->vm, true); in trans_VNMLA_dp()
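
The four multiply-accumulate generators above differ only in which values are negated before the final add, and each rounds the product and the sum separately. Scalar models (hypothetical names; this ignores the IEEE distinction between negation and subtraction, which matters for NaN and zero signs):

    /* VMLA:  vd = vd + (vn * vm)   */
    static float vmla_model(float d, float n, float m)  { return d + (n * m); }
    /* VMLS:  vd = vd + -(vn * vm)  */
    static float vmls_model(float d, float n, float m)  { return d + -(n * m); }
    /* VNMLS: vd = -vd + (vn * vm)  */
    static float vnmls_model(float d, float n, float m) { return -d + (n * m); }
    /* VNMLA: vd = -vd + -(vn * vm) */
    static float vnmla_model(float d, float n, float m) { return -d + -(n * m); }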
1926 return do_vfp_3op_hp(s, gen_helper_vfp_mulh, a->vd, a->vn, a->vm, false); in trans_VMUL_hp()
1931 return do_vfp_3op_sp(s, gen_helper_vfp_muls, a->vd, a->vn, a->vm, false); in trans_VMUL_sp()
1936 return do_vfp_3op_dp(s, gen_helper_vfp_muld, a->vd, a->vn, a->vm, false); in trans_VMUL_dp()
1939 static void gen_VNMUL_hp(TCGv_i32 vd, TCGv_i32 vn, TCGv_i32 vm, TCGv_ptr fpst) in gen_VNMUL_hp() argument
1942 gen_helper_vfp_mulh(vd, vn, vm, fpst); in gen_VNMUL_hp()
1943 gen_vfp_negh(vd, vd); in gen_VNMUL_hp()
1948 return do_vfp_3op_hp(s, gen_VNMUL_hp, a->vd, a->vn, a->vm, false); in trans_VNMUL_hp()
1951 static void gen_VNMUL_sp(TCGv_i32 vd, TCGv_i32 vn, TCGv_i32 vm, TCGv_ptr fpst) in gen_VNMUL_sp() argument
1954 gen_helper_vfp_muls(vd, vn, vm, fpst); in gen_VNMUL_sp()
1955 gen_vfp_negs(vd, vd); in gen_VNMUL_sp()
1960 return do_vfp_3op_sp(s, gen_VNMUL_sp, a->vd, a->vn, a->vm, false); in trans_VNMUL_sp()
1963 static void gen_VNMUL_dp(TCGv_i64 vd, TCGv_i64 vn, TCGv_i64 vm, TCGv_ptr fpst) in gen_VNMUL_dp() argument
1966 gen_helper_vfp_muld(vd, vn, vm, fpst); in gen_VNMUL_dp()
1967 gen_vfp_negd(vd, vd); in gen_VNMUL_dp()
1972 return do_vfp_3op_dp(s, gen_VNMUL_dp, a->vd, a->vn, a->vm, false); in trans_VNMUL_dp()
1977 return do_vfp_3op_hp(s, gen_helper_vfp_addh, a->vd, a->vn, a->vm, false); in trans_VADD_hp()
1982 return do_vfp_3op_sp(s, gen_helper_vfp_adds, a->vd, a->vn, a->vm, false); in trans_VADD_sp()
1987 return do_vfp_3op_dp(s, gen_helper_vfp_addd, a->vd, a->vn, a->vm, false); in trans_VADD_dp()
1992 return do_vfp_3op_hp(s, gen_helper_vfp_subh, a->vd, a->vn, a->vm, false); in trans_VSUB_hp()
1997 return do_vfp_3op_sp(s, gen_helper_vfp_subs, a->vd, a->vn, a->vm, false); in trans_VSUB_sp()
2002 return do_vfp_3op_dp(s, gen_helper_vfp_subd, a->vd, a->vn, a->vm, false); in trans_VSUB_dp()
2007 return do_vfp_3op_hp(s, gen_helper_vfp_divh, a->vd, a->vn, a->vm, false); in trans_VDIV_hp()
2012 return do_vfp_3op_sp(s, gen_helper_vfp_divs, a->vd, a->vn, a->vm, false); in trans_VDIV_sp()
2017 return do_vfp_3op_dp(s, gen_helper_vfp_divd, a->vd, a->vn, a->vm, false); in trans_VDIV_dp()
2026 a->vd, a->vn, a->vm, false); in trans_VMINNM_hp()
2035 a->vd, a->vn, a->vm, false); in trans_VMAXNM_hp()
2044 a->vd, a->vn, a->vm, false); in trans_VMINNM_sp()
2053 a->vd, a->vn, a->vm, false); in trans_VMAXNM_sp()
2062 a->vd, a->vn, a->vm, false); in trans_VMINNM_dp()
2071 a->vd, a->vn, a->vm, false); in trans_VMAXNM_dp()
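
VMINNM/VMAXNM use the IEEE 754-2008 minNum/maxNum semantics, where a single quiet-NaN operand is treated as missing rather than propagated. A sketch (hypothetical name; it elides the -0/+0 ordering and the signaling-NaN handling the real helpers perform):

    #include <math.h>

    static float minnum_model(float a, float b)
    {
        if (isnan(a)) {
            return b;           /* one NaN: return the other operand */
        }
        if (isnan(b)) {
            return a;
        }
        return (a < b) ? a : b;
    }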
2089 TCGv_i32 vn, vm, vd; in do_vfm_hp() local
2112 vd = tcg_temp_new_i32(); in do_vfm_hp()
2120 vfp_load_reg16(vd, a->vd); in do_vfm_hp()
2123 gen_vfp_negh(vd, vd); in do_vfm_hp()
2126 gen_helper_vfp_muladdh(vd, vn, vm, vd, fpst); in do_vfm_hp()
2127 vfp_store_reg32(vd, a->vd); in do_vfm_hp()
2146 TCGv_i32 vn, vm, vd; in do_vfm_sp() local
2171 vd = tcg_temp_new_i32(); in do_vfm_sp()
2179 vfp_load_reg32(vd, a->vd); in do_vfm_sp()
2182 gen_vfp_negs(vd, vd); in do_vfm_sp()
2185 gen_helper_vfp_muladds(vd, vn, vm, vd, fpst); in do_vfm_sp()
2186 vfp_store_reg32(vd, a->vd); in do_vfm_sp()
2205 TCGv_i64 vn, vm, vd; in do_vfm_dp() local
2226 ((a->vd | a->vn | a->vm) & 0x10)) { in do_vfm_dp()
2236 vd = tcg_temp_new_i64(); in do_vfm_dp()
2244 vfp_load_reg64(vd, a->vd); in do_vfm_dp()
2247 gen_vfp_negd(vd, vd); in do_vfm_dp()
2250 gen_helper_vfp_muladdd(vd, vn, vm, vd, fpst); in do_vfm_dp()
2251 vfp_store_reg64(vd, a->vd); in do_vfm_dp()
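
The do_vfm_* helpers lower VFMA/VFMS/VFNMA/VFNMS onto one fused multiply-add call; two bool flags (neg_n and neg_d in the source) pick which inputs get negated first, which is what the gen_vfp_neg* calls above do to vd. A scalar model (hypothetical name; see the source for the exact flag-to-instruction mapping):

    #include <math.h>
    #include <stdbool.h>

    /* One rounding step for n*m + d, unlike the VMLA family above,
     * which rounds the product and the sum separately. */
    static float vfm_model(float d, float n, float m, bool neg_n, bool neg_d)
    {
        if (neg_n) {
            n = -n;             /* negate one product input */
        }
        if (neg_d) {
            d = -d;             /* negate the addend */
        }
        return fmaf(n, m, d);   /* fused: no intermediate rounding */
    }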
2286 vfp_store_reg32(tcg_constant_i32(vfp_expand_imm(MO_16, a->imm)), a->vd); in MAKE_VFM_TRANS_FNS()
2295 uint32_t vd; in trans_VMOV_imm_sp() local
2297 vd = a->vd; in trans_VMOV_imm_sp()
2314 if (vfp_sreg_is_scalar(vd)) { in trans_VMOV_imm_sp()
2325 vfp_store_reg32(fd, vd); in trans_VMOV_imm_sp()
2333 vd = vfp_advance_sreg(vd, delta_d); in trans_VMOV_imm_sp()
2344 uint32_t vd; in trans_VMOV_imm_dp() local
2346 vd = a->vd; in trans_VMOV_imm_dp()
2353 if (!dc_isar_feature(aa32_simd_r32, s) && (vd & 0x10)) { in trans_VMOV_imm_dp()
2368 if (vfp_dreg_is_scalar(vd)) { in trans_VMOV_imm_dp()
2379 vfp_store_reg64(fd, vd); in trans_VMOV_imm_dp()
2387 vd = vfp_advance_dreg(vd, delta_d); in trans_VMOV_imm_dp()
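
Line 2286 builds the half-precision immediate with vfp_expand_imm(MO_16, a->imm); trans_VMOV_imm_sp()/_dp() do the same for wider formats before the (possibly vectorized) stores. The imm8 field abcdefgh expands to a sign bit, a partly replicated exponent, and the top mantissa bits. A self-contained model of the 32-bit expansion, assuming it mirrors VFPExpandImm from the Arm ARM rather than quoting QEMU's helper:

    #include <stdint.h>

    /* imm8 = abcdefgh  ->  a : NOT(b) : bbbbb : cdefgh : 19 zero bits */
    static uint32_t vfp_expand_imm32_model(uint8_t imm8)
    {
        uint32_t a = (imm8 >> 7) & 1;
        uint32_t b = (imm8 >> 6) & 1;
        uint32_t cdefgh = imm8 & 0x3f;

        return (a << 31) | ((b ^ 1) << 30) |
               (b ? 0x3e000000u : 0) | (cdefgh << 19);
    }

    /* e.g. vfp_expand_imm32_model(0x70) == 0x3f800000, i.e. 1.0f */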
2400 return do_vfp_2op_##PREC(s, FN, a->vd, a->vm); \
2411 return do_vfp_2op_##PREC(s, FN, a->vd, a->vm); \
2425 static void gen_VSQRT_hp(TCGv_i32 vd, TCGv_i32 vm) in DO_VFP_VMOV()
2427 gen_helper_vfp_sqrth(vd, vm, fpstatus_ptr(FPST_A32_F16)); in DO_VFP_VMOV()
2430 static void gen_VSQRT_sp(TCGv_i32 vd, TCGv_i32 vm) in gen_VSQRT_sp() argument
2432 gen_helper_vfp_sqrts(vd, vm, fpstatus_ptr(FPST_A32)); in gen_VSQRT_sp()
2435 static void gen_VSQRT_dp(TCGv_i64 vd, TCGv_i64 vm) in gen_VSQRT_dp() argument
2437 gen_helper_vfp_sqrtd(vd, vm, fpstatus_ptr(FPST_A32)); in gen_VSQRT_dp()
2446 TCGv_i32 vd, vm; in DO_VFP_2OP() local
2461 vd = tcg_temp_new_i32(); in DO_VFP_2OP()
2464 vfp_load_reg16(vd, a->vd); in DO_VFP_2OP()
2472 gen_helper_vfp_cmpeh(vd, vm, tcg_env); in DO_VFP_2OP()
2474 gen_helper_vfp_cmph(vd, vm, tcg_env); in DO_VFP_2OP()
2481 TCGv_i32 vd, vm; in trans_VCMP_sp() local
2496 vd = tcg_temp_new_i32(); in trans_VCMP_sp()
2499 vfp_load_reg32(vd, a->vd); in trans_VCMP_sp()
2507 gen_helper_vfp_cmpes(vd, vm, tcg_env); in trans_VCMP_sp()
2509 gen_helper_vfp_cmps(vd, vm, tcg_env); in trans_VCMP_sp()
2516 TCGv_i64 vd, vm; in trans_VCMP_dp() local
2528 if (!dc_isar_feature(aa32_simd_r32, s) && ((a->vd | a->vm) & 0x10)) { in trans_VCMP_dp()
2536 vd = tcg_temp_new_i64(); in trans_VCMP_dp()
2539 vfp_load_reg64(vd, a->vd); in trans_VCMP_dp()
2547 gen_helper_vfp_cmped(vd, vm, tcg_env); in trans_VCMP_dp()
2549 gen_helper_vfp_cmpd(vd, vm, tcg_env); in trans_VCMP_dp()
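
In the VCMP translators, vd is a loaded operand value rather than a destination: the result goes to the FPSCR flags. The cmpe variants (E bit set) raise Invalid Operation for any NaN operand, while the plain cmp variants do so only for signaling NaNs. A sketch of that distinction (hypothetical helpers; the real code also latches the NZCV comparison result):

    #include <math.h>
    #include <stdbool.h>
    #include <stdint.h>
    #include <string.h>

    /* A float NaN is signaling when the quiet bit (mantissa MSB) is clear. */
    static bool is_snan32(float f)
    {
        uint32_t bits;
        memcpy(&bits, &f, sizeof(bits));
        return (bits & 0x7fc00000u) == 0x7f800000u &&
               (bits & 0x003fffffu) != 0;
    }

    /* Does this compare raise Invalid Operation? */
    static bool vcmp_invalid(float a, float b, bool e_bit)
    {
        if (isnan(a) || isnan(b)) {
            return e_bit || is_snan32(a) || is_snan32(b);
        }
        return false;
    }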
2574 vfp_store_reg32(tmp, a->vd); in trans_VCVT_f32_f16()
2583 TCGv_i64 vd; in trans_VCVT_f64_f16() local
2594 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vd & 0x10)) { in trans_VCVT_f64_f16()
2607 vd = tcg_temp_new_i64(); in trans_VCVT_f64_f16()
2608 gen_helper_vfp_fcvt_f16_to_f64(vd, tmp, fpst, ahp_mode); in trans_VCVT_f64_f16()
2609 vfp_store_reg64(vd, a->vd); in trans_VCVT_f64_f16()
2631 tcg_gen_st16_i32(tmp, tcg_env, vfp_f16_offset(a->vd, a->t)); in trans_VCVT_b16_f32()
2655 tcg_gen_st16_i32(tmp, tcg_env, vfp_f16_offset(a->vd, a->t)); in trans_VCVT_f16_f32()
2690 tcg_gen_st16_i32(tmp, tcg_env, vfp_f16_offset(a->vd, a->t)); in trans_VCVT_f16_f64()
2711 vfp_store_reg32(tmp, a->vd); in trans_VRINTR_hp()
2732 vfp_store_reg32(tmp, a->vd); in trans_VRINTR_sp()
2750 if (!dc_isar_feature(aa32_simd_r32, s) && ((a->vd | a->vm) & 0x10)) { in trans_VRINTR_dp()
2762 vfp_store_reg64(tmp, a->vd); in trans_VRINTR_dp()
2786 vfp_store_reg32(tmp, a->vd); in trans_VRINTZ_hp()
2810 vfp_store_reg32(tmp, a->vd); in trans_VRINTZ_sp()
2829 if (!dc_isar_feature(aa32_simd_r32, s) && ((a->vd | a->vm) & 0x10)) { in trans_VRINTZ_dp()
2843 vfp_store_reg64(tmp, a->vd); in trans_VRINTZ_dp()
2864 vfp_store_reg32(tmp, a->vd); in trans_VRINTX_hp()
2885 vfp_store_reg32(tmp, a->vd); in trans_VRINTX_sp()
2903 if (!dc_isar_feature(aa32_simd_r32, s) && ((a->vd | a->vm) & 0x10)) { in trans_VRINTX_dp()
2915 vfp_store_reg64(tmp, a->vd); in trans_VRINTX_dp()
2921 TCGv_i64 vd; in trans_VCVT_sp() local
2929 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vd & 0x10)) { in trans_VCVT_sp()
2938 vd = tcg_temp_new_i64(); in trans_VCVT_sp()
2940 gen_helper_vfp_fcvtds(vd, vm, fpstatus_ptr(FPST_A32)); in trans_VCVT_sp()
2941 vfp_store_reg64(vd, a->vd); in trans_VCVT_sp()
2948 TCGv_i32 vd; in trans_VCVT_dp() local
2963 vd = tcg_temp_new_i32(); in trans_VCVT_dp()
2966 gen_helper_vfp_fcvtsd(vd, vm, fpstatus_ptr(FPST_A32)); in trans_VCVT_dp()
2967 vfp_store_reg32(vd, a->vd); in trans_VCVT_dp()
2994 vfp_store_reg32(vm, a->vd); in trans_VCVT_int_hp()
3021 vfp_store_reg32(vm, a->vd); in trans_VCVT_int_sp()
3028 TCGv_i64 vd; in trans_VCVT_int_dp() local
3036 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vd & 0x10)) { in trans_VCVT_int_dp()
3045 vd = tcg_temp_new_i64(); in trans_VCVT_int_dp()
3050 gen_helper_vfp_sitod(vd, vm, fpst); in trans_VCVT_int_dp()
3053 gen_helper_vfp_uitod(vd, vm, fpst); in trans_VCVT_int_dp()
3055 vfp_store_reg64(vd, a->vd); in trans_VCVT_int_dp()
3061 TCGv_i32 vd; in trans_VJCVT() local
3082 vd = tcg_temp_new_i32(); in trans_VJCVT()
3084 gen_helper_vjcvt(vd, vm, tcg_env); in trans_VJCVT()
3085 vfp_store_reg32(vd, a->vd); in trans_VJCVT()
3091 TCGv_i32 vd, shift; in trans_VCVT_fix_hp() local
3105 vd = tcg_temp_new_i32(); in trans_VCVT_fix_hp()
3106 vfp_load_reg32(vd, a->vd); in trans_VCVT_fix_hp()
3114 gen_helper_vfp_shtoh_round_to_nearest(vd, vd, shift, fpst); in trans_VCVT_fix_hp()
3117 gen_helper_vfp_sltoh_round_to_nearest(vd, vd, shift, fpst); in trans_VCVT_fix_hp()
3120 gen_helper_vfp_uhtoh_round_to_nearest(vd, vd, shift, fpst); in trans_VCVT_fix_hp()
3123 gen_helper_vfp_ultoh_round_to_nearest(vd, vd, shift, fpst); in trans_VCVT_fix_hp()
3126 gen_helper_vfp_toshh_round_to_zero(vd, vd, shift, fpst); in trans_VCVT_fix_hp()
3129 gen_helper_vfp_toslh_round_to_zero(vd, vd, shift, fpst); in trans_VCVT_fix_hp()
3132 gen_helper_vfp_touhh_round_to_zero(vd, vd, shift, fpst); in trans_VCVT_fix_hp()
3135 gen_helper_vfp_toulh_round_to_zero(vd, vd, shift, fpst); in trans_VCVT_fix_hp()
3141 vfp_store_reg32(vd, a->vd); in trans_VCVT_fix_hp()
3147 TCGv_i32 vd, shift; in trans_VCVT_fix_sp() local
3161 vd = tcg_temp_new_i32(); in trans_VCVT_fix_sp()
3162 vfp_load_reg32(vd, a->vd); in trans_VCVT_fix_sp()
3170 gen_helper_vfp_shtos_round_to_nearest(vd, vd, shift, fpst); in trans_VCVT_fix_sp()
3173 gen_helper_vfp_sltos_round_to_nearest(vd, vd, shift, fpst); in trans_VCVT_fix_sp()
3176 gen_helper_vfp_uhtos_round_to_nearest(vd, vd, shift, fpst); in trans_VCVT_fix_sp()
3179 gen_helper_vfp_ultos_round_to_nearest(vd, vd, shift, fpst); in trans_VCVT_fix_sp()
3182 gen_helper_vfp_toshs_round_to_zero(vd, vd, shift, fpst); in trans_VCVT_fix_sp()
3185 gen_helper_vfp_tosls_round_to_zero(vd, vd, shift, fpst); in trans_VCVT_fix_sp()
3188 gen_helper_vfp_touhs_round_to_zero(vd, vd, shift, fpst); in trans_VCVT_fix_sp()
3191 gen_helper_vfp_touls_round_to_zero(vd, vd, shift, fpst); in trans_VCVT_fix_sp()
3197 vfp_store_reg32(vd, a->vd); in trans_VCVT_fix_sp()
3203 TCGv_i64 vd; in trans_VCVT_fix_dp() local
3213 if (!dc_isar_feature(aa32_simd_r32, s) && (a->vd & 0x10)) { in trans_VCVT_fix_dp()
3223 vd = tcg_temp_new_i64(); in trans_VCVT_fix_dp()
3224 vfp_load_reg64(vd, a->vd); in trans_VCVT_fix_dp()
3232 gen_helper_vfp_shtod_round_to_nearest(vd, vd, shift, fpst); in trans_VCVT_fix_dp()
3235 gen_helper_vfp_sltod_round_to_nearest(vd, vd, shift, fpst); in trans_VCVT_fix_dp()
3238 gen_helper_vfp_uhtod_round_to_nearest(vd, vd, shift, fpst); in trans_VCVT_fix_dp()
3241 gen_helper_vfp_ultod_round_to_nearest(vd, vd, shift, fpst); in trans_VCVT_fix_dp()
3244 gen_helper_vfp_toshd_round_to_zero(vd, vd, shift, fpst); in trans_VCVT_fix_dp()
3247 gen_helper_vfp_tosld_round_to_zero(vd, vd, shift, fpst); in trans_VCVT_fix_dp()
3250 gen_helper_vfp_touhd_round_to_zero(vd, vd, shift, fpst); in trans_VCVT_fix_dp()
3253 gen_helper_vfp_tould_round_to_zero(vd, vd, shift, fpst); in trans_VCVT_fix_dp()
3259 vfp_store_reg64(vd, a->vd); in trans_VCVT_fix_dp()
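
Each trans_VCVT_fix_* body loads the register, dispatches on signedness, size, and direction to one of eight helpers, and stores the result back to the same register; shift is the number of fraction bits, and per the helper suffixes, to-fixed truncates toward zero while to-float rounds to nearest. Scalar models of the two directions (hypothetical names; the real to-fixed helpers also saturate and set FPSCR cumulative flags):

    #include <math.h>
    #include <stdint.h>

    /* fixed -> float: a signed value with 'frac' fraction bits is v / 2^frac */
    static float fixed_s32_to_float(int32_t v, int frac)
    {
        return ldexpf((float)v, -frac);
    }

    /* float -> fixed: scale by 2^frac, truncate toward zero */
    static int32_t float_to_fixed_s32(float f, int frac)
    {
        return (int32_t)truncf(ldexpf(f, frac));
    }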
3293 vfp_store_reg32(vm, a->vd); in trans_VCVT_hp_int()
3327 vfp_store_reg32(vm, a->vd); in trans_VCVT_sp_int()
3333 TCGv_i32 vd; in trans_VCVT_dp_int() local
3352 vd = tcg_temp_new_i32(); in trans_VCVT_dp_int()
3357 gen_helper_vfp_tosizd(vd, vm, fpst); in trans_VCVT_dp_int()
3359 gen_helper_vfp_tosid(vd, vm, fpst); in trans_VCVT_dp_int()
3363 gen_helper_vfp_touizd(vd, vm, fpst); in trans_VCVT_dp_int()
3365 gen_helper_vfp_touid(vd, vm, fpst); in trans_VCVT_dp_int()
3368 vfp_store_reg32(vd, a->vd); in trans_VCVT_dp_int()
3392 vfp_load_reg16(rd, a->vd); in trans_VINS()
3394 vfp_store_reg32(rd, a->vd); in trans_VINS()
3418 vfp_store_reg32(rm, a->vd); in trans_VMOVX()
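
VINS and VMOVX shuffle half-precision values between the two 16-bit halves of a single-precision register: VINS inserts the low half of Sm into the high half of Sd, preserving Sd's low half, and VMOVX moves the high half of Sm into the low half of Sd, zeroing the top. Integer models (hypothetical names):

    #include <stdint.h>

    /* VINS: low 16 bits of sm replace the high 16 bits of sd */
    static uint32_t vins_model(uint32_t sd, uint32_t sm)
    {
        return (sd & 0x0000ffffu) | (sm << 16);
    }

    /* VMOVX: high 16 bits of sm become the low 16 bits of the result */
    static uint32_t vmovx_model(uint32_t sm)
    {
        return sm >> 16;
    }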