Lines matching full:1
48 uint64_t s_mask; /* mask bit is 1 if value bit matches msb */
61 int carry_state; /* -1 = non-constant, {0,1} = constant carry-in */
146 ti->z_mask = -1; in init_ts_info()
186 remove_mem_copy_in(ctx, 0, -1); in remove_mem_copy_all()
233 ti->z_mask = -1; in reset_ts()
385 op->args[1] = src; in tcg_opt_gen_mov()
552 return (int32_t)x / ((int32_t)y ? : 1); in do_constant_folding_2()
554 return (int64_t)x / ((int64_t)y ? : 1); in do_constant_folding_2()
558 return (uint32_t)x / ((uint32_t)y ? : 1); in do_constant_folding_2()
560 return (uint64_t)x / ((uint64_t)y ? : 1); in do_constant_folding_2()
564 return (int32_t)x % ((int32_t)y ? : 1); in do_constant_folding_2()
566 return (int64_t)x % ((int64_t)y ? : 1); in do_constant_folding_2()
570 return (uint32_t)x % ((uint32_t)y ? : 1); in do_constant_folding_2()
572 return (uint64_t)x % ((uint64_t)y ? : 1); in do_constant_folding_2()
671 return 1; in do_constant_folding_cond_eq()
674 return -1; in do_constant_folding_cond_eq()
683 * Return -1 if the condition can't be simplified,
684 * and the result of the condition (0 or 1) if it can.
700 return -1; in do_constant_folding_cond()
711 return 1; in do_constant_folding_cond()
713 return -1; in do_constant_folding_cond()
716 return -1; in do_constant_folding_cond()
759 sum += pref_commutative(arg_info(p1[1])); in swap_commutative2()
761 sum -= pref_commutative(arg_info(p2[1])); in swap_commutative2()
765 t = p1[1], p1[1] = p2[1], p2[1] = t; in swap_commutative2()
772 * Return -1 if the condition can't be simplified,
773 * and the result of the condition (0 or 1) if it can.
794 return -1; in do_constant_folding_cond1()
807 return -1; in do_constant_folding_cond1()
814 return -1; in do_constant_folding_cond1()
823 op2->args[1] = *p1; in do_constant_folding_cond1()
830 return -1; in do_constant_folding_cond1()
847 ah = args[1]; in do_constant_folding_cond2()
874 return 1; in do_constant_folding_cond2()
880 /* TSTNE x,-1 -> NE x,0 */ in do_constant_folding_cond2()
881 if (b == -1 && is_tst_cond(c)) { in do_constant_folding_cond2()
884 return -1; in do_constant_folding_cond2()
892 return -1; in do_constant_folding_cond2()
906 return -1; in do_constant_folding_cond2()
918 op1->args[1] = al; in do_constant_folding_cond2()
921 op2->args[1] = ah; in do_constant_folding_cond2()
925 args[1] = t2; in do_constant_folding_cond2()
929 return -1; in do_constant_folding_cond2()
991 if (arg_is_const(op->args[1])) { in fold_const1()
994 t = arg_info(op->args[1])->val; in fold_const1()
1003 if (arg_is_const(op->args[1]) && arg_is_const(op->args[2])) { in fold_const2()
1004 uint64_t t1 = arg_info(op->args[1])->val; in fold_const2()
1015 swap_commutative(op->args[0], &op->args[1], &op->args[2]); in fold_commutative()
1021 swap_commutative(op->args[0], &op->args[1], &op->args[2]); in fold_const2_commutative()
1040 tcg_debug_assert(def->nb_oargs == 1); in fold_masks_zs()
1067 rep = MAX(rep - 1, 0); in fold_masks_zs()
1080 return fold_masks_zs(ctx, op, -1, s_mask); in fold_masks_s()
1094 return tcg_opt_gen_mov(ctx, op, op->args[0], op->args[1]); in fold_affected_mask()
1127 op->args[1] = op->args[idx]; in fold_to_not()
1136 if (arg_is_const_val(op->args[1], i)) { in fold_ix_to_i()
1145 if (arg_is_const_val(op->args[1], i)) { in fold_ix_to_not()
1164 return tcg_opt_gen_mov(ctx, op, op->args[0], op->args[1]); in fold_xi_to_x()
1173 return fold_to_not(ctx, op, 1); in fold_xi_to_not()
1181 if (args_are_copies(op->args[1], op->args[2])) { in fold_xx_to_i()
1190 if (args_are_copies(op->args[1], op->args[2])) { in fold_xx_to_x()
1191 return tcg_opt_gen_mov(ctx, op, op->args[0], op->args[1]); in fold_xx_to_x()
1200 * 1) those that produce a constant
1247 op->args[2] = arg_new_constant(ctx, ti_const_val(t2) + 1); in squash_prev_carryout()
1254 op->args[1] = ret; in squash_prev_carryout()
1255 op->args[2] = arg_new_constant(ctx, 1); in squash_prev_carryout()
1279 * Otherwise emit a second add +1. in fold_addci()
1282 op->args[2] = arg_new_constant(ctx, ti_const_val(t2) + 1); in fold_addci()
1287 op2->args[1] = op->args[1]; in fold_addci()
1291 op->args[1] = op->args[0]; in fold_addci()
1292 op->args[2] = arg_new_constant(ctx, 1); in fold_addci()
1296 ctx->carry_state = -1; in fold_addci()
1303 int carry_out = -1; in fold_addcio()
1307 t1 = arg_info(op->args[1]); in fold_addcio()
1335 op->args[2] = arg_new_constant(ctx, v + 1); in fold_addcio()
1339 carry_out = 1; in fold_addcio()
1344 op->args[1] = arg_new_constant(ctx, v + 1); in fold_addcio()
1347 carry_out = 1; in fold_addcio()
1363 int carry_out = -1; in fold_addco()
1367 t1 = arg_info(op->args[1]); in fold_addco()
1401 fold_xi_to_x(ctx, op, -1) || in fold_and()
1406 t1 = arg_info(op->args[1]); in fold_and()
1422 * Sign repetitions are perforce all identical, whether they are 1 or 0. in fold_and()
1438 fold_ix_to_not(ctx, op, -1)) { in fold_andc()
1442 t1 = arg_info(op->args[1]); in fold_andc()
1492 if (tv == -1 && fv == 0) { in fold_bitsel_vec()
1493 return tcg_opt_gen_mov(ctx, op, op->args[0], op->args[1]); in fold_bitsel_vec()
1495 if (tv == 0 && fv == -1) { in fold_bitsel_vec()
1501 op->args[2] = arg_new_constant(ctx, -1); in fold_bitsel_vec()
1508 if (tv == -1) { in fold_bitsel_vec()
1515 op->args[2] = op->args[1]; in fold_bitsel_vec()
1516 op->args[1] = op->args[3]; in fold_bitsel_vec()
1526 if (fv == -1 && TCG_TARGET_HAS_orc_vec) { in fold_bitsel_vec()
1528 op->args[2] = op->args[1]; in fold_bitsel_vec()
1529 op->args[1] = op->args[3]; in fold_bitsel_vec()
1539 &op->args[1], &op->args[2]); in fold_brcond()
1581 inv = 1; in fold_brcond2()
1593 case 1: in fold_brcond2()
1597 i = do_constant_folding_cond(TCG_TYPE_I32, op->args[1], in fold_brcond2()
1602 case 1: in fold_brcond2()
1622 op->args[1] = op->args[2]; in fold_brcond2()
1629 op->args[0] = op->args[1]; in fold_brcond2()
1630 op->args[1] = op->args[3]; in fold_brcond2()
1653 TempOptInfo *t1 = arg_info(op->args[1]); in fold_bswap()
1685 /* If the sign bit may be 1, force all the bits above to 1. */ in fold_bswap()
1693 /* The high bits are undefined: force all bits above the sign to 1. */ in fold_bswap()
1694 z_mask |= sign << 1; in fold_bswap()
1741 if (swap_commutative(NO_DEST, &op->args[1], &op->args[2])) { in fold_cmp_vec()
1755 if (swap_commutative(NO_DEST, &op->args[1], &op->args[2])) { in fold_cmpsel_vec()
1771 TempOptInfo *t1 = arg_info(op->args[1]); in fold_count_zeros()
1824 TempOptInfo *t1 = arg_info(op->args[1]); in fold_deposit()
1842 op->args[1] = op->args[2]; in fold_deposit()
1849 uint64_t mask = deposit64(-1, ofs, len, 0); in fold_deposit()
1870 fold_xi_to_x(ctx, op, 1)) { in fold_divide()
1878 if (arg_is_const(op->args[1])) { in fold_dup()
1879 uint64_t t = arg_info(op->args[1])->val; in fold_dup()
1888 if (arg_is_const(op->args[1]) && arg_is_const(op->args[2])) { in fold_dup2()
1889 uint64_t t = deposit64(arg_info(op->args[1])->val, 32, 32, in fold_dup2()
1894 if (args_are_copies(op->args[1], op->args[2])) { in fold_dup2()
1907 fold_xi_to_x(ctx, op, -1) || in fold_eqv()
1932 t1 = arg_info(op->args[1]); in fold_eqv()
1940 TempOptInfo *t1 = arg_info(op->args[1]); in fold_extract()
1960 if (arg_is_const(op->args[1]) && arg_is_const(op->args[2])) { in fold_extract2()
1961 uint64_t v1 = arg_info(op->args[1])->val; in fold_extract2()
1986 t1 = arg_info(op->args[1]); in fold_exts()
2009 z_mask = arg_info(op->args[1])->z_mask; in fold_extu()
2050 return tcg_opt_gen_mov(ctx, op, op->args[0], op->args[1]); in fold_mov()
2072 i = do_constant_folding_cond1(ctx, op, NO_DEST, &op->args[1], in fold_movcond()
2088 if (tv == 1 && fv == 0) { in fold_movcond()
2091 } else if (fv == 1 && tv == 0) { in fold_movcond()
2094 } else if (tv == -1 && fv == 0) { in fold_movcond()
2097 } else if (fv == -1 && tv == 0) { in fold_movcond()
2110 fold_xi_to_x(ctx, op, 1)) { in fold_mul()
2160 rh = op->args[1]; in fold_multiply2()
2177 fold_xi_to_not(ctx, op, -1)) { in fold_nand()
2181 s_mask = arg_info(op->args[1])->s_mask in fold_nand()
2188 /* Set to 1 all bits to the left of the rightmost. */ in fold_neg_no_const()
2189 uint64_t z_mask = arg_info(op->args[1])->z_mask; in fold_neg_no_const()
2209 s_mask = arg_info(op->args[1])->s_mask in fold_nor()
2219 return fold_masks_s(ctx, op, arg_info(op->args[1])->s_mask); in fold_not()
2233 t1 = arg_info(op->args[1]); in fold_or()
2246 fold_xx_to_i(ctx, op, -1) || in fold_orc()
2247 fold_xi_to_x(ctx, op, -1) || in fold_orc()
2272 t1 = arg_info(op->args[1]); in fold_orc()
2283 uint64_t z_mask = -1, s_mask = 0; in fold_qemu_ld_1reg()
2287 s_mask = MAKE_64BIT_MASK(width - 1, 64 - (width - 1)); in fold_qemu_ld_1reg()
2322 /* Return 1 if finished, -1 if simplified, 0 if unchanged. */
2332 a_zmask = arg_info(op->args[1])->z_mask; in fold_setcond_zmask()
2365 if (a_zmask <= 1) { in fold_setcond_zmask()
2382 convert = (b_val == 1); in fold_setcond_zmask()
2389 return tcg_opt_gen_mov(ctx, op, op->args[0], op->args[1]); in fold_setcond_zmask()
2396 op->args[2] = arg_new_constant(ctx, -1); in fold_setcond_zmask()
2399 op->args[2] = arg_new_constant(ctx, 1); in fold_setcond_zmask()
2401 return -1; in fold_setcond_zmask()
2428 src1 = op->args[1]; in fold_setcond_tst_pow2()
2431 if (sh && neg && !inv && TCG_TARGET_sextract_valid(ctx->type, sh, 1)) { in fold_setcond_tst_pow2()
2433 op->args[1] = src1; in fold_setcond_tst_pow2()
2435 op->args[3] = 1; in fold_setcond_tst_pow2()
2437 } else if (sh && TCG_TARGET_extract_valid(ctx->type, sh, 1)) { in fold_setcond_tst_pow2()
2439 op->args[1] = src1; in fold_setcond_tst_pow2()
2441 op->args[3] = 1; in fold_setcond_tst_pow2()
2446 op2->args[1] = src1; in fold_setcond_tst_pow2()
2451 op->args[1] = src1; in fold_setcond_tst_pow2()
2452 op->args[2] = arg_new_constant(ctx, 1); in fold_setcond_tst_pow2()
2458 op2->args[1] = ret; in fold_setcond_tst_pow2()
2459 op2->args[2] = arg_new_constant(ctx, -1); in fold_setcond_tst_pow2()
2463 op2->args[1] = ret; in fold_setcond_tst_pow2()
2464 op2->args[2] = arg_new_constant(ctx, 1); in fold_setcond_tst_pow2()
2468 op2->args[1] = ret; in fold_setcond_tst_pow2()
2474 int i = do_constant_folding_cond1(ctx, op, op->args[0], &op->args[1], in fold_setcond()
2488 return fold_masks_z(ctx, op, 1); in fold_setcond()
2493 int i = do_constant_folding_cond1(ctx, op, op->args[0], &op->args[1], in fold_negsetcond()
2507 /* Value is {0,-1} so all bits are repetitions of the sign. */ in fold_negsetcond()
2508 return fold_masks_s(ctx, op, -1); in fold_negsetcond()
2516 i = do_constant_folding_cond2(ctx, op, &op->args[1]); in fold_setcond2()
2536 inv = 1; in fold_setcond2()
2543 i = do_constant_folding_cond(TCG_TYPE_I32, op->args[1], in fold_setcond2()
2548 case 1: in fold_setcond2()
2557 case 1: in fold_setcond2()
2582 op->args[1] = op->args[2]; in fold_setcond2()
2589 return fold_masks_z(ctx, op, 1); in fold_setcond2()
2598 TempOptInfo *t1 = arg_info(op->args[1]); in fold_sextract()
2609 s_mask |= -1ull << (len - 1); in fold_sextract()
2630 t1 = arg_info(op->args[1]); in fold_shift()
2672 if (!arg_is_const(op->args[1]) || arg_info(op->args[1])->val != 0) { in fold_sub_to_neg()
2694 op->args[1] = op->args[2]; in fold_sub_to_neg()
2747 op->args[2] = arg_new_constant(ctx, -(ti_const_val(t2) + 1)); in squash_prev_borrowout()
2755 op->args[1] = ret; in squash_prev_borrowout()
2756 op->args[2] = arg_new_constant(ctx, -1); in squash_prev_borrowout()
2772 ctx->carry_state = -1; in fold_subbi()
2783 * separate add -1. in fold_subbi()
2787 op->args[2] = arg_new_constant(ctx, -(ti_const_val(t2) + 1)); in fold_subbi()
2792 op2->args[1] = op->args[1]; in fold_subbi()
2796 op->args[1] = op->args[0]; in fold_subbi()
2797 op->args[2] = arg_new_constant(ctx, -1); in fold_subbi()
2806 int borrow_out = -1; in fold_subbio()
2817 t1 = arg_info(op->args[1]); in fold_subbio()
2826 op->args[2] = arg_new_constant(ctx, v + 1); in fold_subbio()
2829 /* subtracting max + 1 produces known borrow out. */ in fold_subbio()
2830 borrow_out = 1; in fold_subbio()
2835 op->args[2] = arg_new_constant(ctx, v - 1); in fold_subbio()
2852 TempOptInfo *t1 = arg_info(op->args[1]); in fold_subbo()
2854 int borrow_out = -1; in fold_subbo()
2871 uint64_t z_mask = -1, s_mask = 0; in fold_tcg_ld()
2905 if (op->args[1] != tcgv_ptr_arg(tcg_env)) { in fold_tcg_ld_memcopy()
2918 record_mem_copy(ctx, type, dst, ofs, ofs + tcg_type_size(type) - 1); in fold_tcg_ld_memcopy()
2927 if (op->args[1] != tcgv_ptr_arg(tcg_env)) { in fold_tcg_st()
2937 lm1 = 1; in fold_tcg_st()
2944 lm1 = tcg_type_size(ctx->type) - 1; in fold_tcg_st()
2959 if (op->args[1] != tcgv_ptr_arg(tcg_env)) { in fold_tcg_st_memcopy()
2979 last = ofs + tcg_type_size(type) - 1; in fold_tcg_st_memcopy()
2993 fold_xi_to_not(ctx, op, -1)) { in fold_xor()
2997 t1 = arg_info(op->args[1]); in fold_xor()
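The repeated `((T)y ? : 1)` expressions matched in do_constant_folding_2() (lines 552-572 above) guard constant-folded division and remainder against a zero divisor: the GNU `?:` extension substitutes 1 when the divisor operand folded to 0, so evaluating the fold can never raise a division-by-zero trap in the translator itself. Below is a minimal standalone sketch of that guard, assuming GCC or Clang for the `?:` extension; the fold_div32s() wrapper and the main() driver are hypothetical and not part of the listing.

/*
 * Minimal standalone sketch (not QEMU code) of the zero-divisor guard
 * matched in do_constant_folding_2() above.  The GNU "?:" extension
 * `(y ? : 1)` yields y when y is non-zero and 1 otherwise, so folding
 * a constant division cannot trap the host with SIGFPE.
 * Requires GCC or Clang; fold_div32s() is a hypothetical name.
 */
#include <stdint.h>
#include <inttypes.h>
#include <stdio.h>

static int32_t fold_div32s(int32_t x, int32_t y)
{
    /* Substitute 1 for a zero divisor; this only avoids a host trap,
       what a guest-level divide-by-zero produces is decided elsewhere. */
    return x / (y ? : 1);
}

int main(void)
{
    printf("%" PRId32 "\n", fold_div32s(10, 2));   /* prints 5 */
    printf("%" PRId32 "\n", fold_div32s(10, 0));   /* prints 10: divisor replaced by 1 */
    return 0;
}

The eight matches in the listing differ only in the casts applied to x and y (signed/unsigned, 32/64-bit, division/remainder); the guard expression itself is identical in all of them.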