xref: /qemu/tcg/tcg-op.c (revision ffd642cb2ca25262342311a3bf2e8a77a00e6dfd)
1 /*
2  * Tiny Code Generator for QEMU
3  *
4  * Copyright (c) 2008 Fabrice Bellard
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to deal
8  * in the Software without restriction, including without limitation the rights
9  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10  * copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in
14  * all copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22  * THE SOFTWARE.
23  */
24 
25 #include "qemu/osdep.h"
26 #include "tcg/tcg.h"
27 #include "tcg/tcg-temp-internal.h"
28 #include "tcg/tcg-op-common.h"
29 #include "exec/translation-block.h"
30 #include "exec/plugin-gen.h"
31 #include "tcg-internal.h"
32 #include "tcg-has.h"
33 
34 /*
35  * Encourage the compiler to tail-call to a function, rather than inlining.
36  * Minimizes code size across 99 bottles of beer on the wall.
37  */
38 #define NI  __attribute__((noinline))
39 
tcg_gen_op1(TCGOpcode opc,TCGType type,TCGArg a1)40 TCGOp * NI tcg_gen_op1(TCGOpcode opc, TCGType type, TCGArg a1)
41 {
42     TCGOp *op = tcg_emit_op(opc, 1);
43     TCGOP_TYPE(op) = type;
44     op->args[0] = a1;
45     return op;
46 }
47 
tcg_gen_op2(TCGOpcode opc,TCGType type,TCGArg a1,TCGArg a2)48 TCGOp * NI tcg_gen_op2(TCGOpcode opc, TCGType type, TCGArg a1, TCGArg a2)
49 {
50     TCGOp *op = tcg_emit_op(opc, 2);
51     TCGOP_TYPE(op) = type;
52     op->args[0] = a1;
53     op->args[1] = a2;
54     return op;
55 }
56 
tcg_gen_op3(TCGOpcode opc,TCGType type,TCGArg a1,TCGArg a2,TCGArg a3)57 TCGOp * NI tcg_gen_op3(TCGOpcode opc, TCGType type, TCGArg a1,
58                        TCGArg a2, TCGArg a3)
59 {
60     TCGOp *op = tcg_emit_op(opc, 3);
61     TCGOP_TYPE(op) = type;
62     op->args[0] = a1;
63     op->args[1] = a2;
64     op->args[2] = a3;
65     return op;
66 }
67 
tcg_gen_op4(TCGOpcode opc,TCGType type,TCGArg a1,TCGArg a2,TCGArg a3,TCGArg a4)68 TCGOp * NI tcg_gen_op4(TCGOpcode opc, TCGType type, TCGArg a1, TCGArg a2,
69                        TCGArg a3, TCGArg a4)
70 {
71     TCGOp *op = tcg_emit_op(opc, 4);
72     TCGOP_TYPE(op) = type;
73     op->args[0] = a1;
74     op->args[1] = a2;
75     op->args[2] = a3;
76     op->args[3] = a4;
77     return op;
78 }
79 
tcg_gen_op5(TCGOpcode opc,TCGType type,TCGArg a1,TCGArg a2,TCGArg a3,TCGArg a4,TCGArg a5)80 TCGOp * NI tcg_gen_op5(TCGOpcode opc, TCGType type, TCGArg a1, TCGArg a2,
81                        TCGArg a3, TCGArg a4, TCGArg a5)
82 {
83     TCGOp *op = tcg_emit_op(opc, 5);
84     TCGOP_TYPE(op) = type;
85     op->args[0] = a1;
86     op->args[1] = a2;
87     op->args[2] = a3;
88     op->args[3] = a4;
89     op->args[4] = a5;
90     return op;
91 }
92 
tcg_gen_op6(TCGOpcode opc,TCGType type,TCGArg a1,TCGArg a2,TCGArg a3,TCGArg a4,TCGArg a5,TCGArg a6)93 TCGOp * NI tcg_gen_op6(TCGOpcode opc, TCGType type, TCGArg a1, TCGArg a2,
94                        TCGArg a3, TCGArg a4, TCGArg a5, TCGArg a6)
95 {
96     TCGOp *op = tcg_emit_op(opc, 6);
97     TCGOP_TYPE(op) = type;
98     op->args[0] = a1;
99     op->args[1] = a2;
100     op->args[2] = a3;
101     op->args[3] = a4;
102     op->args[4] = a5;
103     op->args[5] = a6;
104     return op;
105 }
106 
107 /*
108  * With CONFIG_DEBUG_TCG, tcgv_*_tmp via tcgv_*_arg, is an out-of-line
109  * assertion check.  Force tail calls to avoid too much code expansion.
110  */
111 #ifdef CONFIG_DEBUG_TCG
112 # define DNI NI
113 #else
114 # define DNI
115 #endif
116 
tcg_gen_op1_i32(TCGOpcode opc,TCGType type,TCGv_i32 a1)117 static void DNI tcg_gen_op1_i32(TCGOpcode opc, TCGType type, TCGv_i32 a1)
118 {
119     tcg_gen_op1(opc, type, tcgv_i32_arg(a1));
120 }
121 
tcg_gen_op1_i64(TCGOpcode opc,TCGType type,TCGv_i64 a1)122 static void DNI tcg_gen_op1_i64(TCGOpcode opc, TCGType type, TCGv_i64 a1)
123 {
124     tcg_gen_op1(opc, type, tcgv_i64_arg(a1));
125 }
126 
tcg_gen_op1i(TCGOpcode opc,TCGType type,TCGArg a1)127 static TCGOp * DNI tcg_gen_op1i(TCGOpcode opc, TCGType type, TCGArg a1)
128 {
129     return tcg_gen_op1(opc, type, a1);
130 }
131 
tcg_gen_op2_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2)132 static void DNI tcg_gen_op2_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2)
133 {
134     tcg_gen_op2(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2));
135 }
136 
tcg_gen_op2_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2)137 static void DNI tcg_gen_op2_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2)
138 {
139     tcg_gen_op2(opc, TCG_TYPE_I64, tcgv_i64_arg(a1), tcgv_i64_arg(a2));
140 }
141 
tcg_gen_op3_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGv_i32 a3)142 static void DNI tcg_gen_op3_i32(TCGOpcode opc, TCGv_i32 a1,
143                                 TCGv_i32 a2, TCGv_i32 a3)
144 {
145     tcg_gen_op3(opc, TCG_TYPE_I32, tcgv_i32_arg(a1),
146                 tcgv_i32_arg(a2), tcgv_i32_arg(a3));
147 }
148 
tcg_gen_op3_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGv_i64 a3)149 static void DNI tcg_gen_op3_i64(TCGOpcode opc, TCGv_i64 a1,
150                                 TCGv_i64 a2, TCGv_i64 a3)
151 {
152     tcg_gen_op3(opc, TCG_TYPE_I64, tcgv_i64_arg(a1),
153                 tcgv_i64_arg(a2), tcgv_i64_arg(a3));
154 }
155 
tcg_gen_op3i_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGArg a3)156 static void DNI tcg_gen_op3i_i32(TCGOpcode opc, TCGv_i32 a1,
157                                  TCGv_i32 a2, TCGArg a3)
158 {
159     tcg_gen_op3(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2), a3);
160 }
161 
tcg_gen_op3i_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGArg a3)162 static void DNI tcg_gen_op3i_i64(TCGOpcode opc, TCGv_i64 a1,
163                                  TCGv_i64 a2, TCGArg a3)
164 {
165     tcg_gen_op3(opc, TCG_TYPE_I64, tcgv_i64_arg(a1), tcgv_i64_arg(a2), a3);
166 }
167 
tcg_gen_ldst_op_i32(TCGOpcode opc,TCGv_i32 val,TCGv_ptr base,TCGArg offset)168 static void DNI tcg_gen_ldst_op_i32(TCGOpcode opc, TCGv_i32 val,
169                                     TCGv_ptr base, TCGArg offset)
170 {
171     tcg_gen_op3(opc, TCG_TYPE_I32, tcgv_i32_arg(val),
172                 tcgv_ptr_arg(base), offset);
173 }
174 
tcg_gen_ldst_op_i64(TCGOpcode opc,TCGv_i64 val,TCGv_ptr base,TCGArg offset)175 static void DNI tcg_gen_ldst_op_i64(TCGOpcode opc, TCGv_i64 val,
176                                     TCGv_ptr base, TCGArg offset)
177 {
178     tcg_gen_op3(opc, TCG_TYPE_I64, tcgv_i64_arg(val),
179                 tcgv_ptr_arg(base), offset);
180 }
181 
tcg_gen_op4_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGv_i32 a3,TCGv_i32 a4)182 static void DNI tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
183                                 TCGv_i32 a3, TCGv_i32 a4)
184 {
185     tcg_gen_op4(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
186                 tcgv_i32_arg(a3), tcgv_i32_arg(a4));
187 }
188 
tcg_gen_op4_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGv_i64 a3,TCGv_i64 a4)189 static void DNI tcg_gen_op4_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
190                                 TCGv_i64 a3, TCGv_i64 a4)
191 {
192     tcg_gen_op4(opc, TCG_TYPE_I64, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
193                 tcgv_i64_arg(a3), tcgv_i64_arg(a4));
194 }
195 
tcg_gen_op4i_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGv_i32 a3,TCGArg a4)196 static void DNI tcg_gen_op4i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
197                                  TCGv_i32 a3, TCGArg a4)
198 {
199     tcg_gen_op4(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
200                 tcgv_i32_arg(a3), a4);
201 }
202 
tcg_gen_op4i_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGv_i64 a3,TCGArg a4)203 static void DNI tcg_gen_op4i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
204                                  TCGv_i64 a3, TCGArg a4)
205 {
206     tcg_gen_op4(opc, TCG_TYPE_I64, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
207                 tcgv_i64_arg(a3), a4);
208 }
209 
tcg_gen_op4ii_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGArg a3,TCGArg a4)210 static TCGOp * DNI tcg_gen_op4ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
211                                      TCGArg a3, TCGArg a4)
212 {
213     return tcg_gen_op4(opc, TCG_TYPE_I32,
214                        tcgv_i32_arg(a1), tcgv_i32_arg(a2), a3, a4);
215 }
216 
tcg_gen_op4ii_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGArg a3,TCGArg a4)217 static TCGOp * DNI tcg_gen_op4ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
218                                      TCGArg a3, TCGArg a4)
219 {
220     return tcg_gen_op4(opc, TCG_TYPE_I64,
221                        tcgv_i64_arg(a1), tcgv_i64_arg(a2), a3, a4);
222 }
223 
tcg_gen_op5_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGv_i32 a3,TCGv_i32 a4,TCGv_i32 a5)224 static void DNI tcg_gen_op5_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
225                                 TCGv_i32 a3, TCGv_i32 a4, TCGv_i32 a5)
226 {
227     tcg_gen_op5(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
228                 tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5));
229 }
230 
tcg_gen_op5_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGv_i64 a3,TCGv_i64 a4,TCGv_i64 a5)231 static void DNI tcg_gen_op5_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
232                                 TCGv_i64 a3, TCGv_i64 a4, TCGv_i64 a5)
233 {
234     tcg_gen_op5(opc, TCG_TYPE_I64, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
235                 tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5));
236 }
237 
tcg_gen_op5ii_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGv_i32 a3,TCGArg a4,TCGArg a5)238 static void DNI tcg_gen_op5ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
239                                   TCGv_i32 a3, TCGArg a4, TCGArg a5)
240 {
241     tcg_gen_op5(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
242                 tcgv_i32_arg(a3), a4, a5);
243 }
244 
tcg_gen_op5ii_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGv_i64 a3,TCGArg a4,TCGArg a5)245 static void DNI tcg_gen_op5ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
246                                   TCGv_i64 a3, TCGArg a4, TCGArg a5)
247 {
248     tcg_gen_op5(opc, TCG_TYPE_I64, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
249                 tcgv_i64_arg(a3), a4, a5);
250 }
251 
tcg_gen_op6i_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGv_i32 a3,TCGv_i32 a4,TCGv_i32 a5,TCGArg a6)252 static void DNI tcg_gen_op6i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
253                                  TCGv_i32 a3, TCGv_i32 a4,
254                                  TCGv_i32 a5, TCGArg a6)
255 {
256     tcg_gen_op6(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
257                 tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5), a6);
258 }
259 
tcg_gen_op6i_i64(TCGOpcode opc,TCGv_i64 a1,TCGv_i64 a2,TCGv_i64 a3,TCGv_i64 a4,TCGv_i64 a5,TCGArg a6)260 static void DNI tcg_gen_op6i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
261                                  TCGv_i64 a3, TCGv_i64 a4,
262                                  TCGv_i64 a5, TCGArg a6)
263 {
264     tcg_gen_op6(opc, TCG_TYPE_I64, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
265                 tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5), a6);
266 }
267 
tcg_gen_op6ii_i32(TCGOpcode opc,TCGv_i32 a1,TCGv_i32 a2,TCGv_i32 a3,TCGv_i32 a4,TCGArg a5,TCGArg a6)268 static TCGOp * DNI tcg_gen_op6ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
269                                      TCGv_i32 a3, TCGv_i32 a4,
270                                      TCGArg a5, TCGArg a6)
271 {
272     return tcg_gen_op6(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
273                        tcgv_i32_arg(a3), tcgv_i32_arg(a4), a5, a6);
274 }
275 
276 /* Generic ops.  */
277 
/* Define label @l at the current point in the opcode stream. */
void gen_set_label(TCGLabel *l)
{
    TCGArg arg = label_arg(l);

    l->present = 1;
    tcg_gen_op1(INDEX_op_set_label, 0, arg);
}
283 
/* Record @op as a branch that targets label @l. */
static void add_as_label_use(TCGLabel *l, TCGOp *op)
{
    TCGLabelUse *use = tcg_malloc(sizeof(*use));

    use->op = op;
    QSIMPLEQ_INSERT_TAIL(&l->branches, use, next);
}
291 
/* Emit an unconditional branch to @l and record the label use. */
void tcg_gen_br(TCGLabel *l)
{
    TCGOp *op = tcg_gen_op1(INDEX_op_br, 0, label_arg(l));

    add_as_label_use(l, op);
}
296 
/*
 * Emit a memory barrier op of kind @mb_type, but only when the
 * translation can actually observe reordering.
 */
void tcg_gen_mb(TCGBar mb_type)
{
#ifdef CONFIG_USER_ONLY
    /* User-mode: a barrier is only needed when the TB runs in parallel. */
    bool parallel = tcg_ctx->gen_tb->cflags & CF_PARALLEL;
#else
    /*
     * It is tempting to elide the barrier in a uniprocessor context.
     * However, even with a single cpu we have i/o threads running in
     * parallel, and lack of memory order can result in e.g. virtio
     * queue entries being read incorrectly.
     */
    bool parallel = true;
#endif

    if (parallel) {
        tcg_gen_op1(INDEX_op_mb, 0, mb_type);
    }
}
315 
/* Emit a plugin callback op; @from is passed through as the sole operand. */
void tcg_gen_plugin_cb(unsigned from)
{
    tcg_gen_op1(INDEX_op_plugin_cb, 0, from);
}
320 
/*
 * Emit a plugin memory-access callback op carrying the guest address
 * temporary and the encoded @meminfo operand.
 */
void tcg_gen_plugin_mem_cb(TCGv_i64 addr, unsigned meminfo)
{
    tcg_gen_op2(INDEX_op_plugin_mem_cb, 0, tcgv_i64_arg(addr), meminfo);
}
325 
326 /* 32 bit ops */
327 
/* Mark @arg as dead: its current value need not be preserved. */
void tcg_gen_discard_i32(TCGv_i32 arg)
{
    tcg_gen_op1_i32(INDEX_op_discard, TCG_TYPE_I32, arg);
}
332 
/* Copy @arg into @ret; a move to self emits nothing. */
void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (ret == arg) {
        return;
    }
    tcg_gen_op2_i32(INDEX_op_mov, ret, arg);
}
339 
/* Load constant @arg into @ret. */
void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}
344 
/* ret = arg1 + arg2. */
void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_add, ret, arg1, arg2);
}
349 
/* ret = arg1 + constant; adding zero degenerates to a move. */
void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
        return;
    }
    tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
}
359 
/* ret = arg1 - arg2. */
void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sub, ret, arg1, arg2);
}
364 
/* ret = constant - arg2; zero minus x is just negation. */
void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0) {
        tcg_gen_neg_i32(ret, arg2);
        return;
    }
    tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
}
373 
/* ret = arg1 - constant, implemented as addition of the negation. */
void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_addi_i32(ret, arg1, -arg2);
}
378 
/* ret = -arg. */
void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    tcg_gen_op2_i32(INDEX_op_neg, ret, arg);
}
383 
/* ret = arg1 & arg2. */
void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_and, ret, arg1, arg2);
}
388 
/* ret = arg1 & constant, optimizing trivial and contiguous-mask cases. */
void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        /* x & 0 == 0. */
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        /* x & -1 == x. */
        tcg_gen_mov_i32(ret, arg1);
        return;
    default:
        /*
         * Canonicalize on extract, if valid.  This aids x86 with its
         * 2 operand MOVZBL and 2 operand AND, selecting the TCGOpcode
         * which does not require matching operands.  Other backends can
         * trivially expand the extract to AND during code generation.
         */
        if (!(arg2 & (arg2 + 1))) {
            /* arg2 is a contiguous low mask, 2**len - 1. */
            unsigned len = ctz32(~arg2);
            if (TCG_TARGET_extract_valid(TCG_TYPE_I32, 0, len)) {
                tcg_gen_extract_i32(ret, arg1, 0, len);
                return;
            }
        }
        break;
    }

    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
}
418 
/* ret = arg1 | arg2. */
void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_or, ret, arg1, arg2);
}
423 
/* ret = arg1 | constant, optimizing the 0 and -1 cases. */
void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        /* x | 0 == x. */
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1) {
        /* x | -1 == -1. */
        tcg_gen_movi_i32(ret, -1);
    } else {
        tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
435 
/* ret = arg1 ^ arg2. */
void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_xor, ret, arg1, arg2);
}
440 
/* ret = arg1 ^ constant, optimizing the 0 and -1 cases. */
void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        /* x ^ 0 == x. */
        tcg_gen_mov_i32(ret, arg1);
        return;
    }
    if (arg2 == -1 && tcg_op_supported(INDEX_op_not, TCG_TYPE_I32, 0)) {
        /* Emit not directly; don't recurse via tcg_gen_not_i32. */
        tcg_gen_op2_i32(INDEX_op_not, ret, arg1);
        return;
    }
    tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
}
454 
/* ret = ~arg, falling back to xor with all-ones when not is unsupported. */
void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (!tcg_op_supported(INDEX_op_not, TCG_TYPE_I32, 0)) {
        tcg_gen_xori_i32(ret, arg, -1);
        return;
    }
    tcg_gen_op2_i32(INDEX_op_not, ret, arg);
}
463 
/* ret = arg1 << arg2. */
void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shl, ret, arg1, arg2);
}
468 
/* ret = arg1 << constant; the count must be in [0, 31]. */
void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 != 0) {
        tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        /* Shift by zero is a move. */
        tcg_gen_mov_i32(ret, arg1);
    }
}
478 
/* ret = arg1 >> arg2 (logical). */
void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shr, ret, arg1, arg2);
}
483 
/* ret = arg1 >> constant (logical); the count must be in [0, 31]. */
void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 != 0) {
        tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        /* Shift by zero is a move. */
        tcg_gen_mov_i32(ret, arg1);
    }
}
493 
/* ret = arg1 >> arg2 (arithmetic). */
void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sar, ret, arg1, arg2);
}
498 
/* ret = arg1 >> constant (arithmetic); the count must be in [0, 31]. */
void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 != 0) {
        tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        /* Shift by zero is a move. */
        tcg_gen_mov_i32(ret, arg1);
    }
}
508 
/* Branch to @l when arg1 cond arg2; fold constant conditions. */
void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_NEVER) {
        /* Never taken: emit nothing. */
        return;
    }
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
        return;
    }
    add_as_label_use(l, tcg_gen_op4ii_i32(INDEX_op_brcond,
                                          arg1, arg2, cond, label_arg(l)));
}
519 
/* Branch to @l when arg1 cond constant; fold constant conditions. */
void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_NEVER) {
        return;
    }
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
        return;
    }
    tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
}
528 
/* ret = (arg1 cond arg2) ? 1 : 0; fold constant conditions. */
void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    switch (cond) {
    case TCG_COND_ALWAYS:
        tcg_gen_movi_i32(ret, 1);
        break;
    case TCG_COND_NEVER:
        tcg_gen_movi_i32(ret, 0);
        break;
    default:
        tcg_gen_op4i_i32(INDEX_op_setcond, ret, arg1, arg2, cond);
        break;
    }
}
540 
/* ret = (arg1 cond constant) ? 1 : 0. */
void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
546 
/* ret = (arg1 cond arg2) ? -1 : 0; fold constant conditions. */
void tcg_gen_negsetcond_i32(TCGCond cond, TCGv_i32 ret,
                            TCGv_i32 arg1, TCGv_i32 arg2)
{
    switch (cond) {
    case TCG_COND_ALWAYS:
        tcg_gen_movi_i32(ret, -1);
        break;
    case TCG_COND_NEVER:
        tcg_gen_movi_i32(ret, 0);
        break;
    default:
        tcg_gen_op4i_i32(INDEX_op_negsetcond, ret, arg1, arg2, cond);
        break;
    }
}
558 
/* ret = (arg1 cond constant) ? -1 : 0. */
void tcg_gen_negsetcondi_i32(TCGCond cond, TCGv_i32 ret,
                             TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_negsetcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
564 
/* ret = arg1 * arg2. */
void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_mul, ret, arg1, arg2);
}
569 
/* ret = arg1 * constant, strength-reducing zero and power-of-two cases. */
void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        /* x * 0 == 0. */
        tcg_gen_movi_i32(ret, 0);
        return;
    }
    if (is_power_of_2(arg2)) {
        /* Multiply by 2**n is a left shift by n. */
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
        return;
    }
    tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
}
580 
/* Signed 32-bit division, expanding via divs, divs2, or a helper call. */
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (tcg_op_supported(INDEX_op_divs, TCG_TYPE_I32, 0)) {
        tcg_gen_op3_i32(INDEX_op_divs, ret, arg1, arg2);
    } else if (tcg_op_supported(INDEX_op_divs2, TCG_TYPE_I32, 0)) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        /* Sign-extend arg1 into t0 to form the high half of the dividend. */
        tcg_gen_sari_i32(t0, arg1, 31);
        /* Quotient lands in ret; the second output (remainder) is dumped
           into t0 (cf. tcg_gen_rem_i32, which swaps the two outputs). */
        tcg_gen_op5_i32(INDEX_op_divs2, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
594 
/* Signed 32-bit remainder, via rems, divs + mul/sub, divs2, or a helper. */
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (tcg_op_supported(INDEX_op_rems, TCG_TYPE_I32, 0)) {
        tcg_gen_op3_i32(INDEX_op_rems, ret, arg1, arg2);
    } else if (tcg_op_supported(INDEX_op_divs, TCG_TYPE_I32, 0)) {
        /* r = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_divs, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (tcg_op_supported(INDEX_op_divs2, TCG_TYPE_I32, 0)) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        /* Sign-extend arg1 into t0 to form the high half of the dividend. */
        tcg_gen_sari_i32(t0, arg1, 31);
        /* Second output receives the remainder (cf. tcg_gen_div_i32,
           which takes the first output); quotient is discarded in t0. */
        tcg_gen_op5_i32(INDEX_op_divs2, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
614 
/* Unsigned 32-bit division, expanding via divu, divu2, or a helper call. */
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (tcg_op_supported(INDEX_op_divu, TCG_TYPE_I32, 0)) {
        tcg_gen_op3_i32(INDEX_op_divu, ret, arg1, arg2);
    } else if (tcg_op_supported(INDEX_op_divu2, TCG_TYPE_I32, 0)) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        /* High half of the unsigned dividend is zero. */
        TCGv_i32 zero = tcg_constant_i32(0);
        /* Quotient in ret; unused remainder dumped into t0. */
        tcg_gen_op5_i32(INDEX_op_divu2, ret, t0, arg1, zero, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}
628 
/* Unsigned 32-bit remainder, via remu, divu + mul/sub, divu2, or a helper. */
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (tcg_op_supported(INDEX_op_remu, TCG_TYPE_I32, 0)) {
        tcg_gen_op3_i32(INDEX_op_remu, ret, arg1, arg2);
    } else if (tcg_op_supported(INDEX_op_divu, TCG_TYPE_I32, 0)) {
        /* r = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (tcg_op_supported(INDEX_op_divu2, TCG_TYPE_I32, 0)) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        /* High half of the unsigned dividend is zero. */
        TCGv_i32 zero = tcg_constant_i32(0);
        /* Remainder in ret; unused quotient dumped into t0. */
        tcg_gen_op5_i32(INDEX_op_divu2, t0, ret, arg1, zero, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
648 
/* ret = arg1 & ~arg2, expanding via not + and when andc is unsupported. */
void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (tcg_op_supported(INDEX_op_andc, TCG_TYPE_I32, 0)) {
        tcg_gen_op3_i32(INDEX_op_andc, ret, arg1, arg2);
        return;
    }
    TCGv_i32 inv = tcg_temp_ebb_new_i32();
    tcg_gen_not_i32(inv, arg2);
    tcg_gen_and_i32(ret, arg1, inv);
    tcg_temp_free_i32(inv);
}
660 
/* ret = ~(arg1 ^ arg2), expanding via xor + not when eqv is unsupported. */
void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (tcg_op_supported(INDEX_op_eqv, TCG_TYPE_I32, 0)) {
        tcg_gen_op3_i32(INDEX_op_eqv, ret, arg1, arg2);
        return;
    }
    tcg_gen_xor_i32(ret, arg1, arg2);
    tcg_gen_not_i32(ret, ret);
}
670 
/* ret = ~(arg1 & arg2), expanding via and + not when nand is unsupported. */
void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (tcg_op_supported(INDEX_op_nand, TCG_TYPE_I32, 0)) {
        tcg_gen_op3_i32(INDEX_op_nand, ret, arg1, arg2);
        return;
    }
    tcg_gen_and_i32(ret, arg1, arg2);
    tcg_gen_not_i32(ret, ret);
}
680 
/* ret = ~(arg1 | arg2), expanding via or + not when nor is unsupported. */
void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (tcg_op_supported(INDEX_op_nor, TCG_TYPE_I32, 0)) {
        tcg_gen_op3_i32(INDEX_op_nor, ret, arg1, arg2);
        return;
    }
    tcg_gen_or_i32(ret, arg1, arg2);
    tcg_gen_not_i32(ret, ret);
}
690 
/* ret = arg1 | ~arg2, expanding via not + or when orc is unsupported. */
void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (tcg_op_supported(INDEX_op_orc, TCG_TYPE_I32, 0)) {
        tcg_gen_op3_i32(INDEX_op_orc, ret, arg1, arg2);
        return;
    }
    TCGv_i32 inv = tcg_temp_ebb_new_i32();
    tcg_gen_not_i32(inv, arg2);
    tcg_gen_or_i32(ret, arg1, inv);
    tcg_temp_free_i32(inv);
}
702 
/*
 * Count leading zeros of @arg1; the result for a zero input is @arg2.
 * Falls back to the 64-bit clz op or a helper when needed.
 */
void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (tcg_op_supported(INDEX_op_clz, TCG_TYPE_I32, 0)) {
        tcg_gen_op3_i32(INDEX_op_clz, ret, arg1, arg2);
    } else if (tcg_op_supported(INDEX_op_clz, TCG_TYPE_I64, 0)) {
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        /*
         * The zero-extended input has 32 extra leading zeros, so bias the
         * zero-input value by 32 as well, then undo the bias at the end.
         */
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}
722 
/* Count leading zeros of @arg1; constant @arg2 is the result for zero. */
void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}
727 
/*
 * Count trailing zeros of @arg1; the result for a zero input is @arg2.
 * Falls back to 64-bit ctz, ctpop/clz bit tricks, or a helper.
 */
void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 z, t;

    if (tcg_op_supported(INDEX_op_ctz, TCG_TYPE_I32, 0)) {
        tcg_gen_op3_i32(INDEX_op_ctz, ret, arg1, arg2);
        return;
    }
    if (tcg_op_supported(INDEX_op_ctz, TCG_TYPE_I64, 0)) {
        /* Zero-extension does not disturb the trailing zeros. */
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        return;
    }
    if (tcg_op_supported(INDEX_op_ctpop, TCG_TYPE_REG, 0)) {
        /* ctz(x) = ctpop((x - 1) & ~x): the mask of trailing zeros. */
        t = tcg_temp_ebb_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(t, t);
    } else if (tcg_op_supported(INDEX_op_clz, TCG_TYPE_REG, 0)) {
        /* Isolate the lowest set bit; its index is 31 - clz. */
        t = tcg_temp_ebb_new_i32();
        tcg_gen_neg_i32(t, arg1);
        tcg_gen_and_i32(t, t, arg1);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_xori_i32(t, t, 31);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
        return;
    }

    /* The bit tricks above yield garbage for zero input; select arg2. */
    z = tcg_constant_i32(0);
    tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
    tcg_temp_free_i32(t);
}
767 
/* Count trailing zeros of @arg1; constant @arg2 is the result for zero. */
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (arg2 == 32
        && !tcg_op_supported(INDEX_op_ctz, TCG_TYPE_I32, 0)
        && tcg_op_supported(INDEX_op_ctpop, TCG_TYPE_REG, 0)) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        /* ctpop((x - 1) & ~x) yields 32 for x == 0 naturally. */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
783 
/* Count leading redundant sign bits of @arg (sign bit itself excluded). */
void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (tcg_op_supported(INDEX_op_clz, TCG_TYPE_REG, 0)) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        /* XOR with the broadcast sign turns copies of the sign bit
           into leading zeros. */
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        /* Subtract one so the sign bit itself is not counted. */
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}
797 
void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    /* Population count, preferring a native op, then the 64-bit form. */
    if (tcg_op_supported(INDEX_op_ctpop, TCG_TYPE_I32, 0)) {
        tcg_gen_op2_i32(INDEX_op_ctpop, ret, arg1);
        return;
    }
    if (tcg_op_supported(INDEX_op_ctpop, TCG_TYPE_I64, 0)) {
        /* Zero-extend, count in 64 bits, truncate the result. */
        TCGv_i64 wide = tcg_temp_ebb_new_i64();

        tcg_gen_extu_i32_i64(wide, arg1);
        tcg_gen_ctpop_i64(wide, wide);
        tcg_gen_extrl_i64_i32(ret, wide);
        tcg_temp_free_i64(wide);
        return;
    }
    gen_helper_ctpop_i32(ret, arg1);
}
812 
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* Rotate left by a variable amount. */
    if (tcg_op_supported(INDEX_op_rotl, TCG_TYPE_I32, 0)) {
        tcg_gen_op3_i32(INDEX_op_rotl, ret, arg1, arg2);
        return;
    }
    if (tcg_op_supported(INDEX_op_rotr, TCG_TYPE_I32, 0)) {
        /* rotl(x, n) == rotr(x, -n) */
        TCGv_i32 neg = tcg_temp_ebb_new_i32();

        tcg_gen_neg_i32(neg, arg2);
        tcg_gen_op3_i32(INDEX_op_rotr, ret, arg1, neg);
        tcg_temp_free_i32(neg);
        return;
    }

    /* Expand with shifts: (x << n) | (x >> -n). */
    TCGv_i32 lo = tcg_temp_ebb_new_i32();
    TCGv_i32 hi = tcg_temp_ebb_new_i32();

    tcg_gen_shl_i32(lo, arg1, arg2);
    tcg_gen_neg_i32(hi, arg2);
    tcg_gen_shr_i32(hi, arg1, hi);
    tcg_gen_or_i32(ret, lo, hi);
    tcg_temp_free_i32(lo);
    tcg_temp_free_i32(hi);
}
833 
void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Rotate left by a constant in [0, 32). */
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);

    /* A zero-bit rotate is a plain move. */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
        return;
    }
    if (tcg_op_supported(INDEX_op_rotl, TCG_TYPE_I32, 0)) {
        tcg_gen_op3_i32(INDEX_op_rotl, ret, arg1, tcg_constant_i32(arg2));
        return;
    }
    if (tcg_op_supported(INDEX_op_rotr, TCG_TYPE_I32, 0)) {
        /* rotl by n is rotr by 32 - n. */
        tcg_gen_op3_i32(INDEX_op_rotr, ret, arg1, tcg_constant_i32(32 - arg2));
        return;
    }

    /* Expand as (x << n) | (x >> (32 - n)). */
    TCGv_i32 lo = tcg_temp_ebb_new_i32();
    TCGv_i32 hi = tcg_temp_ebb_new_i32();

    tcg_gen_shli_i32(lo, arg1, arg2);
    tcg_gen_shri_i32(hi, arg1, 32 - arg2);
    tcg_gen_or_i32(ret, lo, hi);
    tcg_temp_free_i32(lo);
    tcg_temp_free_i32(hi);
}
856 
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* Rotate right by a variable amount. */
    if (tcg_op_supported(INDEX_op_rotr, TCG_TYPE_I32, 0)) {
        tcg_gen_op3_i32(INDEX_op_rotr, ret, arg1, arg2);
        return;
    }
    if (tcg_op_supported(INDEX_op_rotl, TCG_TYPE_I32, 0)) {
        /* rotr(x, n) == rotl(x, -n) */
        TCGv_i32 neg = tcg_temp_ebb_new_i32();

        tcg_gen_neg_i32(neg, arg2);
        tcg_gen_op3_i32(INDEX_op_rotl, ret, arg1, neg);
        tcg_temp_free_i32(neg);
        return;
    }

    /* Expand with shifts: (x >> n) | (x << -n). */
    TCGv_i32 lo = tcg_temp_ebb_new_i32();
    TCGv_i32 hi = tcg_temp_ebb_new_i32();

    tcg_gen_shr_i32(lo, arg1, arg2);
    tcg_gen_neg_i32(hi, arg2);
    tcg_gen_shl_i32(hi, arg1, hi);
    tcg_gen_or_i32(ret, lo, hi);
    tcg_temp_free_i32(lo);
    tcg_temp_free_i32(hi);
}
877 
void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* A right rotate by N is a left rotate by (32 - N) mod 32. */
    tcg_gen_rotli_i32(ret, arg1, -arg2 & 31);
}
883 
/*
 * Deposit the low LEN bits of ARG2 into ARG1 at bit offset OFS,
 * leaving the other bits of ARG1 unchanged.
 */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* A full-width deposit replaces arg1 entirely. */
    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    /* Use the backend's deposit op when it accepts this ofs/len pair. */
    if (TCG_TARGET_deposit_valid(TCG_TYPE_I32, ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_ebb_new_i32();

    if (tcg_op_supported(INDEX_op_extract2, TCG_TYPE_I32, 0)) {
        /* Field abuts bit 31: shift arg1 up, extract across t1:arg2. */
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        /* Field at bit 0: extract, then rotate the field into place. */
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    /* Generic expansion: isolate the field, clear the hole, merge. */
    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* Field reaches bit 31: the shift discards the high bits itself. */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}
931 
/* Deposit into a zero background: ret = (arg & ((1 << len) - 1)) << ofs. */
void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        /* Field reaches bit 31: a left shift clears the low bits. */
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field at bit 0: a plain mask suffices. */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_deposit_valid(TCG_TYPE_I32, ofs, len)) {
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit, ret, zero, arg, ofs, len);
    } else {
        /*
         * To help two-operand hosts we prefer to zero-extend first,
         * which allows ARG to stay live.
         */
        if (TCG_TARGET_extract_valid(TCG_TYPE_I32, 0, len)) {
            tcg_gen_extract_i32(ret, arg, 0, len);
            tcg_gen_shli_i32(ret, ret, ofs);
            return;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        if (TCG_TARGET_extract_valid(TCG_TYPE_I32, 0, ofs + len)) {
            tcg_gen_shli_i32(ret, arg, ofs);
            tcg_gen_extract_i32(ret, ret, 0, ofs + len);
            return;
        }
        /* Last resort: mask then shift. */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}
967 
/* Extract LEN bits of ARG starting at OFS, zero-extended into RET. */
void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* Field abuts bit 31: a logical shift right suffices. */
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }

    if (TCG_TARGET_extract_valid(TCG_TYPE_I32, ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract, ret, arg, ofs, len);
        return;
    }
    if (ofs == 0) {
        /* Field at bit 0: a plain mask suffices. */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    if (TCG_TARGET_extract_valid(TCG_TYPE_I32, 0, ofs + len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract, ret, arg, 0, ofs + len);
        tcg_gen_shri_i32(ret, ret, ofs);
        return;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        /* Generic fallback: shift the field to the top, then back down. */
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}
1012 
/* Extract LEN bits of ARG starting at OFS, sign-extended into RET. */
void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* Field abuts bit 31: an arithmetic shift right suffices. */
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }

    if (TCG_TARGET_sextract_valid(TCG_TYPE_I32, ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    if (TCG_TARGET_sextract_valid(TCG_TYPE_I32, 0, ofs + len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract, ret, arg, 0, ofs + len);
        tcg_gen_sari_i32(ret, ret, ofs);
        return;
    }
    if (TCG_TARGET_sextract_valid(TCG_TYPE_I32, 0, len)) {
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_op4ii_i32(INDEX_op_sextract, ret, ret, 0, len);
        return;
    }

    /* Generic fallback: shift the field to the top, arithmetic-shift down. */
    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}
1047 
1048 /*
1049  * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
1050  * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
1051  */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);

    /* The boundary cases reduce to a move of one input. */
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
        return;
    }
    if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
        return;
    }
    /* With identical halves, this is just a rotate. */
    if (al == ah) {
        tcg_gen_rotri_i32(ret, al, ofs);
        return;
    }
    if (tcg_op_supported(INDEX_op_extract2, TCG_TYPE_I32, 0)) {
        tcg_gen_op4i_i32(INDEX_op_extract2, ret, al, ah, ofs);
        return;
    }

    /* Shift the low word down and deposit the high word above it. */
    TCGv_i32 lo = tcg_temp_ebb_new_i32();

    tcg_gen_shri_i32(lo, al, ofs);
    tcg_gen_deposit_i32(ret, lo, ah, 32 - ofs, ofs);
    tcg_temp_free_i32(lo);
}
1071 
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    /* ret = (c1 cond c2) ? v1 : v2; degenerate conditions fold to moves. */
    switch (cond) {
    case TCG_COND_ALWAYS:
        tcg_gen_mov_i32(ret, v1);
        break;
    case TCG_COND_NEVER:
        tcg_gen_mov_i32(ret, v2);
        break;
    default:
        tcg_gen_op6i_i32(INDEX_op_movcond, ret, c1, c2, v1, v2, cond);
        break;
    }
}
1083 
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    /* Double-word add: rh:rl = ah:al + bh:bl. */
    if (tcg_op_supported(INDEX_op_addci, TCG_TYPE_I32, 0)) {
        /* Use the carry-out / carry-in opcode pair. */
        TCGv_i32 lo = tcg_temp_ebb_new_i32();

        tcg_gen_op3_i32(INDEX_op_addco, lo, al, bl);
        tcg_gen_op3_i32(INDEX_op_addci, rh, ah, bh);
        tcg_gen_mov_i32(rl, lo);
        tcg_temp_free_i32(lo);
        return;
    }

    /* No carry opcodes: materialize the carry with an unsigned compare. */
    TCGv_i32 lo = tcg_temp_ebb_new_i32();
    TCGv_i32 carry = tcg_temp_ebb_new_i32();

    tcg_gen_add_i32(lo, al, bl);
    /* Unsigned overflow occurred iff the sum is below an addend. */
    tcg_gen_setcond_i32(TCG_COND_LTU, carry, lo, al);
    tcg_gen_add_i32(rh, ah, bh);
    tcg_gen_add_i32(rh, rh, carry);
    tcg_gen_mov_i32(rl, lo);
    tcg_temp_free_i32(lo);
    tcg_temp_free_i32(carry);
}
1105 
/* r = a + b + ci (carry-in), with the carry-out written to CO. */
void tcg_gen_addcio_i32(TCGv_i32 r, TCGv_i32 co,
                        TCGv_i32 a, TCGv_i32 b, TCGv_i32 ci)
{
    if (tcg_op_supported(INDEX_op_addci, TCG_TYPE_I32, 0)) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);
        TCGv_i32 mone = tcg_constant_i32(-1);

        /* ci + -1 primes the carry flag from ci; t0 itself is discarded. */
        tcg_gen_op3_i32(INDEX_op_addco, t0, ci, mone);
        /* Add with carry-in and carry-out in one op. */
        tcg_gen_op3_i32(INDEX_op_addcio, r, a, b);
        /* 0 + 0 + carry extracts the carry flag into co. */
        tcg_gen_op3_i32(INDEX_op_addci, co, zero, zero);
        tcg_temp_free_i32(t0);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();

        /* t0 = a + b; t1 = carry out of that add. */
        tcg_gen_add_i32(t0, a, b);
        tcg_gen_setcond_i32(TCG_COND_LTU, t1, t0, a);
        /* r = t0 + ci; t0 = carry out of adding ci. */
        tcg_gen_add_i32(r, t0, ci);
        tcg_gen_setcond_i32(TCG_COND_LTU, t0, r, t0);
        /* Combine the two partial carries. */
        tcg_gen_or_i32(co, t0, t1);

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1132 
void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    /* Double-word subtract: rh:rl = ah:al - bh:bl. */
    if (tcg_op_supported(INDEX_op_subbi, TCG_TYPE_I32, 0)) {
        /* Use the borrow-out / borrow-in opcode pair. */
        TCGv_i32 lo = tcg_temp_ebb_new_i32();

        tcg_gen_op3_i32(INDEX_op_subbo, lo, al, bl);
        tcg_gen_op3_i32(INDEX_op_subbi, rh, ah, bh);
        tcg_gen_mov_i32(rl, lo);
        tcg_temp_free_i32(lo);
        return;
    }

    /* No borrow opcodes: compute the borrow with an unsigned compare. */
    TCGv_i32 lo = tcg_temp_ebb_new_i32();
    TCGv_i32 borrow = tcg_temp_ebb_new_i32();

    tcg_gen_sub_i32(lo, al, bl);
    tcg_gen_setcond_i32(TCG_COND_LTU, borrow, al, bl);
    tcg_gen_sub_i32(rh, ah, bh);
    tcg_gen_sub_i32(rh, rh, borrow);
    tcg_gen_mov_i32(rl, lo);
    tcg_temp_free_i32(lo);
    tcg_temp_free_i32(borrow);
}
1154 
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    /* Full unsigned 32x32 -> 64 multiply: rh:rl = arg1 * arg2. */
    if (tcg_op_supported(INDEX_op_mulu2, TCG_TYPE_I32, 0)) {
        tcg_gen_op4_i32(INDEX_op_mulu2, rl, rh, arg1, arg2);
        return;
    }
    if (tcg_op_supported(INDEX_op_muluh, TCG_TYPE_I32, 0)) {
        /* Separate low-half and high-half multiplies. */
        TCGv_i32 lo = tcg_temp_ebb_new_i32();

        tcg_gen_op3_i32(INDEX_op_mul, lo, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, lo);
        tcg_temp_free_i32(lo);
        return;
    }
    if (TCG_TARGET_REG_BITS == 64) {
        /* Widen both operands, multiply, and split the product. */
        TCGv_i64 p0 = tcg_temp_ebb_new_i64();
        TCGv_i64 p1 = tcg_temp_ebb_new_i64();

        tcg_gen_extu_i32_i64(p0, arg1);
        tcg_gen_extu_i32_i64(p1, arg2);
        tcg_gen_mul_i64(p0, p0, p1);
        tcg_gen_extr_i64_i32(rl, rh, p0);
        tcg_temp_free_i64(p0);
        tcg_temp_free_i64(p1);
        return;
    }
    g_assert_not_reached();
}
1178 
/* Full signed 32x32 -> 64 multiply: rh:rl = arg1 * arg2. */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (tcg_op_supported(INDEX_op_muls2, TCG_TYPE_I32, 0)) {
        tcg_gen_op4_i32(INDEX_op_muls2, rl, rh, arg1, arg2);
    } else if (tcg_op_supported(INDEX_op_mulsh, TCG_TYPE_I32, 0)) {
        /* Separate low-half and signed high-half multiplies. */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /*
         * Compute the unsigned product, then correct the high half:
         * for each negative input, subtract the other operand.
         */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        TCGv_i32 t3 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);     /* t2 = arg1 < 0 ? -1 : 0 */
        tcg_gen_sari_i32(t3, arg2, 31);     /* t3 = arg2 < 0 ? -1 : 0 */
        tcg_gen_and_i32(t2, t2, arg2);      /* t2 = arg1 < 0 ? arg2 : 0 */
        tcg_gen_and_i32(t3, t3, arg1);      /* t3 = arg2 < 0 ? arg1 : 0 */
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        /* 64-bit host: sign-extend, multiply, split the product. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1218 
/* Mixed multiply, arg1 signed by arg2 unsigned: rh:rl = arg1 * arg2. */
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Unsigned product, then correct the high half for signed arg1. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);     /* t2 = arg1 < 0 ? -1 : 0 */
        tcg_gen_and_i32(t2, t2, arg2);      /* t2 = arg1 < 0 ? arg2 : 0 */
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        /* 64-bit host: extend (signed/unsigned), multiply, split. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1245 
/* Sign-extend the low 8 bits of ARG into RET. */
void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    tcg_gen_sextract_i32(ret, arg, 0, 8);
}
1250 
/* Sign-extend the low 16 bits of ARG into RET. */
void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    tcg_gen_sextract_i32(ret, arg, 0, 16);
}
1255 
/* Zero-extend the low 8 bits of ARG into RET. */
void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    tcg_gen_extract_i32(ret, arg, 0, 8);
}
1260 
/* Zero-extend the low 16 bits of ARG into RET. */
void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    tcg_gen_extract_i32(ret, arg, 0, 16);
}
1265 
1266 /*
1267  * bswap16_i32: 16-bit byte swap on the low bits of a 32-bit value.
1268  *
1269  * Byte pattern: xxab -> yyba
1270  *
1271  * With TCG_BSWAP_IZ, x == zero, else undefined.
1272  * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1273  */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (tcg_op_supported(INDEX_op_bswap16, TCG_TYPE_I32, 0)) {
        tcg_gen_op3i_i32(INDEX_op_bswap16, ret, arg, flags);
    } else {
        /* Generic expansion when the backend lacks a bswap16 op. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();

                                            /* arg = ..ab (IZ) xxab (!IZ) */
        tcg_gen_shri_i32(t0, arg, 8);       /*  t0 = ...a (IZ) .xxa (!IZ) */
        if (!(flags & TCG_BSWAP_IZ)) {
            /* High input bits not known zero: mask them away. */
            tcg_gen_ext8u_i32(t0, t0);      /*  t0 = ...a */
        }

        if (flags & TCG_BSWAP_OS) {
            /* Sign-extended output: position b at bit 8 with sign above. */
            tcg_gen_shli_i32(t1, arg, 24);  /*  t1 = b... */
            tcg_gen_sari_i32(t1, t1, 16);   /*  t1 = ssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            /* Zero-extended output. */
            tcg_gen_ext8u_i32(t1, arg);     /*  t1 = ...b */
            tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = ..b. */
        } else {
            /* High output bits undefined: skip the masking. */
            tcg_gen_shli_i32(t1, arg, 8);   /*  t1 = xab. */
        }

        tcg_gen_or_i32(ret, t0, t1);        /* ret = ..ba (OZ) */
                                            /*     = ssba (OS) */
                                            /*     = xaba (no flag) */
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1308 
1309 /*
1310  * bswap32_i32: 32-bit byte swap on a 32-bit value.
1311  *
1312  * Byte pattern: abcd -> dcba
1313  */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (tcg_op_supported(INDEX_op_bswap32, TCG_TYPE_I32, 0)) {
        tcg_gen_op3i_i32(INDEX_op_bswap32, ret, arg, 0);
    } else {
        /* Generic expansion: swap adjacent bytes, then swap halfwords. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1338 
1339 /*
1340  * hswap_i32: Swap 16-bit halfwords within a 32-bit value.
1341  *
1342  * Byte pattern: abcd -> cdab
1343  */
void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Swapping 2 16-bit elements is a rotate. */
    tcg_gen_rotli_i32(ret, arg, 16);
}
1349 
/* ret = min(a, b), signed comparison. */
void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}
1354 
/* ret = min(a, b), unsigned comparison. */
void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}
1359 
/* ret = max(a, b), signed comparison. */
void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}
1364 
/* ret = max(a, b), unsigned comparison. */
void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}
1369 
void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    /* abs(a) = (a ^ s) - s, where s = a >> 31 is the sign mask. */
    TCGv_i32 sign = tcg_temp_ebb_new_i32();

    tcg_gen_sari_i32(sign, a, 31);
    tcg_gen_xor_i32(ret, a, sign);
    tcg_gen_sub_i32(ret, ret, sign);
    tcg_temp_free_i32(sign);
}
1379 
/* Load an 8-bit value from host memory, zero-extended into RET. */
void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8u, ret, arg2, offset);
}
1384 
/* Load an 8-bit value from host memory, sign-extended into RET. */
void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8s, ret, arg2, offset);
}
1389 
/* Load a 16-bit value from host memory, zero-extended into RET. */
void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16u, ret, arg2, offset);
}
1394 
/* Load a 16-bit value from host memory, sign-extended into RET. */
void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16s, ret, arg2, offset);
}
1399 
/* Load a 32-bit value from host memory into RET. */
void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld, ret, arg2, offset);
}
1404 
/* Store the low 8 bits of ARG1 to host memory. */
void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st8, arg1, arg2, offset);
}
1409 
/* Store the low 16 bits of ARG1 to host memory. */
void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st16, arg1, arg2, offset);
}
1414 
/* Store the 32-bit value ARG1 to host memory. */
void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st, arg1, arg2, offset);
}
1419 
1420 
1421 /* 64-bit ops */
1422 
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    /* Mark ARG dead; on 32-bit hosts, discard both halves. */
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_discard_i32(TCGV_LOW(arg));
        tcg_gen_discard_i32(TCGV_HIGH(arg));
    } else {
        tcg_gen_op1_i64(INDEX_op_discard, TCG_TYPE_I64, arg);
    }
}
1432 
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /* Copying a temporary onto itself is a no-op. */
    if (ret == arg) {
        return;
    }
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op2_i64(INDEX_op_mov, ret, arg);
    } else {
        TCGTemp *ts = tcgv_i64_temp(arg);

        /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
        if (ts->kind == TEMP_CONST) {
            tcg_gen_movi_i64(ret, ts->val);
        } else {
            /* Copy the two 32-bit halves separately. */
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
        }
    }
}
1452 
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    /* Load a 64-bit immediate, split into halves on 32-bit hosts. */
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_movi_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
    } else {
        tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
    }
}
1462 
/* Load an 8-bit value from host memory, zero-extended to 64 bits. */
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld8u, ret, arg2, offset);
    } else {
        /* 32-bit host: load the low half and zero the high half. */
        tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
1472 
/* Load an 8-bit value from host memory, sign-extended to 64 bits. */
void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld8s, ret, arg2, offset);
    } else {
        /* 32-bit host: load the low half and replicate its sign bit. */
        tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}
1482 
/* Load a 16-bit value from host memory, zero-extended to 64 bits. */
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld16u, ret, arg2, offset);
    } else {
        /* 32-bit host: load the low half and zero the high half. */
        tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
1492 
/* Load a 16-bit value from host memory, sign-extended to 64 bits. */
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld16s, ret, arg2, offset);
    } else {
        /* 32-bit host: load the low half and replicate its sign bit. */
        tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}
1502 
/* Load a 32-bit value from host memory, zero-extended to 64 bits. */
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld32u, ret, arg2, offset);
    } else {
        /* 32-bit host: load the low half and zero the high half. */
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
1512 
/* Load a 32-bit value from host memory, sign-extended to 64 bits. */
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld32s, ret, arg2, offset);
    } else {
        /* 32-bit host: load the low half and replicate its sign bit. */
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}
1522 
/* Load a 64-bit value from host memory into RET. */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /*
     * For 32-bit host, since arg2 and ret have different types,
     * they cannot be the same temporary -- no chance of overlap.
     */
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld, ret, arg2, offset);
    } else if (HOST_BIG_ENDIAN) {
        /* Split load, halves in host memory order: high word first. */
        tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
    } else {
        /* Little-endian host: low word first. */
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
    }
}
1539 
/* Store the low 8 bits of ARG1 to host memory. */
void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st8, arg1, arg2, offset);
    } else {
        /* Only the low half contributes to a narrow store. */
        tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
    }
}
1548 
/* Store the low 16 bits of ARG1 to host memory. */
void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st16, arg1, arg2, offset);
    } else {
        /* Only the low half contributes to a narrow store. */
        tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
    }
}
1557 
/* Store the low 32 bits of ARG1 to host memory. */
void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st32, arg1, arg2, offset);
    } else {
        /* Only the low half contributes to a narrow store. */
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    }
}
1566 
/* Store the 64-bit value ARG1 to host memory. */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st, arg1, arg2, offset);
    } else if (HOST_BIG_ENDIAN) {
        /* Split store, halves in host memory order: high word first. */
        tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
    } else {
        /* Little-endian host: low word first. */
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
        tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
    }
}
1579 
void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = arg1 + arg2. */
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: add the halves with carry into the high word. */
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                         TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
    } else {
        tcg_gen_op3_i64(INDEX_op_add, ret, arg1, arg2);
    }
}
1589 
void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = arg1 - arg2. */
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: subtract the halves with borrow from the high word. */
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                         TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
    } else {
        tcg_gen_op3_i64(INDEX_op_sub, ret, arg1, arg2);
    }
}
1599 
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = arg1 & arg2.  Bitwise ops split cleanly per 32-bit half. */
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else {
        tcg_gen_op3_i64(INDEX_op_and, ret, arg1, arg2);
    }
}
1609 
void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = arg1 | arg2.  Bitwise ops split cleanly per 32-bit half. */
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else {
        tcg_gen_op3_i64(INDEX_op_or, ret, arg1, arg2);
    }
}
1619 
void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = arg1 ^ arg2.  Bitwise ops split cleanly per 32-bit half. */
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else {
        tcg_gen_op3_i64(INDEX_op_xor, ret, arg1, arg2);
    }
}
1629 
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = arg1 << arg2, with a variable shift count. */
    if (TCG_TARGET_REG_BITS != 64) {
        /* No 64-bit variable shift on 32-bit hosts; call out to a helper. */
        gen_helper_shl_i64(ret, arg1, arg2);
    } else {
        tcg_gen_op3_i64(INDEX_op_shl, ret, arg1, arg2);
    }
}
1638 
void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = arg1 >> arg2 (logical), with a variable shift count. */
    if (TCG_TARGET_REG_BITS != 64) {
        /* No 64-bit variable shift on 32-bit hosts; call out to a helper. */
        gen_helper_shr_i64(ret, arg1, arg2);
    } else {
        tcg_gen_op3_i64(INDEX_op_shr, ret, arg1, arg2);
    }
}
1647 
void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = arg1 >> arg2 (arithmetic), with a variable shift count. */
    if (TCG_TARGET_REG_BITS != 64) {
        /* No 64-bit variable shift on 32-bit hosts; call out to a helper. */
        gen_helper_sar_i64(ret, arg1, arg2);
    } else {
        tcg_gen_op3_i64(INDEX_op_sar, ret, arg1, arg2);
    }
}
1656 
/*
 * ret = arg1 * arg2 (low 64 bits of the product).
 *
 * On 32-bit hosts, build the product from 32-bit pieces:
 *   lo(a)*lo(b) gives a full 64-bit partial product;
 *   lo(a)*hi(b) and hi(a)*lo(b) contribute only to the high word;
 *   hi(a)*hi(b) shifts entirely out of the low 64 bits and is dropped.
 */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_mul, ret, arg1, arg2);
        return;
    }

    /* Accumulate into temporaries so ret may alias arg1/arg2. */
    t0 = tcg_temp_ebb_new_i64();
    t1 = tcg_temp_ebb_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    /* Add the two cross products into the high word. */
    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
1683 
/* ret = arg1 + arg2, with a constant addend. */
void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        /* 32-bit host: add-with-carry against the constant's two halves. */
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}
1697 
/* ret = arg1 - arg2, where the minuend arg1 is a constant ("subtract from"). */
void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0) {
        /* 0 - x is a negate. */
        tcg_gen_neg_i64(ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        /* 32-bit host: subtract-with-borrow from the constant's halves. */
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}
1710 
/* ret = arg1 - arg2, with a constant subtrahend; folded into an addi. */
void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_addi_i64(ret, arg1, -arg2);
}
1715 
void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /* ret = -arg. */
    if (TCG_TARGET_REG_BITS == 32) {
        /* Compute 0 - arg with borrow propagation across the halves. */
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         zero, zero, TCGV_LOW(arg), TCGV_HIGH(arg));
    } else {
        tcg_gen_op2_i64(INDEX_op_neg, ret, arg);
    }
}
1726 
/* ret = arg1 & arg2, with a constant mask. */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Bitwise AND splits cleanly per 32-bit half. */
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    default:
        /*
         * Canonicalize on extract, if valid.  This aids x86 with its
         * 2 operand MOVZBL and 2 operand AND, selecting the TCGOpcode
         * which does not require matching operands.  Other backends can
         * trivially expand the extract to AND during code generation.
         */
        if (!(arg2 & (arg2 + 1))) {
            /* Mask is of the form 2**len - 1 (contiguous low bits). */
            unsigned len = ctz64(~arg2);
            if (TCG_TARGET_extract_valid(TCG_TYPE_I64, 0, len)) {
                tcg_gen_extract_i64(ret, arg1, 0, len);
                return;
            }
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}
1762 
/* ret = arg1 | arg2, with a constant operand. */
void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Bitwise OR splits cleanly per 32-bit half. */
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        /* x | -1 == -1. */
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        /* x | 0 == x. */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1779 
/* ret = arg1 ^ arg2, with a constant operand. */
void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Bitwise XOR splits cleanly per 32-bit half. */
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        /* x ^ 0 == x. */
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 &&
               tcg_op_supported(INDEX_op_not, TCG_TYPE_I64, 0)) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not, ret, arg1);
    } else {
        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1798 
/*
 * Constant 64-bit shift on a 32-bit host, operating on the lo/hi
 * register pair.  'c' is the shift count (0..63); 'right' selects
 * right shift; 'arith' selects arithmetic (sign-filling) right shift.
 */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        /* Shift by zero is a plain move of both halves. */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        /* The shift crosses the word boundary: one half is fully
           replaced by the (shifted) other half, the rest is fill. */
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        /* 0 < c < 32: low result combines bits of both input halves. */
        if (tcg_op_supported(INDEX_op_extract2, TCG_TYPE_I32, 0)) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            /* Emulate extract2: shift the low half, then deposit the
               bits that slide down from the high half. */
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        /* Left shift, 0 < c < 32: high result combines both halves. */
        if (tcg_op_supported(INDEX_op_extract2, TCG_TYPE_I32, 0)) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            /* Use a temp: TCGV_HIGH(ret) may alias TCGV_LOW(arg1)/HIGH(arg1),
               which are still needed below. */
            TCGv_i32 t0 = tcg_temp_ebb_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}
1848 
/* ret = arg1 << arg2, with a constant count; requires 0 <= arg2 < 64. */
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        /* Shift by zero is a plain move. */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1860 
/* ret = arg1 >> arg2 (logical), constant count; requires 0 <= arg2 < 64. */
void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        /* Shift by zero is a plain move. */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1872 
/* ret = arg1 >> arg2 (arithmetic), constant count; requires 0 <= arg2 < 64. */
void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        /* Shift by zero is a plain move. */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1884 
/* Branch to label l if (arg1 cond arg2); fold away constant conditions. */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGOp *op;
        if (TCG_TARGET_REG_BITS == 32) {
            /* Compare the 64-bit pair with the double-word branch op. */
            op = tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                                   TCGV_HIGH(arg1), TCGV_LOW(arg2),
                                   TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            op = tcg_gen_op4ii_i64(INDEX_op_brcond, arg1, arg2, cond,
                                   label_arg(l));
        }
        /* Record the branch so the label knows all of its users. */
        add_as_label_use(l, op);
    }
}
1902 
/* As tcg_gen_brcond_i64, with a constant second comparison operand. */
void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        /* 32-bit host: compare against the constant's two halves. */
        TCGOp *op = tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                                      TCGV_LOW(arg1), TCGV_HIGH(arg1),
                                      tcg_constant_i32(arg2),
                                      tcg_constant_i32(arg2 >> 32),
                                      cond, label_arg(l));
        /* Record the branch so the label knows all of its users. */
        add_as_label_use(l, op);
    }
}
1918 
/* ret = (arg1 cond arg2) ? 1 : 0; fold away constant conditions. */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* The double-word setcond produces a 32-bit 0/1; the high
               half of the 64-bit result is always zero. */
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond, ret, arg1, arg2, cond);
        }
    }
}
1937 
/* As tcg_gen_setcond_i64, with a constant second comparison operand. */
void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        /* 32-bit host: compare against the constant's two halves;
           the 0/1 result lives in the low half, high half is zero. */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2),
                         tcg_constant_i32(arg2 >> 32), cond);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
1955 
/* As tcg_gen_negsetcond_i64, with a constant second comparison operand. */
void tcg_gen_negsetcondi_i64(TCGCond cond, TCGv_i64 ret,
                             TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_negsetcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
}
1961 
/* ret = (arg1 cond arg2) ? -1 : 0; fold away constant conditions. */
void tcg_gen_negsetcond_i64(TCGCond cond, TCGv_i64 ret,
                            TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op4i_i64(INDEX_op_negsetcond, ret, arg1, arg2, cond);
    } else {
        /* Compute the 0/1 result, negate it to 0/-1, and replicate
           the low half into the high half for the full 64-bit mask. */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
        tcg_gen_neg_i32(TCGV_LOW(ret), TCGV_LOW(ret));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_LOW(ret));
    }
}
1979 
/* ret = arg1 * arg2, with a constant multiplier; strength-reduced. */
void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        /* x * 0 == 0. */
        tcg_gen_movi_i64(ret, 0);
    } else if (is_power_of_2(arg2)) {
        /* x * 2**n == x << n. */
        tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
    } else {
        tcg_gen_mul_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1990 
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* Signed division: native op, double-word divide, or helper. */
    if (tcg_op_supported(INDEX_op_divs, TCG_TYPE_I64, 0)) {
        tcg_gen_op3_i64(INDEX_op_divs, ret, arg1, arg2);
        return;
    }
    if (tcg_op_supported(INDEX_op_divs2, TCG_TYPE_I64, 0)) {
        /* Sign-extend arg1 into the high word for the 128/64 divide. */
        TCGv_i64 sign = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(sign, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_divs2, ret, sign, arg1, sign, arg2);
        tcg_temp_free_i64(sign);
        return;
    }
    gen_helper_div_i64(ret, arg1, arg2);
}
2004 
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* Signed remainder: native op, synthesized from divide, or helper. */
    if (tcg_op_supported(INDEX_op_rems, TCG_TYPE_I64, 0)) {
        tcg_gen_op3_i64(INDEX_op_rems, ret, arg1, arg2);
        return;
    }
    if (tcg_op_supported(INDEX_op_divs, TCG_TYPE_I64, 0)) {
        /* r = a - (a / b) * b. */
        TCGv_i64 quot = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_divs, quot, arg1, arg2);
        tcg_gen_mul_i64(quot, quot, arg2);
        tcg_gen_sub_i64(ret, arg1, quot);
        tcg_temp_free_i64(quot);
        return;
    }
    if (tcg_op_supported(INDEX_op_divs2, TCG_TYPE_I64, 0)) {
        /* The double-word divide yields the remainder directly. */
        TCGv_i64 sign = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(sign, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_divs2, sign, ret, arg1, sign, arg2);
        tcg_temp_free_i64(sign);
        return;
    }
    gen_helper_rem_i64(ret, arg1, arg2);
}
2024 
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* Unsigned division: native op, double-word divide, or helper. */
    if (tcg_op_supported(INDEX_op_divu, TCG_TYPE_I64, 0)) {
        tcg_gen_op3_i64(INDEX_op_divu, ret, arg1, arg2);
        return;
    }
    if (tcg_op_supported(INDEX_op_divu2, TCG_TYPE_I64, 0)) {
        /* Zero-extend arg1 into the high word for the 128/64 divide. */
        TCGv_i64 rem = tcg_temp_ebb_new_i64();
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5_i64(INDEX_op_divu2, ret, rem, arg1, zero, arg2);
        tcg_temp_free_i64(rem);
        return;
    }
    gen_helper_divu_i64(ret, arg1, arg2);
}
2038 
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* Unsigned remainder: native op, synthesized from divide, or helper. */
    if (tcg_op_supported(INDEX_op_remu, TCG_TYPE_I64, 0)) {
        tcg_gen_op3_i64(INDEX_op_remu, ret, arg1, arg2);
        return;
    }
    if (tcg_op_supported(INDEX_op_divu, TCG_TYPE_I64, 0)) {
        /* r = a - (a / b) * b. */
        TCGv_i64 quot = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu, quot, arg1, arg2);
        tcg_gen_mul_i64(quot, quot, arg2);
        tcg_gen_sub_i64(ret, arg1, quot);
        tcg_temp_free_i64(quot);
        return;
    }
    if (tcg_op_supported(INDEX_op_divu2, TCG_TYPE_I64, 0)) {
        /* The double-word divide yields the remainder directly. */
        TCGv_i64 quot = tcg_temp_ebb_new_i64();
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5_i64(INDEX_op_divu2, quot, ret, arg1, zero, arg2);
        tcg_temp_free_i64(quot);
        return;
    }
    gen_helper_remu_i64(ret, arg1, arg2);
}
2058 
/* Sign-extend the low 8 bits of arg into ret. */
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_sextract_i64(ret, arg, 0, 8);
}
2063 
/* Sign-extend the low 16 bits of arg into ret. */
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_sextract_i64(ret, arg, 0, 16);
}
2068 
/* Sign-extend the low 32 bits of arg into ret. */
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_sextract_i64(ret, arg, 0, 32);
}
2073 
/* Zero-extend the low 8 bits of arg into ret. */
void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_extract_i64(ret, arg, 0, 8);
}
2078 
/* Zero-extend the low 16 bits of arg into ret. */
void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_extract_i64(ret, arg, 0, 16);
}
2083 
/* Zero-extend the low 32 bits of arg into ret. */
void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_extract_i64(ret, arg, 0, 32);
}
2088 
/*
 * bswap16_i64: 16-bit byte swap on the low bits of a 64-bit value.
 *
 * Byte pattern: xxxxxxxxab -> yyyyyyyyba
 *
 * With TCG_BSWAP_IZ, x == zero, else undefined.
 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
 */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* The swapped bytes live in the low half; the high half is
           pure extension (sign or zero) of the 32-bit result. */
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (tcg_op_supported(INDEX_op_bswap16, TCG_TYPE_I64, 0)) {
        tcg_gen_op3i_i64(INDEX_op_bswap16, ret, arg, flags);
    } else {
        /* Expand with shifts; the per-line comments track the byte
           layout, one letter per byte, '.' for zero, 's' for sign. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();

                                            /* arg = ......ab or xxxxxxab */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .......a or .xxxxxxa */
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i64(t0, t0);      /*  t0 = .......a */
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i64(t1, arg, 56);  /*  t1 = b....... */
            tcg_gen_sari_i64(t1, t1, 48);   /*  t1 = ssssssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i64(t1, arg);     /*  t1 = .......b */
            tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = ......b. */
        } else {
            tcg_gen_shli_i64(t1, arg, 8);   /*  t1 = xxxxxab. */
        }

        tcg_gen_or_i64(ret, t0, t1);        /* ret = ......ba (OZ) */
                                            /*       ssssssba (OS) */
                                            /*       xxxxxaba (no flag) */
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2138 
/*
 * bswap32_i64: 32-bit byte swap on the low bits of a 64-bit value.
 *
 * Byte pattern: xxxxabcd -> yyyydcba
 *
 * With TCG_BSWAP_IZ, x == zero, else undefined.
 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
 */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap the low half; the high half is pure extension. */
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (tcg_op_supported(INDEX_op_bswap32, TCG_TYPE_I64, 0)) {
        tcg_gen_op3i_i64(INDEX_op_bswap32, ret, arg, flags);
    } else {
        /* Expand with a mask-and-shift pass for adjacent byte swap,
           then a 16-bit rotate built from shifts.  Byte-layout comments:
           one letter per byte, '.' zero, 's' sign, 'x' don't-care. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);

                                            /* arg = xxxxabcd */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .xxxxabc */
        tcg_gen_and_i64(t1, arg, t2);       /*  t1 = .....b.d */
        tcg_gen_and_i64(t0, t0, t2);        /*  t0 = .....a.c */
        tcg_gen_shli_i64(t1, t1, 8);        /*  t1 = ....b.d. */
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ....badc */

        tcg_gen_shli_i64(t1, ret, 48);      /*  t1 = dc...... */
        tcg_gen_shri_i64(t0, ret, 16);      /*  t0 = ......ba */
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i64(t1, t1, 32);   /*  t1 = ssssdc.. */
        } else {
            tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
        }
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ssssdcba (OS) */
                                            /*       ....dcba (else) */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2187 
/*
 * bswap64_i64: 64-bit byte swap on a 64-bit value.
 *
 * Byte pattern: abcdefgh -> hgfedcba
 */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap the bytes within each half, then exchange the halves.
           Temporaries allow ret to alias arg. */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (tcg_op_supported(INDEX_op_bswap64, TCG_TYPE_I64, 0)) {
        tcg_gen_op3i_i64(INDEX_op_bswap64, ret, arg, 0);
    } else {
        /* Expand as three mask-and-shift passes, swapping at byte,
           halfword, and word granularity in turn (cf. hswap64 in
           include/qemu/bitops.h).  Byte-layout comments as above. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();

                                        /* arg = abcdefgh */
        tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
        tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
        tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */

        tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
        tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
        tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
        tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */

        tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
        tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    }
}
2237 
/*
 * hswap_i64: Swap 16-bit halfwords within a 64-bit value.
 * See also include/qemu/bitops.h, hswap64.
 *
 * Byte pattern: abcdefgh -> ghefcdab
 */
void tcg_gen_hswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    uint64_t m = 0x0000ffff0000ffffull;
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();

    /* Rotate the words, then exchange the halfwords within each word.
       Byte-layout comments: one letter per byte, '.' for zero. */
                                        /* arg = abcdefgh */
    tcg_gen_rotli_i64(t1, arg, 32);     /*  t1 = efghabcd */
    tcg_gen_andi_i64(t0, t1, m);        /*  t0 = ..gh..cd */
    tcg_gen_shli_i64(t0, t0, 16);       /*  t0 = gh..cd.. */
    tcg_gen_shri_i64(t1, t1, 16);       /*  t1 = ..efghab */
    tcg_gen_andi_i64(t1, t1, m);        /*  t1 = ..ef..ab */
    tcg_gen_or_i64(ret, t0, t1);        /* ret = ghefcdab */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
2261 
/*
 * wswap_i64: Swap 32-bit words within a 64-bit value.
 *
 * Byte pattern: abcdefgh -> efghabcd
 */
void tcg_gen_wswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /* Swapping 2 32-bit elements is a rotate. */
    tcg_gen_rotli_i64(ret, arg, 32);
}
2272 
void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /* ret = ~arg. */
    if (TCG_TARGET_REG_BITS == 32) {
        /* Complement splits cleanly per 32-bit half. */
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
        return;
    }
    if (tcg_op_supported(INDEX_op_not, TCG_TYPE_I64, 0)) {
        tcg_gen_op2_i64(INDEX_op_not, ret, arg);
    } else {
        /* Fall back to ~x == x ^ -1. */
        tcg_gen_xori_i64(ret, arg, -1);
    }
}
2284 
void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = arg1 & ~arg2. */
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
        return;
    }
    if (tcg_op_supported(INDEX_op_andc, TCG_TYPE_I64, 0)) {
        tcg_gen_op3_i64(INDEX_op_andc, ret, arg1, arg2);
    } else {
        /* Expand as a complement followed by an AND. */
        TCGv_i64 tmp = tcg_temp_ebb_new_i64();
        tcg_gen_not_i64(tmp, arg2);
        tcg_gen_and_i64(ret, arg1, tmp);
        tcg_temp_free_i64(tmp);
    }
}
2299 
void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = ~(arg1 ^ arg2). */
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
        return;
    }
    if (tcg_op_supported(INDEX_op_eqv, TCG_TYPE_I64, 0)) {
        tcg_gen_op3_i64(INDEX_op_eqv, ret, arg1, arg2);
    } else {
        /* Expand as XOR followed by complement. */
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
2312 
void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = ~(arg1 & arg2). */
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
        return;
    }
    if (tcg_op_supported(INDEX_op_nand, TCG_TYPE_I64, 0)) {
        tcg_gen_op3_i64(INDEX_op_nand, ret, arg1, arg2);
    } else {
        /* Expand as AND followed by complement. */
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
2325 
void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* ret = ~(arg1 | arg2). */
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
        return;
    }
    if (tcg_op_supported(INDEX_op_nor, TCG_TYPE_I64, 0)) {
        tcg_gen_op3_i64(INDEX_op_nor, ret, arg1, arg2);
    } else {
        /* Expand as OR followed by complement. */
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
2338 
void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* Compute ret = arg1 | ~arg2.  */
    if (TCG_TARGET_REG_BITS == 32) {
        /* On 32-bit hosts, operate on the two halves independently.  */
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
        return;
    }
    if (tcg_op_supported(INDEX_op_orc, TCG_TYPE_I64, 0)) {
        /* The backend implements orc directly.  */
        tcg_gen_op3_i64(INDEX_op_orc, ret, arg1, arg2);
        return;
    }
    /* Generic expansion: invert arg2 into a temporary, then or.  */
    TCGv_i64 inv = tcg_temp_ebb_new_i64();
    tcg_gen_not_i64(inv, arg2);
    tcg_gen_or_i64(ret, arg1, inv);
    tcg_temp_free_i64(inv);
}
2353 
void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /*
     * Count leading zeros of ARG1; when ARG1 == 0 the result is ARG2
     * (per the TCG clz opcode contract).
     */
    if (!tcg_op_supported(INDEX_op_clz, TCG_TYPE_I64, 0)) {
        /* No host support: defer to the out-of-line helper.  */
        gen_helper_clz_i64(ret, arg1, arg2);
        return;
    }
    tcg_gen_op3_i64(INDEX_op_clz, ret, arg1, arg2);
}
2362 
/*
 * Count leading zeros of ARG1; when ARG1 == 0 the result is the
 * constant ARG2.
 */
void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && arg2 <= 0xffffffffu
        && tcg_op_supported(INDEX_op_clz, TCG_TYPE_I32, 0)) {
        /* 32-bit host: compute in halves.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        /*
         * T = answer assuming the high word is zero:
         * 32 + clz32(low), or arg2 when low is also zero
         * ((arg2 - 32) + 32 == arg2).
         */
        tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
        tcg_gen_addi_i32(t, t, 32);
        /* If the high word is non-zero, the answer is clz32(high);
           otherwise T supplies the fallback computed above.  */
        tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_clz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2378 
/*
 * Count trailing zeros of ARG1; when ARG1 == 0 the result is ARG2.
 */
void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 z, t;

    if (tcg_op_supported(INDEX_op_ctz, TCG_TYPE_I64, 0)) {
        /* The backend implements ctz directly.  */
        tcg_gen_op3_i64(INDEX_op_ctz, ret, arg1, arg2);
        return;
    }
    if (tcg_op_supported(INDEX_op_ctpop, TCG_TYPE_I64, 0)) {
        /* ctz(x) == ctpop((x - 1) & ~x): set the trailing zeros, count them. */
        t = tcg_temp_ebb_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(t, t);
    } else if (tcg_op_supported(INDEX_op_clz, TCG_TYPE_I64, 0)) {
        /* ctz(x) == 63 - clz(x & -x): isolate the lowest set bit,
           then count from the other end.  */
        t = tcg_temp_ebb_new_i64();
        tcg_gen_neg_i64(t, arg1);
        tcg_gen_and_i64(t, t, arg1);
        tcg_gen_clzi_i64(t, t, 64);
        tcg_gen_xori_i64(t, t, 63);
    } else {
        /* No usable host op at all: the helper honors ARG2 itself.  */
        gen_helper_ctz_i64(ret, arg1, arg2);
        return;
    }

    /* Neither expansion above yields ARG2 for zero input; fix that up.  */
    z = tcg_constant_i64(0);
    tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
    tcg_temp_free_i64(t);
}
2407 
/*
 * Count trailing zeros of ARG1; when ARG1 == 0 the result is the
 * constant ARG2.
 */
void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && arg2 <= 0xffffffffu
        && tcg_op_supported(INDEX_op_ctz, TCG_TYPE_I32, 0)) {
        /* 32-bit host: compute in halves (mirror of tcg_gen_clzi_i64). */
        TCGv_i32 t32 = tcg_temp_ebb_new_i32();
        /*
         * T32 = answer assuming the low word is zero:
         * 32 + ctz32(high), or arg2 when high is also zero.
         */
        tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
        tcg_gen_addi_i32(t32, t32, 32);
        /* If the low word is non-zero, the answer is ctz32(low);
           otherwise T32 supplies the fallback computed above.  */
        tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t32);
    } else if (arg2 == 64
               && !tcg_op_supported(INDEX_op_ctz, TCG_TYPE_I64, 0)
               && tcg_op_supported(INDEX_op_ctpop, TCG_TYPE_I64, 0)) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(ret, t);
        tcg_temp_free_i64(t);
    } else {
        tcg_gen_ctz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2432 
void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /* Count leading redundant sign bits (sign bits beyond the first).  */
    if (!tcg_op_supported(INDEX_op_clz, TCG_TYPE_I64, 0)) {
        /* No clz available: defer to the out-of-line helper.  */
        gen_helper_clrsb_i64(ret, arg);
        return;
    }

    TCGv_i64 tmp = tcg_temp_ebb_new_i64();

    /* Xor with the replicated sign: leading sign bits become zeros.  */
    tcg_gen_sari_i64(tmp, arg, 63);
    tcg_gen_xor_i64(tmp, tmp, arg);
    /* Count those zeros (64 when arg is 0 or -1) and drop the one
       that corresponds to the sign bit itself.  */
    tcg_gen_clzi_i64(tmp, tmp, 64);
    tcg_gen_subi_i64(ret, tmp, 1);
    tcg_temp_free_i64(tmp);
}
2446 
void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
{
    /* Count the number of set bits in ARG1.  */
    if (TCG_TARGET_REG_BITS == 64
        && tcg_op_supported(INDEX_op_ctpop, TCG_TYPE_I64, 0)) {
        /* Direct 64-bit host support.  */
        tcg_gen_op2_i64(INDEX_op_ctpop, ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 32
               && tcg_op_supported(INDEX_op_ctpop, TCG_TYPE_I32, 0)) {
        /* Sum the popcounts of the two halves; the total fits in
           the low word, so clear the high word afterward.  */
        tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        /* No host support either way: use the helper.  */
        gen_helper_ctpop_i64(ret, arg1);
    }
}
2465 
/*
 * Rotate ARG1 left by ARG2 bits.
 */
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (tcg_op_supported(INDEX_op_rotl, TCG_TYPE_I64, 0)) {
        tcg_gen_op3_i64(INDEX_op_rotl, ret, arg1, arg2);
    } else if (tcg_op_supported(INDEX_op_rotr, TCG_TYPE_I64, 0)) {
        /*
         * Bug fix: this branch previously re-tested INDEX_op_rotl, which
         * made it unreachable and emitted INDEX_op_rotr without checking
         * support.  Test rotr, mirroring tcg_gen_rotr_i64.
         * rotl(x, n) == rotr(x, -n) modulo 64.
         */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_neg_i64(t0, arg2);
        tcg_gen_op3_i64(INDEX_op_rotr, ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        /* Generic expansion: (x << n) | (x >> ((-n) mod 64)).  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_neg_i64(t1, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2486 
/*
 * Rotate ARG1 left by the constant ARG2 bits (0 <= ARG2 < 64).
 */
void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        /* Rotate by zero is a plain move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else if (tcg_op_supported(INDEX_op_rotl, TCG_TYPE_I64, 0)) {
        TCGv_i64 t0 = tcg_constant_i64(arg2);
        tcg_gen_op3_i64(INDEX_op_rotl, ret, arg1, t0);
    } else if (tcg_op_supported(INDEX_op_rotr, TCG_TYPE_I64, 0)) {
        /* rotl by n == rotr by 64 - n for constant 0 < n < 64.  */
        TCGv_i64 t0 = tcg_constant_i64(64 - arg2);
        tcg_gen_op3_i64(INDEX_op_rotr, ret, arg1, t0);
    } else {
        /* Generic expansion: (x << n) | (x >> (64 - n)).  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2509 
void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* Rotate ARG1 right by ARG2 bits.  */
    if (tcg_op_supported(INDEX_op_rotr, TCG_TYPE_I64, 0)) {
        tcg_gen_op3_i64(INDEX_op_rotr, ret, arg1, arg2);
        return;
    }
    if (tcg_op_supported(INDEX_op_rotl, TCG_TYPE_I64, 0)) {
        /* rotr(x, n) == rotl(x, -n) modulo 64.  */
        TCGv_i64 neg = tcg_temp_ebb_new_i64();
        tcg_gen_neg_i64(neg, arg2);
        tcg_gen_op3_i64(INDEX_op_rotl, ret, arg1, neg);
        tcg_temp_free_i64(neg);
        return;
    }
    /* Generic expansion: (x >> n) | (x << ((-n) mod 64)).  */
    TCGv_i64 lo = tcg_temp_ebb_new_i64();
    TCGv_i64 hi = tcg_temp_ebb_new_i64();
    tcg_gen_shr_i64(lo, arg1, arg2);
    tcg_gen_neg_i64(hi, arg2);
    tcg_gen_shl_i64(hi, arg1, hi);
    tcg_gen_or_i64(ret, lo, hi);
    tcg_temp_free_i64(lo);
    tcg_temp_free_i64(hi);
}
2530 
/*
 * Rotate ARG1 right by the constant ARG2 bits (0 <= ARG2 < 64).
 */
void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* rotr by n == rotl by (-n) mod 64.  */
    tcg_gen_rotli_i64(ret, arg1, -arg2 & 63);
}
2536 
/*
 * Deposit the low LEN bits of ARG2 into ARG1 at bit offset OFS,
 * leaving the other bits of ARG1 unchanged:
 *   ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs)
 * with mask = (1 << len) - 1.
 */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (len == 64) {
        /* Full-width deposit replaces arg1 entirely.  */
        tcg_gen_mov_i64(ret, arg2);
        return;
    }

    if (TCG_TARGET_REG_BITS == 64) {
        if (TCG_TARGET_deposit_valid(TCG_TYPE_I64, ofs, len)) {
            /* The backend handles this (ofs, len) pair directly.  */
            tcg_gen_op5ii_i64(INDEX_op_deposit, ret, arg1, arg2, ofs, len);
            return;
        }
    } else {
        /* 32-bit host: a field contained in one half is a 32-bit deposit. */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    t1 = tcg_temp_ebb_new_i64();

    if (tcg_op_supported(INDEX_op_extract2, TCG_TYPE_I64, 0)) {
        /* Fields touching either end of the word can use extract2.  */
        if (ofs + len == 64) {
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    /* Generic expansion: mask arg2 into place, clear the field in arg1,
       then combine.  */
    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* The field reaches the top bit; the shift discards the excess. */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}
2600 
/*
 * Deposit the low LEN bits of ARG at bit offset OFS into a zero
 * background:  ret = (arg & ((1 << len) - 1)) << ofs.
 */
void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        /* Field reaches the top: the shift alone discards the rest.  */
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field at the bottom: just mask.  */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_REG_BITS == 64 &&
               TCG_TARGET_deposit_valid(TCG_TYPE_I64, ofs, len)) {
        /* Deposit into a zero constant using the host op.  */
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit, ret, zero, arg, ofs, len);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* A field contained in one half is a 32-bit deposit_z,
               with the other half cleared.  */
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /*
         * To help two-operand hosts we prefer to zero-extend first,
         * which allows ARG to stay live.
         */
        if (TCG_TARGET_extract_valid(TCG_TYPE_I64, 0, len)) {
            tcg_gen_extract_i64(ret, arg, 0, len);
            tcg_gen_shli_i64(ret, ret, ofs);
            return;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        if (TCG_TARGET_extract_valid(TCG_TYPE_I64, 0, ofs + len)) {
            tcg_gen_shli_i64(ret, arg, ofs);
            tcg_gen_extract_i64(ret, ret, 0, ofs + len);
            return;
        }
        /* Generic expansion: mask then shift.  */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}
2650 
/*
 * Extract LEN bits from ARG at bit offset OFS, zero-extended:
 *   ret = (arg >> ofs) & ((1 << len) - 1).
 */
void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        /* Field reaches the top bit: a plain logical shift suffices.  */
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }

        /* The field is split across two words. */
        /* Funnel-shift the pair down by OFS to form the low result word. */
        tcg_gen_extract2_i32(TCGV_LOW(ret), TCGV_LOW(arg),
                             TCGV_HIGH(arg), ofs);
        if (len <= 32) {
            /* The whole field now sits in the low word; mask it there.  */
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(ret), 0, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            /* The upper part of the field (bits 32..len-1 of the result)
               comes from the high input word at offset OFS.  */
            tcg_gen_extract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg),
                                ofs, len - 32);
        }
        return;
    }

    if (TCG_TARGET_extract_valid(TCG_TYPE_I64, ofs, len)) {
        /* The backend handles this (ofs, len) pair directly.  */
        tcg_gen_op4ii_i64(INDEX_op_extract, ret, arg, ofs, len);
        return;
    }
    if (ofs == 0) {
        /* Bottom field: just mask.  */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    if (TCG_TARGET_extract_valid(TCG_TYPE_I64, 0, ofs + len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract, ret, arg, 0, ofs + len);
        tcg_gen_shri_i64(ret, ret, ofs);
        return;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
        /* Shift down, then mask with a (presumably cheap) immediate.  */
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        /* Otherwise: shift the field to the top, then back down.  */
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}
2721 
/*
 * Extract LEN bits from ARG at bit offset OFS, sign-extended:
 *   ret = sign_extend((arg >> ofs) & ((1 << len) - 1), from bit len - 1).
 */
void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported.  */
    if (ofs + len == 64) {
        /* Field reaches the top bit: an arithmetic shift suffices.  */
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            /* Low word is unchanged; sign-extend the field's top half.  */
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            TCGv_i32 t = tcg_temp_ebb_new_i32();
            /*
             * Extract the bits for the high word normally.
             * Bug fix: the field's upper part (bits ofs+32 .. ofs+len-1 of
             * ARG) sits at offset OFS within the high word, not OFS + 32;
             * the latter also violated sextract_i32's ofs/len asserts.
             */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs, len - 32);
            /* Shift the field down for the low part.  */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part.  */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB.  */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending.  */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits.  */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_sextract_valid(TCG_TYPE_I64, ofs, len)) {
        /* The backend handles this (ofs, len) pair directly.  */
        tcg_gen_op4ii_i64(INDEX_op_sextract, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    if (TCG_TARGET_sextract_valid(TCG_TYPE_I64, 0, ofs + len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract, ret, arg, 0, ofs + len);
        tcg_gen_sari_i64(ret, ret, ofs);
        return;
    }
    if (TCG_TARGET_sextract_valid(TCG_TYPE_I64, 0, len)) {
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_op4ii_i64(INDEX_op_sextract, ret, ret, 0, len);
        return;
    }

    /* Generic expansion: shift the field to the top, then arithmetic
       shift back down.  */
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}
2788 
/*
 * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
 */
void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 64);
    if (ofs == 0) {
        /* The result is just the low word.  */
        tcg_gen_mov_i64(ret, al);
    } else if (ofs == 64) {
        /* The result is just the high word.  */
        tcg_gen_mov_i64(ret, ah);
    } else if (al == ah) {
        /* Equal halves: the funnel shift degenerates to a rotate.  */
        tcg_gen_rotri_i64(ret, al, ofs);
    } else if (tcg_op_supported(INDEX_op_extract2, TCG_TYPE_I64, 0)) {
        tcg_gen_op4i_i64(INDEX_op_extract2, ret, al, ah, ofs);
    } else {
        /* Generic expansion: low bits from AL, then deposit the low
           OFS bits of AH into the top of the result.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t0, al, ofs);
        tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
        tcg_temp_free_i64(t0);
    }
}
2812 
/*
 * Conditional move:  ret = cond(c1, c2) ? v1 : v2.
 */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op6i_i64(INDEX_op_movcond, ret, c1, c2, v1, v2, cond);
    } else {
        /* 32-bit host: evaluate the 64-bit comparison into T0 (0/1),
           then select each half on T0 != 0.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);

        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, zero,
                            TCGV_LOW(v1), TCGV_LOW(v2));
        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, zero,
                            TCGV_HIGH(v1), TCGV_HIGH(v2));

        tcg_temp_free_i32(t0);
    }
}
2838 
/*
 * Double-word addition:  rh:rl = ah:al + bh:bl.
 */
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (tcg_op_supported(INDEX_op_addci, TCG_TYPE_REG, 0)) {
        /* Host has carry ops; compute the low result into a temp so
           RL may alias any of the inputs.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();

        if (TCG_TARGET_REG_BITS == 32) {
            /* Chain the carry through all four 32-bit halves.  */
            tcg_gen_op3_i32(INDEX_op_addco, TCGV_LOW(t0),
                            TCGV_LOW(al), TCGV_LOW(bl));
            tcg_gen_op3_i32(INDEX_op_addcio, TCGV_HIGH(t0),
                            TCGV_HIGH(al), TCGV_HIGH(bl));
            tcg_gen_op3_i32(INDEX_op_addcio, TCGV_LOW(rh),
                            TCGV_LOW(ah), TCGV_LOW(bh));
            tcg_gen_op3_i32(INDEX_op_addci, TCGV_HIGH(rh),
                            TCGV_HIGH(ah), TCGV_HIGH(bh));
        } else {
            /* Carry-out from the low add feeds the high add.  */
            tcg_gen_op3_i64(INDEX_op_addco, t0, al, bl);
            tcg_gen_op3_i64(INDEX_op_addci, rh, ah, bh);
        }

        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    } else {
        /* Generic expansion: detect the low-word carry with an
           unsigned compare of the sum against an addend.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2873 
/*
 * Add with carry-in and carry-out:
 *   r = a + b + ci,  co = carry out (0 or 1).
 * The generic 64-bit expansion adds CI directly, so callers are
 * expected to pass CI as 0 or 1.
 */
void tcg_gen_addcio_i64(TCGv_i64 r, TCGv_i64 co,
                        TCGv_i64 a, TCGv_i64 b, TCGv_i64 ci)
{
    if (TCG_TARGET_REG_BITS == 64) {
        if (tcg_op_supported(INDEX_op_addci, TCG_TYPE_I64, 0)) {
            TCGv_i64 discard = tcg_temp_ebb_new_i64();
            TCGv_i64 zero = tcg_constant_i64(0);
            TCGv_i64 mone = tcg_constant_i64(-1);

            /* ci + (-1) sets the host carry flag iff ci != 0.  */
            tcg_gen_op3_i64(INDEX_op_addco, discard, ci, mone);
            tcg_gen_op3_i64(INDEX_op_addcio, r, a, b);
            /* 0 + 0 + carry materializes the carry-out as 0/1.  */
            tcg_gen_op3_i64(INDEX_op_addci, co, zero, zero);
            tcg_temp_free_i64(discard);
        } else {
            /* Generic: two adds, collecting overflow from each.  */
            TCGv_i64 t0 = tcg_temp_ebb_new_i64();
            TCGv_i64 t1 = tcg_temp_ebb_new_i64();

            tcg_gen_add_i64(t0, a, b);
            tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, a);
            tcg_gen_add_i64(r, t0, ci);
            tcg_gen_setcond_i64(TCG_COND_LTU, t0, r, t0);
            /* At most one of the two adds can carry.  */
            tcg_gen_or_i64(co, t0, t1);

            tcg_temp_free_i64(t0);
            tcg_temp_free_i64(t1);
        }
    } else {
        if (tcg_op_supported(INDEX_op_addci, TCG_TYPE_I32, 0)) {
            TCGv_i32 discard = tcg_temp_ebb_new_i32();
            TCGv_i32 zero = tcg_constant_i32(0);
            TCGv_i32 mone = tcg_constant_i32(-1);

            /* Set the host carry iff either half of ci is non-zero.  */
            tcg_gen_op3_i32(INDEX_op_addco, discard, TCGV_LOW(ci), mone);
            tcg_gen_op3_i32(INDEX_op_addcio, discard, TCGV_HIGH(ci), mone);
            /* Chain the carry through both halves of the sum.  */
            tcg_gen_op3_i32(INDEX_op_addcio, TCGV_LOW(r),
                            TCGV_LOW(a), TCGV_LOW(b));
            tcg_gen_op3_i32(INDEX_op_addcio, TCGV_HIGH(r),
                            TCGV_HIGH(a), TCGV_HIGH(b));
            /* Materialize the carry-out as 0/1.  */
            tcg_gen_op3_i32(INDEX_op_addci, TCGV_LOW(co), zero, zero);
            tcg_temp_free_i32(discard);
        } else {
            TCGv_i32 t0 = tcg_temp_ebb_new_i32();
            TCGv_i32 c0 = tcg_temp_ebb_new_i32();
            TCGv_i32 c1 = tcg_temp_ebb_new_i32();

            /* Normalize the 64-bit carry-in to 0/1.  */
            tcg_gen_or_i32(c1, TCGV_LOW(ci), TCGV_HIGH(ci));
            tcg_gen_setcondi_i32(TCG_COND_NE, c1, c1, 0);

            /* Low half: a + b, then + carry, tracking overflow.  */
            tcg_gen_add_i32(t0, TCGV_LOW(a), TCGV_LOW(b));
            tcg_gen_setcond_i32(TCG_COND_LTU, c0, t0, TCGV_LOW(a));
            tcg_gen_add_i32(TCGV_LOW(r), t0, c1);
            tcg_gen_setcond_i32(TCG_COND_LTU, c1, TCGV_LOW(r), c1);
            tcg_gen_or_i32(c1, c1, c0);

            /* High half, same pattern, carry from the low half in c1.  */
            tcg_gen_add_i32(t0, TCGV_HIGH(a), TCGV_HIGH(b));
            tcg_gen_setcond_i32(TCG_COND_LTU, c0, t0, TCGV_HIGH(a));
            tcg_gen_add_i32(TCGV_HIGH(r), t0, c1);
            tcg_gen_setcond_i32(TCG_COND_LTU, c1, TCGV_HIGH(r), c1);
            tcg_gen_or_i32(TCGV_LOW(co), c0, c1);

            tcg_temp_free_i32(t0);
            tcg_temp_free_i32(c0);
            tcg_temp_free_i32(c1);
        }
        /* The carry-out fits in the low word.  */
        tcg_gen_movi_i32(TCGV_HIGH(co), 0);
    }
}
2941 
/*
 * Double-word subtraction:  rh:rl = ah:al - bh:bl.
 */
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (tcg_op_supported(INDEX_op_subbi, TCG_TYPE_REG, 0)) {
        /* Host has borrow ops; compute the low result into a temp so
           RL may alias any of the inputs.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();

        if (TCG_TARGET_REG_BITS == 32) {
            /* Chain the borrow through all four 32-bit halves.  */
            tcg_gen_op3_i32(INDEX_op_subbo, TCGV_LOW(t0),
                            TCGV_LOW(al), TCGV_LOW(bl));
            tcg_gen_op3_i32(INDEX_op_subbio, TCGV_HIGH(t0),
                            TCGV_HIGH(al), TCGV_HIGH(bl));
            tcg_gen_op3_i32(INDEX_op_subbio, TCGV_LOW(rh),
                            TCGV_LOW(ah), TCGV_LOW(bh));
            tcg_gen_op3_i32(INDEX_op_subbi, TCGV_HIGH(rh),
                            TCGV_HIGH(ah), TCGV_HIGH(bh));
        } else {
            /* Borrow-out from the low subtract feeds the high one.  */
            tcg_gen_op3_i64(INDEX_op_subbo, t0, al, bl);
            tcg_gen_op3_i64(INDEX_op_subbi, rh, ah, bh);
        }

        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    } else {
        /* Generic expansion: the low subtract borrows iff al < bl.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2976 
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* Full unsigned 64x64 -> 128 multiply:  rh:rl = arg1 * arg2.  */
    if (tcg_op_supported(INDEX_op_mulu2, TCG_TYPE_I64, 0)) {
        tcg_gen_op4_i64(INDEX_op_mulu2, rl, rh, arg1, arg2);
        return;
    }

    /* Compute the low half into a temp first, so that writing RL
       cannot clobber ARG1/ARG2 before the high half is formed.  */
    TCGv_i64 lo = tcg_temp_ebb_new_i64();
    if (tcg_op_supported(INDEX_op_muluh, TCG_TYPE_I64, 0)) {
        /* Separate low-product and high-product host ops.  */
        tcg_gen_op3_i64(INDEX_op_mul, lo, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh, rh, arg1, arg2);
    } else {
        /* No high-multiply support: use the helper for the high half.  */
        tcg_gen_mul_i64(lo, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
    }
    tcg_gen_mov_i64(rl, lo);
    tcg_temp_free_i64(lo);
}
2995 
/*
 * Full signed 64x64 -> 128 multiply:  rh:rl = arg1 * arg2.
 */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (tcg_op_supported(INDEX_op_muls2, TCG_TYPE_I64, 0)) {
        tcg_gen_op4_i64(INDEX_op_muls2, rl, rh, arg1, arg2);
    } else if (tcg_op_supported(INDEX_op_mulsh, TCG_TYPE_I64, 0)) {
        /* Low product into a temp so RL may alias the inputs.  */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (tcg_op_supported(INDEX_op_mulu2, TCG_TYPE_I64, 0) ||
               tcg_op_supported(INDEX_op_muluh, TCG_TYPE_I64, 0)) {
        /* Derive the signed product from the unsigned one:
           signed_high = unsigned_high - (arg1 < 0 ? arg2 : 0)
                                       - (arg2 < 0 ? arg1 : 0).  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        TCGv_i64 t3 = tcg_temp_ebb_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* No usable host op: helper computes the signed high half.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
3033 
void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    /* Mixed multiply: ARG1 signed, ARG2 unsigned; rh:rl = arg1 * arg2. */
    TCGv_i64 lo = tcg_temp_ebb_new_i64();
    TCGv_i64 hi = tcg_temp_ebb_new_i64();
    TCGv_i64 fix = tcg_temp_ebb_new_i64();

    tcg_gen_mulu2_i64(lo, hi, arg1, arg2);
    /* Adjust for negative input for the signed arg1:
       when arg1 < 0, subtract arg2 from the unsigned high half.  */
    tcg_gen_sari_i64(fix, arg1, 63);
    tcg_gen_and_i64(fix, fix, arg2);
    tcg_gen_sub_i64(rh, hi, fix);
    tcg_gen_mov_i64(rl, lo);
    tcg_temp_free_i64(lo);
    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(fix);
}
3049 
/* Signed minimum:  ret = (a < b signed) ? a : b.  */
void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}
3054 
/* Unsigned minimum:  ret = (a < b unsigned) ? a : b.  */
void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}
3059 
/* Signed maximum:  ret = (a < b signed) ? b : a.  */
void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}
3064 
/* Unsigned maximum:  ret = (a < b unsigned) ? b : a.  */
void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}
3069 
void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
{
    /* Absolute value via the sign-mask trick (wraps for INT64_MIN).  */
    TCGv_i64 sign = tcg_temp_ebb_new_i64();

    /* sign = (a < 0) ? -1 : 0.  */
    tcg_gen_sari_i64(sign, a, 63);
    /* (a ^ sign) - sign: identity when a >= 0, negation when a < 0.  */
    tcg_gen_xor_i64(ret, a, sign);
    tcg_gen_sub_i64(ret, ret, sign);
    tcg_temp_free_i64(sign);
}
3079 
3080 /* Size changing operations.  */
3081 
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    /* Return the low 32 bits of ARG as an i32.  */
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32, TCG_TYPE_I32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /* 32-bit host: an i64 is a register pair, copy the low half.  */
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    }
}
3091 
/* Extract the high 32 bits of a 64-bit value into a 32-bit temp. */
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        /* Real extraction op on 64-bit hosts. */
        tcg_gen_op2(INDEX_op_extrh_i64_i32, TCG_TYPE_I32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /* On 32-bit hosts the i64 is a register pair; take the high half. */
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    }
}
3101 
/* Zero-extend a 32-bit value into a 64-bit temp. */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op2(INDEX_op_extu_i32_i64, TCG_TYPE_I64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    } else {
        /* Register pair: copy into the low half, clear the high half. */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
3112 
/* Sign-extend a 32-bit value into a 64-bit temp. */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op2(INDEX_op_ext_i32_i64, TCG_TYPE_I64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    } else {
        /*
         * Register pair: copy into the low half, then replicate the
         * sign bit into the high half.  Read back from TCGV_LOW(ret)
         * so this remains correct if ret aliases arg.
         */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}
3123 
/* Build a 64-bit value from two 32-bit halves: dest = high:low. */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 hi64;

    if (TCG_TARGET_REG_BITS == 32) {
        /* The i64 is already a register pair; fill each half directly. */
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    hi64 = tcg_temp_ebb_new_i64();
    /*
     * These zero-extensions are only needed for type correctness.
     * We may be able to do better given target specific information.
     */
    tcg_gen_extu_i32_i64(hi64, high);
    tcg_gen_extu_i32_i64(dest, low);
    /*
     * If deposit is available, use it.  Otherwise exploit the fact
     * that both halves are known zero-extended above.
     */
    if (TCG_TARGET_deposit_valid(TCG_TYPE_I64, 32, 32)) {
        tcg_gen_deposit_i64(dest, dest, hi64, 32, 32);
    } else {
        tcg_gen_shli_i64(hi64, hi64, 32);
        tcg_gen_or_i64(dest, dest, hi64);
    }
    tcg_temp_free_i64(hi64);
}
3149 
/* Split a 64-bit value into its low (lo) and high (hi) 32-bit halves. */
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    } else {
        /* Register pair: the halves already exist; just copy them. */
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    }
}
3160 
/*
 * Split arg into zero-extended low (lo) and high (hi) 32-bit halves,
 * each kept in a 64-bit temp.
 * NOTE(review): lo must not alias arg — the truncation would clobber
 * the high half before the shift.  Confirm callers guarantee this.
 */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
3166 
/* Pack two 32-bit quantities into one i64: ret = hi[31:0]:lo[31:0]. */
void tcg_gen_concat32_i64(TCGv_i64 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_deposit_i64(ret, lo, hi, 32, 32);
}
3171 
/* Split a 128-bit value into its low (lo) and high (hi) 64-bit halves. */
void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}
3177 
/* Build a 128-bit value from two 64-bit halves: ret = hi:lo. */
void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}
3183 
/* Copy a 128-bit value: dst = src.  A self-move emits nothing. */
void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
{
    if (dst == src) {
        return;
    }
    tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
    tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
}
3191 
/*
 * Load a 128-bit value from host memory at base + offset, as two
 * 64-bit loads ordered to match the host's endianness.
 */
void tcg_gen_ld_i128(TCGv_i128 ret, TCGv_ptr base, tcg_target_long offset)
{
    if (!HOST_BIG_ENDIAN) {
        tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset);
        tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset + 8);
    } else {
        /* Big-endian host: the high half comes first in memory. */
        tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset);
        tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset + 8);
    }
}
3202 
/*
 * Store a 128-bit value to host memory at base + offset, as two
 * 64-bit stores ordered to match the host's endianness.
 */
void tcg_gen_st_i128(TCGv_i128 val, TCGv_ptr base, tcg_target_long offset)
{
    if (!HOST_BIG_ENDIAN) {
        tcg_gen_st_i64(TCGV128_LOW(val), base, offset);
        tcg_gen_st_i64(TCGV128_HIGH(val), base, offset + 8);
    } else {
        /* Big-endian host: the high half comes first in memory. */
        tcg_gen_st_i64(TCGV128_HIGH(val), base, offset);
        tcg_gen_st_i64(TCGV128_LOW(val), base, offset + 8);
    }
}
3213 
3214 /* QEMU specific operations.  */
3215 
/*
 * Emit an exit from the current TB.  The value returned to the
 * main loop packs the TB pointer with the exit index in the low
 * bits (val = tb + idx); tb == NULL with idx == 0 means "no chaining".
 */
void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb.  */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label.  */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    tcg_gen_op1i(INDEX_op_exit_tb, 0, val);
}
3246 
/*
 * Emit a direct jump to chained exit 'idx' of the current TB.
 * Must be followed by tcg_gen_exit_tb with the same index.
 */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    /* Helpers emitted for plugin memory callbacks do not survive the TB. */
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, 0, idx);
}
3261 
tcg_gen_lookup_and_goto_ptr(void)3262 void tcg_gen_lookup_and_goto_ptr(void)
3263 {
3264     TCGv_ptr ptr;
3265 
3266     if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
3267         tcg_gen_exit_tb(NULL, 0);
3268         return;
3269     }
3270 
3271     plugin_gen_disable_mem_helpers();
3272     ptr = tcg_temp_ebb_new_ptr();
3273     gen_helper_lookup_tb_ptr(ptr, tcg_env);
3274     tcg_gen_op1i(INDEX_op_goto_ptr, TCG_TYPE_PTR, tcgv_ptr_arg(ptr));
3275     tcg_temp_free_ptr(ptr);
3276 }
3277