xref: /qemu/tcg/tcg-op.c (revision 513823e7521a09ed7ad1e32e6454bac3b2cbf52d)
1 /*
2  * Tiny Code Generator for QEMU
3  *
4  * Copyright (c) 2008 Fabrice Bellard
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to deal
8  * in the Software without restriction, including without limitation the rights
9  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10  * copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in
14  * all copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22  * THE SOFTWARE.
23  */
24 
25 #include "qemu/osdep.h"
26 #include "tcg/tcg.h"
27 #include "tcg/tcg-temp-internal.h"
28 #include "tcg/tcg-op-common.h"
29 #include "exec/translation-block.h"
30 #include "exec/plugin-gen.h"
31 #include "tcg-internal.h"
32 #include "tcg-has.h"
33 
34 /*
35  * Encourage the compiler to tail-call to a function, rather than inlining.
36  * Minimizes code size across 99 bottles of beer on the wall.
37  */
38 #define NI  __attribute__((noinline))
39 
40 TCGOp * NI tcg_gen_op1(TCGOpcode opc, TCGType type, TCGArg a1)
41 {
42     TCGOp *op = tcg_emit_op(opc, 1);
43     TCGOP_TYPE(op) = type;
44     op->args[0] = a1;
45     return op;
46 }
47 
48 TCGOp * NI tcg_gen_op2(TCGOpcode opc, TCGType type, TCGArg a1, TCGArg a2)
49 {
50     TCGOp *op = tcg_emit_op(opc, 2);
51     TCGOP_TYPE(op) = type;
52     op->args[0] = a1;
53     op->args[1] = a2;
54     return op;
55 }
56 
57 TCGOp * NI tcg_gen_op3(TCGOpcode opc, TCGType type, TCGArg a1,
58                        TCGArg a2, TCGArg a3)
59 {
60     TCGOp *op = tcg_emit_op(opc, 3);
61     TCGOP_TYPE(op) = type;
62     op->args[0] = a1;
63     op->args[1] = a2;
64     op->args[2] = a3;
65     return op;
66 }
67 
68 TCGOp * NI tcg_gen_op4(TCGOpcode opc, TCGType type, TCGArg a1, TCGArg a2,
69                        TCGArg a3, TCGArg a4)
70 {
71     TCGOp *op = tcg_emit_op(opc, 4);
72     TCGOP_TYPE(op) = type;
73     op->args[0] = a1;
74     op->args[1] = a2;
75     op->args[2] = a3;
76     op->args[3] = a4;
77     return op;
78 }
79 
80 TCGOp * NI tcg_gen_op5(TCGOpcode opc, TCGType type, TCGArg a1, TCGArg a2,
81                        TCGArg a3, TCGArg a4, TCGArg a5)
82 {
83     TCGOp *op = tcg_emit_op(opc, 5);
84     TCGOP_TYPE(op) = type;
85     op->args[0] = a1;
86     op->args[1] = a2;
87     op->args[2] = a3;
88     op->args[3] = a4;
89     op->args[4] = a5;
90     return op;
91 }
92 
93 TCGOp * NI tcg_gen_op6(TCGOpcode opc, TCGType type, TCGArg a1, TCGArg a2,
94                        TCGArg a3, TCGArg a4, TCGArg a5, TCGArg a6)
95 {
96     TCGOp *op = tcg_emit_op(opc, 6);
97     TCGOP_TYPE(op) = type;
98     op->args[0] = a1;
99     op->args[1] = a2;
100     op->args[2] = a3;
101     op->args[3] = a4;
102     op->args[4] = a5;
103     op->args[5] = a6;
104     return op;
105 }
106 
107 /*
108  * With CONFIG_DEBUG_TCG, tcgv_*_tmp via tcgv_*_arg, is an out-of-line
109  * assertion check.  Force tail calls to avoid too much code expansion.
110  */
111 #ifdef CONFIG_DEBUG_TCG
112 # define DNI NI
113 #else
114 # define DNI
115 #endif
116 
/* Emit a 1-argument op whose argument is an i32 temp. */
static void DNI tcg_gen_op1_i32(TCGOpcode opc, TCGType type, TCGv_i32 a1)
{
    tcg_gen_op1(opc, type, tcgv_i32_arg(a1));
}

/* Emit a 1-argument op whose argument is an i64 temp. */
static void DNI tcg_gen_op1_i64(TCGOpcode opc, TCGType type, TCGv_i64 a1)
{
    tcg_gen_op1(opc, type, tcgv_i64_arg(a1));
}

/* Emit a 1-argument op whose argument is a raw TCGArg. */
static TCGOp * DNI tcg_gen_op1i(TCGOpcode opc, TCGType type, TCGArg a1)
{
    return tcg_gen_op1(opc, type, a1);
}

/* Emit a 2-argument op on i32 temps. */
static void DNI tcg_gen_op2_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2)
{
    tcg_gen_op2(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2));
}

/* Emit a 2-argument op on i64 temps. */
static void DNI tcg_gen_op2_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2)
{
    tcg_gen_op2(opc, TCG_TYPE_I64, tcgv_i64_arg(a1), tcgv_i64_arg(a2));
}

/* Emit a 3-argument op on i32 temps. */
static void DNI tcg_gen_op3_i32(TCGOpcode opc, TCGv_i32 a1,
                                TCGv_i32 a2, TCGv_i32 a3)
{
    tcg_gen_op3(opc, TCG_TYPE_I32, tcgv_i32_arg(a1),
                tcgv_i32_arg(a2), tcgv_i32_arg(a3));
}

/* Emit a 3-argument op on i64 temps. */
static void DNI tcg_gen_op3_i64(TCGOpcode opc, TCGv_i64 a1,
                                TCGv_i64 a2, TCGv_i64 a3)
{
    tcg_gen_op3(opc, TCG_TYPE_I64, tcgv_i64_arg(a1),
                tcgv_i64_arg(a2), tcgv_i64_arg(a3));
}

/* Emit a 3-argument op on i32 temps; the last argument is a raw TCGArg. */
static void DNI tcg_gen_op3i_i32(TCGOpcode opc, TCGv_i32 a1,
                                 TCGv_i32 a2, TCGArg a3)
{
    tcg_gen_op3(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2), a3);
}

/* Emit a 3-argument op on i64 temps; the last argument is a raw TCGArg. */
static void DNI tcg_gen_op3i_i64(TCGOpcode opc, TCGv_i64 a1,
                                 TCGv_i64 a2, TCGArg a3)
{
    tcg_gen_op3(opc, TCG_TYPE_I64, tcgv_i64_arg(a1), tcgv_i64_arg(a2), a3);
}
167 
/* Emit an i32 load/store op: value temp, base pointer, byte offset. */
static void DNI tcg_gen_ldst_op_i32(TCGOpcode opc, TCGv_i32 val,
                                    TCGv_ptr base, TCGArg offset)
{
    tcg_gen_op3(opc, TCG_TYPE_I32, tcgv_i32_arg(val),
                tcgv_ptr_arg(base), offset);
}

/* Emit an i64 load/store op: value temp, base pointer, byte offset. */
static void DNI tcg_gen_ldst_op_i64(TCGOpcode opc, TCGv_i64 val,
                                    TCGv_ptr base, TCGArg offset)
{
    tcg_gen_op3(opc, TCG_TYPE_I64, tcgv_i64_arg(val),
                tcgv_ptr_arg(base), offset);
}
181 
/* Emit a 4-argument op on i32 temps. */
static void DNI tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                TCGv_i32 a3, TCGv_i32 a4)
{
    tcg_gen_op4(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4));
}

/* Emit a 4-argument op on i64 temps. */
static void DNI tcg_gen_op4_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                TCGv_i64 a3, TCGv_i64 a4)
{
    tcg_gen_op4(opc, TCG_TYPE_I64, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4));
}

/* Emit a 4-argument op on i32 temps; the last argument is a raw TCGArg. */
static void DNI tcg_gen_op4i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                 TCGv_i32 a3, TCGArg a4)
{
    tcg_gen_op4(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), a4);
}

/* Emit a 4-argument op on i64 temps; the last argument is a raw TCGArg. */
static void DNI tcg_gen_op4i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                 TCGv_i64 a3, TCGArg a4)
{
    tcg_gen_op4(opc, TCG_TYPE_I64, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), a4);
}

/* Emit a 4-argument op on i32 temps; the last two are raw TCGArgs. */
static TCGOp * DNI tcg_gen_op4ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                     TCGArg a3, TCGArg a4)
{
    return tcg_gen_op4(opc, TCG_TYPE_I32,
                       tcgv_i32_arg(a1), tcgv_i32_arg(a2), a3, a4);
}

/* Emit a 4-argument op on i64 temps; the last two are raw TCGArgs. */
static TCGOp * DNI tcg_gen_op4ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                     TCGArg a3, TCGArg a4)
{
    return tcg_gen_op4(opc, TCG_TYPE_I64,
                       tcgv_i64_arg(a1), tcgv_i64_arg(a2), a3, a4);
}
223 
/* Emit a 5-argument op on i32 temps. */
static void DNI tcg_gen_op5_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                TCGv_i32 a3, TCGv_i32 a4, TCGv_i32 a5)
{
    tcg_gen_op5(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5));
}

/* Emit a 5-argument op on i64 temps. */
static void DNI tcg_gen_op5_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                TCGv_i64 a3, TCGv_i64 a4, TCGv_i64 a5)
{
    tcg_gen_op5(opc, TCG_TYPE_I64, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5));
}

/* Emit a 5-argument op on i32 temps; the last two are raw TCGArgs. */
static void DNI tcg_gen_op5ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                  TCGv_i32 a3, TCGArg a4, TCGArg a5)
{
    tcg_gen_op5(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), a4, a5);
}

/* Emit a 5-argument op on i64 temps; the last two are raw TCGArgs. */
static void DNI tcg_gen_op5ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                  TCGv_i64 a3, TCGArg a4, TCGArg a5)
{
    tcg_gen_op5(opc, TCG_TYPE_I64, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), a4, a5);
}
251 
/* Emit a 6-argument op on i32 temps. */
static void DNI tcg_gen_op6_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                TCGv_i32 a3, TCGv_i32 a4,
                                TCGv_i32 a5, TCGv_i32 a6)
{
    tcg_gen_op6(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5),
                tcgv_i32_arg(a6));
}

/* Emit a 6-argument op on i64 temps. */
static void DNI tcg_gen_op6_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                TCGv_i64 a3, TCGv_i64 a4,
                                TCGv_i64 a5, TCGv_i64 a6)
{
    tcg_gen_op6(opc, TCG_TYPE_I64, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5),
                tcgv_i64_arg(a6));
}

/* Emit a 6-argument op on i32 temps; the last argument is a raw TCGArg. */
static void DNI tcg_gen_op6i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                 TCGv_i32 a3, TCGv_i32 a4,
                                 TCGv_i32 a5, TCGArg a6)
{
    tcg_gen_op6(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5), a6);
}

/* Emit a 6-argument op on i64 temps; the last argument is a raw TCGArg. */
static void DNI tcg_gen_op6i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                 TCGv_i64 a3, TCGv_i64 a4,
                                 TCGv_i64 a5, TCGArg a6)
{
    tcg_gen_op6(opc, TCG_TYPE_I64, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5), a6);
}

/* Emit a 6-argument op on i32 temps; the last two are raw TCGArgs. */
static TCGOp * DNI tcg_gen_op6ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                     TCGv_i32 a3, TCGv_i32 a4,
                                     TCGArg a5, TCGArg a6)
{
    return tcg_gen_op6(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                       tcgv_i32_arg(a3), tcgv_i32_arg(a4), a5, a6);
}
293 
294 /* Generic ops.  */
295 
/* Define label @l at the current point in the instruction stream. */
void gen_set_label(TCGLabel *l)
{
    l->present = 1;
    tcg_gen_op1(INDEX_op_set_label, 0, label_arg(l));
}
301 
/* Record @op as a branch that references label @l. */
static void add_as_label_use(TCGLabel *l, TCGOp *op)
{
    TCGLabelUse *u = tcg_malloc(sizeof(TCGLabelUse));

    u->op = op;
    QSIMPLEQ_INSERT_TAIL(&l->branches, u, next);
}
309 
/* Emit an unconditional branch to label @l. */
void tcg_gen_br(TCGLabel *l)
{
    add_as_label_use(l, tcg_gen_op1(INDEX_op_br, 0, label_arg(l)));
}
314 
/*
 * Emit a memory barrier of kind @mb_type, unless this TB is known
 * not to execute in parallel with other threads.
 */
void tcg_gen_mb(TCGBar mb_type)
{
#ifdef CONFIG_USER_ONLY
    /* User-mode only needs the barrier when the TB runs in parallel. */
    bool parallel = tcg_ctx->gen_tb->cflags & CF_PARALLEL;
#else
    /*
     * It is tempting to elide the barrier in a uniprocessor context.
     * However, even with a single cpu we have i/o threads running in
     * parallel, and lack of memory order can result in e.g. virtio
     * queue entries being read incorrectly.
     */
    bool parallel = true;
#endif

    if (parallel) {
        tcg_gen_op1(INDEX_op_mb, 0, mb_type);
    }
}
333 
/* Emit a plugin callback marker; @from identifies the insertion point. */
void tcg_gen_plugin_cb(unsigned from)
{
    tcg_gen_op1(INDEX_op_plugin_cb, 0, from);
}

/* Emit a plugin memory-access callback for @addr with packed @meminfo. */
void tcg_gen_plugin_mem_cb(TCGv_i64 addr, unsigned meminfo)
{
    tcg_gen_op2(INDEX_op_plugin_mem_cb, 0, tcgv_i64_arg(addr), meminfo);
}
343 
344 /* 32 bit ops */
345 
/* Mark the current value of @arg as no longer required. */
void tcg_gen_discard_i32(TCGv_i32 arg)
{
    tcg_gen_op1_i32(INDEX_op_discard, TCG_TYPE_I32, arg);
}
350 
351 void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
352 {
353     if (ret != arg) {
354         tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
355     }
356 }
357 
/* Set @ret to the constant @arg. */
void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}
362 
/* Compute ret = arg1 + arg2. */
void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
}
367 
368 void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
369 {
370     /* some cases can be optimized here */
371     if (arg2 == 0) {
372         tcg_gen_mov_i32(ret, arg1);
373     } else {
374         tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
375     }
376 }
377 
/* Compute ret = arg1 - arg2. */
void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2);
}
382 
383 void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
384 {
385     if (arg1 == 0) {
386         tcg_gen_neg_i32(ret, arg2);
387     } else {
388         tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
389     }
390 }
391 
/* Compute ret = arg1 - arg2, implemented as addition of the negated constant. */
void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_addi_i32(ret, arg1, -arg2);
}
396 
/* Compute ret = -arg. */
void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
}
401 
/* Compute ret = arg1 & arg2. */
void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
}
406 
/* Compute ret = arg1 & arg2 with a constant mask. */
void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        /* Masking with zero yields zero. */
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        /* Masking with all-ones is a plain move. */
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        /* Low 16-bit mask maps to a zero-extension op if available. */
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
}
434 
/* Compute ret = arg1 | arg2. */
void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
}
439 
440 void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
441 {
442     /* Some cases can be optimized here.  */
443     if (arg2 == -1) {
444         tcg_gen_movi_i32(ret, -1);
445     } else if (arg2 == 0) {
446         tcg_gen_mov_i32(ret, arg1);
447     } else {
448         tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
449     }
450 }
451 
/* Compute ret = arg1 ^ arg2. */
void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
}
456 
/* Compute ret = arg1 ^ arg2 with a constant operand. */
void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        /* XOR with zero is a plain move. */
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
469 
/* Compute ret = ~arg, via the native op or as XOR with all-ones. */
void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_not_i32) {
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
    } else {
        tcg_gen_xori_i32(ret, arg, -1);
    }
}
478 
/* Compute ret = arg1 << arg2. */
void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2);
}
483 
484 void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
485 {
486     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
487     if (arg2 == 0) {
488         tcg_gen_mov_i32(ret, arg1);
489     } else {
490         tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
491     }
492 }
493 
/* Compute ret = arg1 >> arg2, logical (unsigned) shift. */
void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2);
}
498 
499 void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
500 {
501     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
502     if (arg2 == 0) {
503         tcg_gen_mov_i32(ret, arg1);
504     } else {
505         tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
506     }
507 }
508 
/* Compute ret = arg1 >> arg2, arithmetic (sign-preserving) shift. */
void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2);
}
513 
514 void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
515 {
516     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
517     if (arg2 == 0) {
518         tcg_gen_mov_i32(ret, arg1);
519     } else {
520         tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
521     }
522 }
523 
/* Branch to @l if (arg1 cond arg2); fold ALWAYS/NEVER at translate time. */
void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGOp *op = tcg_gen_op4ii_i32(INDEX_op_brcond_i32,
                                      arg1, arg2, cond, label_arg(l));
        /* Record the branch so the label can be resolved later. */
        add_as_label_use(l, op);
    }
}
534 
/* Branch to @l if (arg1 cond arg2) with a constant second operand. */
void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    /* Handle ALWAYS/NEVER here to avoid materializing the constant. */
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
    }
}
543 
/* Compute ret = (arg1 cond arg2) ? 1 : 0. */
void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}
555 
/* Compute ret = (arg1 cond arg2) ? 1 : 0 with a constant second operand. */
void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
561 
/* Compute ret = (arg1 cond arg2) ? -1 : 0. */
void tcg_gen_negsetcond_i32(TCGCond cond, TCGv_i32 ret,
                            TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i32) {
        tcg_gen_op4i_i32(INDEX_op_negsetcond_i32, ret, arg1, arg2, cond);
    } else {
        /* Fall back to setcond followed by negation of the 0/1 result. */
        tcg_gen_setcond_i32(cond, ret, arg1, arg2);
        tcg_gen_neg_i32(ret, ret);
    }
}
576 
/* Compute ret = (arg1 cond arg2) ? -1 : 0 with a constant second operand. */
void tcg_gen_negsetcondi_i32(TCGCond cond, TCGv_i32 ret,
                             TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_negsetcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
582 
/* Compute ret = arg1 * arg2, low 32 bits of the product. */
void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2);
}
587 
/* Compute ret = arg1 * arg2 with a constant multiplier. */
void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
    } else if (is_power_of_2(arg2)) {
        /* Strength-reduce a power-of-two multiplier to a left shift. */
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
    } else {
        tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
598 
/* Compute ret = arg1 / arg2, signed, using the best available lowering. */
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* div2 takes the dividend in two halves: sign-extend for the high. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        /* No native divide: call the out-of-line helper. */
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
612 
/* Compute ret = arg1 % arg2, signed, using the best available lowering. */
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* rem = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* div2 produces the remainder in the second output. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
632 
/* Compute ret = arg1 / arg2, unsigned, using the best available lowering. */
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* Unsigned: the high half of the double-width dividend is zero. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, zero, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}
646 
/* Compute ret = arg1 % arg2, unsigned, using the best available lowering. */
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* rem = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* divu2 produces the remainder in the second output. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, zero, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
666 
/* Compute ret = arg1 & ~arg2. */
void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        /* Expand as NOT followed by AND. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
678 
/* Compute ret = ~(arg1 ^ arg2). */
void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        /* Expand as XOR followed by NOT. */
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}
688 
/* Compute ret = ~(arg1 & arg2). */
void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        /* Expand as AND followed by NOT. */
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}
698 
/* Compute ret = ~(arg1 | arg2). */
void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        /* Expand as OR followed by NOT. */
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}
708 
/* Compute ret = arg1 | ~arg2. */
void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        /* Expand as NOT followed by OR. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
720 
/* Compute ret = count-leading-zeros(arg1), or arg2 when arg1 is zero. */
void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        /*
         * Compute via the 64-bit op: the zero-extended input has 32 extra
         * leading zeros, so bias the zero-input default by 32 and then
         * subtract 32 from the result.
         */
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}
740 
/* Compute ret = clz(arg1), with constant default @arg2 for zero input. */
void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}
745 
/* Compute ret = count-trailing-zeros(arg1), or arg2 when arg1 is zero. */
void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        /* Zero-extend and use the 64-bit op; low 32 bits are unchanged. */
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_ebb_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            /* (arg1 - 1) & ~arg1 sets exactly the bits below the lowest
               set bit; their population count equals ctz. */
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            /* Isolate the lowest set bit, then ctz = 31 - clz = clz ^ 31. */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        /* Select the arg2 default when the input was zero. */
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}
783 
/* Compute ret = ctz(arg1), with constant default @arg2 for zero input. */
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        /* ctpop((arg1 - 1) & ~arg1) is 32 when arg1 == 0, as desired. */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
797 
/* Compute ret = number of leading redundant sign bits in @arg. */
void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        /* XOR with the broadcast sign turns leading sign bits into zeros. */
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        /* The sign bit itself is not counted. */
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}
811 
/* Compute ret = population count (number of set bits) of arg1. */
void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        /* Zero-extend and count in 64 bits; the count fits in 32. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}
826 
/* Compute ret = arg1 rotated left by arg2. */
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        /* Compose from a left shift and the complementary right shift. */
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
844 
/* Compute ret = arg1 rotated left by a constant 0 <= arg2 < 32. */
void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        /* Compose from constant left and right shifts. */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
864 
/* Compute ret = arg1 rotated right by arg2. */
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        /* Compose from a right shift and the complementary left shift. */
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
882 
883 void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
884 {
885     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
886     /* some cases can be optimized here */
887     if (arg2 == 0) {
888         tcg_gen_mov_i32(ret, arg1);
889     } else {
890         tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
891     }
892 }
893 
/*
 * Deposit @len bits of @arg2 into @arg1 at bit offset @ofs,
 * leaving the other bits of @arg1 unchanged in @ret.
 */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (len == 32) {
        /* The deposit covers the whole word: just copy arg2. */
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_deposit_valid(TCG_TYPE_I32, ofs, len)) {
        /* The backend supports this ofs/len directly. */
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_ebb_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        /* Fields touching either end of the word map onto extract2. */
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    /* Generic fallback: mask and shift the field, clear the hole, OR in. */
    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* Field reaches the top bit: the shift discards the excess. */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}
941 
/*
 * Deposit @len bits of @arg at bit offset @ofs into an all-zero word:
 * ret = (arg & ((1 << len) - 1)) << ofs.
 */
void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        /* Field reaches the top bit: the shift masks implicitly. */
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field at bit 0: a plain mask suffices. */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_deposit_valid(TCG_TYPE_I32, ofs, len)) {
        /* Deposit into a zero constant via the backend op. */
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        /* Generic fallback: mask then shift. */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}
997 
/*
 * Extract LEN bits from ARG starting at bit OFS, zero-extended:
 *   ret = (arg >> ofs) & ((1 << len) - 1).
 */
void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* Field reaches bit 31: a single right shift suffices. */
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        /* Field starts at bit 0: a single mask suffices. */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_extract_valid(TCG_TYPE_I32, ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        /* Shift down, then mask off the field. */
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        /* Shift the field to the top, then shift back down with
           zero fill. */
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}
1053 
/*
 * Extract LEN bits from ARG starting at bit OFS, sign-extended
 * from bit LEN-1 of the field.
 */
void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* Field reaches bit 31: a single arithmetic shift suffices. */
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        /* Byte/halfword fields at bit 0 are plain sign extensions. */
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_sextract_valid(TCG_TYPE_I32, ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    /* Fallback: shift the field to the top, then arithmetic-shift down. */
    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}
1120 
1121 /*
1122  * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
1123  * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
1124  */
1125 void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
1126                           unsigned int ofs)
1127 {
1128     tcg_debug_assert(ofs <= 32);
1129     if (ofs == 0) {
1130         tcg_gen_mov_i32(ret, al);
1131     } else if (ofs == 32) {
1132         tcg_gen_mov_i32(ret, ah);
1133     } else if (al == ah) {
1134         tcg_gen_rotri_i32(ret, al, ofs);
1135     } else if (TCG_TARGET_HAS_extract2_i32) {
1136         tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
1137     } else {
1138         TCGv_i32 t0 = tcg_temp_ebb_new_i32();
1139         tcg_gen_shri_i32(t0, al, ofs);
1140         tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
1141         tcg_temp_free_i32(t0);
1142     }
1143 }
1144 
1145 void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
1146                          TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
1147 {
1148     if (cond == TCG_COND_ALWAYS) {
1149         tcg_gen_mov_i32(ret, v1);
1150     } else if (cond == TCG_COND_NEVER) {
1151         tcg_gen_mov_i32(ret, v2);
1152     } else {
1153         tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
1154     }
1155 }
1156 
1157 void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
1158                       TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
1159 {
1160     if (TCG_TARGET_HAS_add2_i32) {
1161         tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
1162     } else {
1163         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
1164         TCGv_i64 t1 = tcg_temp_ebb_new_i64();
1165         tcg_gen_concat_i32_i64(t0, al, ah);
1166         tcg_gen_concat_i32_i64(t1, bl, bh);
1167         tcg_gen_add_i64(t0, t0, t1);
1168         tcg_gen_extr_i64_i32(rl, rh, t0);
1169         tcg_temp_free_i64(t0);
1170         tcg_temp_free_i64(t1);
1171     }
1172 }
1173 
1174 void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
1175                       TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
1176 {
1177     if (TCG_TARGET_HAS_sub2_i32) {
1178         tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
1179     } else {
1180         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
1181         TCGv_i64 t1 = tcg_temp_ebb_new_i64();
1182         tcg_gen_concat_i32_i64(t0, al, ah);
1183         tcg_gen_concat_i32_i64(t1, bl, bh);
1184         tcg_gen_sub_i64(t0, t0, t1);
1185         tcg_gen_extr_i64_i32(rl, rh, t0);
1186         tcg_temp_free_i64(t0);
1187         tcg_temp_free_i64(t1);
1188     }
1189 }
1190 
/*
 * Unsigned 32x32->64 multiply: RL receives the low 32 bits of the
 * product and RH the high 32 bits.
 */
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        /* Compute low and high halves with separate opcodes; use a
           temporary so RL may alias either input. */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 64) {
        /* 64-bit host: zero-extend and do one 64-bit multiply. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        /* 32-bit hosts are required to implement mulu2 or muluh. */
        qemu_build_not_reached();
    }
}
1214 
/*
 * Signed 32x32->64 multiply: RL receives the low 32 bits of the
 * product and RH the high 32 bits.
 */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        /* Compute low and high halves with separate opcodes; use a
           temporary so RL may alias either input. */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /*
         * Start from the unsigned product and correct the high half:
         *   sighi(a*b) = unshi(a*b) - (a < 0 ? b : 0) - (b < 0 ? a : 0)
         */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        TCGv_i32 t3 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);     /* t2 = arg1 < 0 ? -1 : 0 */
        tcg_gen_sari_i32(t3, arg2, 31);     /* t3 = arg2 < 0 ? -1 : 0 */
        tcg_gen_and_i32(t2, t2, arg2);      /* t2 = arg1 < 0 ? arg2 : 0 */
        tcg_gen_and_i32(t3, t3, arg1);      /* t3 = arg2 < 0 ? arg1 : 0 */
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        /* 64-bit host: sign-extend and do one 64-bit multiply. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1254 
/*
 * Mixed-sign 32x32->64 multiply, ARG1 signed and ARG2 unsigned:
 * RL receives the low 32 bits of the product and RH the high 32 bits.
 */
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /*
         * Start from the unsigned product and correct the high half:
         *   hi = unshi(a*b) - (a < 0 ? b : 0)
         */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);     /* t2 = arg1 < 0 ? -1 : 0 */
        tcg_gen_and_i32(t2, t2, arg2);      /* t2 = arg1 < 0 ? arg2 : 0 */
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        /* 64-bit host: sign-extend ARG1, zero-extend ARG2, multiply. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1281 
1282 void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
1283 {
1284     if (TCG_TARGET_HAS_ext8s_i32) {
1285         tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
1286     } else {
1287         tcg_gen_shli_i32(ret, arg, 24);
1288         tcg_gen_sari_i32(ret, ret, 24);
1289     }
1290 }
1291 
1292 void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
1293 {
1294     if (TCG_TARGET_HAS_ext16s_i32) {
1295         tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
1296     } else {
1297         tcg_gen_shli_i32(ret, arg, 16);
1298         tcg_gen_sari_i32(ret, ret, 16);
1299     }
1300 }
1301 
1302 void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
1303 {
1304     if (TCG_TARGET_HAS_ext8u_i32) {
1305         tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
1306     } else {
1307         tcg_gen_andi_i32(ret, arg, 0xffu);
1308     }
1309 }
1310 
1311 void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
1312 {
1313     if (TCG_TARGET_HAS_ext16u_i32) {
1314         tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
1315     } else {
1316         tcg_gen_andi_i32(ret, arg, 0xffffu);
1317     }
1318 }
1319 
1320 /*
1321  * bswap16_i32: 16-bit byte swap on the low bits of a 32-bit value.
1322  *
1323  * Byte pattern: xxab -> yyba
1324  *
1325  * With TCG_BSWAP_IZ, x == zero, else undefined.
1326  * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1327  */
1328 void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
1329 {
1330     /* Only one extension flag may be present. */
1331     tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));
1332 
1333     if (TCG_TARGET_HAS_bswap16_i32) {
1334         tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
1335     } else {
1336         TCGv_i32 t0 = tcg_temp_ebb_new_i32();
1337         TCGv_i32 t1 = tcg_temp_ebb_new_i32();
1338 
1339                                             /* arg = ..ab (IZ) xxab (!IZ) */
1340         tcg_gen_shri_i32(t0, arg, 8);       /*  t0 = ...a (IZ) .xxa (!IZ) */
1341         if (!(flags & TCG_BSWAP_IZ)) {
1342             tcg_gen_ext8u_i32(t0, t0);      /*  t0 = ...a */
1343         }
1344 
1345         if (flags & TCG_BSWAP_OS) {
1346             tcg_gen_shli_i32(t1, arg, 24);  /*  t1 = b... */
1347             tcg_gen_sari_i32(t1, t1, 16);   /*  t1 = ssb. */
1348         } else if (flags & TCG_BSWAP_OZ) {
1349             tcg_gen_ext8u_i32(t1, arg);     /*  t1 = ...b */
1350             tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = ..b. */
1351         } else {
1352             tcg_gen_shli_i32(t1, arg, 8);   /*  t1 = xab. */
1353         }
1354 
1355         tcg_gen_or_i32(ret, t0, t1);        /* ret = ..ba (OZ) */
1356                                             /*     = ssba (OS) */
1357                                             /*     = xaba (no flag) */
1358         tcg_temp_free_i32(t0);
1359         tcg_temp_free_i32(t1);
1360     }
1361 }
1362 
1363 /*
1364  * bswap32_i32: 32-bit byte swap on a 32-bit value.
1365  *
1366  * Byte pattern: abcd -> dcba
1367  */
1368 void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
1369 {
1370     if (TCG_TARGET_HAS_bswap32_i32) {
1371         tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
1372     } else {
1373         TCGv_i32 t0 = tcg_temp_ebb_new_i32();
1374         TCGv_i32 t1 = tcg_temp_ebb_new_i32();
1375         TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);
1376 
1377                                         /* arg = abcd */
1378         tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
1379         tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
1380         tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
1381         tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
1382         tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */
1383 
1384         tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
1385         tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
1386         tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */
1387 
1388         tcg_temp_free_i32(t0);
1389         tcg_temp_free_i32(t1);
1390     }
1391 }
1392 
1393 /*
1394  * hswap_i32: Swap 16-bit halfwords within a 32-bit value.
1395  *
1396  * Byte pattern: abcd -> cdab
1397  */
1398 void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
1399 {
1400     /* Swapping 2 16-bit elements is a rotate. */
1401     tcg_gen_rotli_i32(ret, arg, 16);
1402 }
1403 
void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    /* Signed minimum: ret = (a <s b) ? a : b. */
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}
1408 
void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    /* Unsigned minimum: ret = (a <u b) ? a : b. */
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}
1413 
void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    /* Signed maximum: ret = (a <s b) ? b : a. */
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}
1418 
void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    /* Unsigned maximum: ret = (a <u b) ? b : a. */
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}
1423 
1424 void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
1425 {
1426     TCGv_i32 t = tcg_temp_ebb_new_i32();
1427 
1428     tcg_gen_sari_i32(t, a, 31);
1429     tcg_gen_xor_i32(ret, a, t);
1430     tcg_gen_sub_i32(ret, ret, t);
1431     tcg_temp_free_i32(t);
1432 }
1433 
void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Zero-extending 8-bit load from host address arg2 + offset. */
    tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset);
}
1438 
void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Sign-extending 8-bit load from host address arg2 + offset. */
    tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset);
}
1443 
void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Zero-extending 16-bit load from host address arg2 + offset. */
    tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset);
}
1448 
void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Sign-extending 16-bit load from host address arg2 + offset. */
    tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset);
}
1453 
void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Full 32-bit load from host address arg2 + offset. */
    tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset);
}
1458 
void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Store the low 8 bits of arg1 to host address arg2 + offset. */
    tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
}
1463 
void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Store the low 16 bits of arg1 to host address arg2 + offset. */
    tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
}
1468 
void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Full 32-bit store of arg1 to host address arg2 + offset. */
    tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
}
1473 
1474 
1475 /* 64-bit ops */
1476 
1477 void tcg_gen_discard_i64(TCGv_i64 arg)
1478 {
1479     if (TCG_TARGET_REG_BITS == 64) {
1480         tcg_gen_op1_i64(INDEX_op_discard, TCG_TYPE_I64, arg);
1481     } else {
1482         tcg_gen_discard_i32(TCGV_LOW(arg));
1483         tcg_gen_discard_i32(TCGV_HIGH(arg));
1484     }
1485 }
1486 
/* Copy a 64-bit value; self-moves are elided. */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (ret == arg) {
        /* A move onto itself is a no-op. */
        return;
    }
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
    } else {
        TCGTemp *ts = tcgv_i64_temp(arg);

        /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
        if (ts->kind == TEMP_CONST) {
            tcg_gen_movi_i64(ret, ts->val);
        } else {
            /* Copy the two 32-bit halves individually. */
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
        }
    }
}
1506 
1507 void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
1508 {
1509     if (TCG_TARGET_REG_BITS == 64) {
1510         tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
1511     } else {
1512         tcg_gen_movi_i32(TCGV_LOW(ret), arg);
1513         tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
1514     }
1515 }
1516 
1517 void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
1518 {
1519     if (TCG_TARGET_REG_BITS == 64) {
1520         tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
1521     } else {
1522         tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
1523         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1524     }
1525 }
1526 
1527 void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
1528 {
1529     if (TCG_TARGET_REG_BITS == 64) {
1530         tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
1531     } else {
1532         tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
1533         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1534     }
1535 }
1536 
1537 void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
1538 {
1539     if (TCG_TARGET_REG_BITS == 64) {
1540         tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
1541     } else {
1542         tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
1543         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1544     }
1545 }
1546 
1547 void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
1548 {
1549     if (TCG_TARGET_REG_BITS == 64) {
1550         tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
1551     } else {
1552         tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
1553         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1554     }
1555 }
1556 
1557 void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
1558 {
1559     if (TCG_TARGET_REG_BITS == 64) {
1560         tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
1561     } else {
1562         tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
1563         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1564     }
1565 }
1566 
1567 void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
1568 {
1569     if (TCG_TARGET_REG_BITS == 64) {
1570         tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
1571     } else {
1572         tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
1573         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1574     }
1575 }
1576 
/* Full 64-bit load from host address arg2 + offset. */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /*
     * For 32-bit host, since arg2 and ret have different types,
     * they cannot be the same temporary -- no chance of overlap.
     */
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
    } else if (HOST_BIG_ENDIAN) {
        /* Big-endian host: the high half is at the lower address. */
        tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
    } else {
        /* Little-endian host: the low half is at the lower address. */
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
    }
}
1593 
1594 void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
1595 {
1596     if (TCG_TARGET_REG_BITS == 64) {
1597         tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
1598     } else {
1599         tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
1600     }
1601 }
1602 
1603 void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
1604 {
1605     if (TCG_TARGET_REG_BITS == 64) {
1606         tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
1607     } else {
1608         tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
1609     }
1610 }
1611 
1612 void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
1613 {
1614     if (TCG_TARGET_REG_BITS == 64) {
1615         tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
1616     } else {
1617         tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
1618     }
1619 }
1620 
/* Full 64-bit store of arg1 to host address arg2 + offset. */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
    } else if (HOST_BIG_ENDIAN) {
        /* Big-endian host: the high half goes to the lower address. */
        tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
    } else {
        /* Little-endian host: the low half goes to the lower address. */
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
        tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
    }
}
1633 
1634 void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1635 {
1636     if (TCG_TARGET_REG_BITS == 64) {
1637         tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
1638     } else {
1639         tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
1640                          TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
1641     }
1642 }
1643 
1644 void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1645 {
1646     if (TCG_TARGET_REG_BITS == 64) {
1647         tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
1648     } else {
1649         tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
1650                          TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
1651     }
1652 }
1653 
1654 void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1655 {
1656     if (TCG_TARGET_REG_BITS == 64) {
1657         tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
1658     } else {
1659         tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1660         tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1661     }
1662 }
1663 
1664 void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1665 {
1666     if (TCG_TARGET_REG_BITS == 64) {
1667         tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
1668     } else {
1669         tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1670         tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1671     }
1672 }
1673 
1674 void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1675 {
1676     if (TCG_TARGET_REG_BITS == 64) {
1677         tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
1678     } else {
1679         tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1680         tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1681     }
1682 }
1683 
1684 void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1685 {
1686     if (TCG_TARGET_REG_BITS == 64) {
1687         tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
1688     } else {
1689         gen_helper_shl_i64(ret, arg1, arg2);
1690     }
1691 }
1692 
1693 void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1694 {
1695     if (TCG_TARGET_REG_BITS == 64) {
1696         tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
1697     } else {
1698         gen_helper_shr_i64(ret, arg1, arg2);
1699     }
1700 }
1701 
1702 void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1703 {
1704     if (TCG_TARGET_REG_BITS == 64) {
1705         tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
1706     } else {
1707         gen_helper_sar_i64(ret, arg1, arg2);
1708     }
1709 }
1710 
1711 void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1712 {
1713     TCGv_i64 t0;
1714     TCGv_i32 t1;
1715 
1716     if (TCG_TARGET_REG_BITS == 64) {
1717         tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
1718         return;
1719     }
1720 
1721 
1722     t0 = tcg_temp_ebb_new_i64();
1723     t1 = tcg_temp_ebb_new_i32();
1724 
1725     tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
1726                       TCGV_LOW(arg1), TCGV_LOW(arg2));
1727 
1728     tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
1729     tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
1730     tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
1731     tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
1732 
1733     tcg_gen_mov_i64(ret, t0);
1734     tcg_temp_free_i64(t0);
1735     tcg_temp_free_i32(t1);
1736 }
1737 
1738 void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1739 {
1740     /* some cases can be optimized here */
1741     if (arg2 == 0) {
1742         tcg_gen_mov_i64(ret, arg1);
1743     } else if (TCG_TARGET_REG_BITS == 64) {
1744         tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
1745     } else {
1746         tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
1747                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
1748                          tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
1749     }
1750 }
1751 
1752 void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
1753 {
1754     if (arg1 == 0) {
1755         tcg_gen_neg_i64(ret, arg2);
1756     } else if (TCG_TARGET_REG_BITS == 64) {
1757         tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
1758     } else {
1759         tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
1760                          tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
1761                          TCGV_LOW(arg2), TCGV_HIGH(arg2));
1762     }
1763 }
1764 
void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* Subtracting a constant is adding its negation.  NOTE(review):
       for arg2 == INT64_MIN the negation relies on two's-complement
       wrap-around (-INT64_MIN == INT64_MIN) -- confirm build flags
       permit this. */
    tcg_gen_addi_i64(ret, arg1, -arg2);
}
1769 
1770 void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
1771 {
1772     if (TCG_TARGET_REG_BITS == 64) {
1773         tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
1774     } else {
1775         TCGv_i32 zero = tcg_constant_i32(0);
1776         tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
1777                          zero, zero, TCGV_LOW(arg), TCGV_HIGH(arg));
1778     }
1779 }
1780 
/*
 * 64-bit AND with a constant, recognizing masks that map to simpler
 * operations (zero, move, or a zero-extension opcode).
 */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: AND each half with its part of the constant. */
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        /* AND with zero yields zero. */
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        /* AND with all-ones is a plain move. */
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}
1820 
1821 void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1822 {
1823     if (TCG_TARGET_REG_BITS == 32) {
1824         tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
1825         tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
1826         return;
1827     }
1828     /* Some cases can be optimized here.  */
1829     if (arg2 == -1) {
1830         tcg_gen_movi_i64(ret, -1);
1831     } else if (arg2 == 0) {
1832         tcg_gen_mov_i64(ret, arg1);
1833     } else {
1834         tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
1835     }
1836 }
1837 
1838 void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1839 {
1840     if (TCG_TARGET_REG_BITS == 32) {
1841         tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
1842         tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
1843         return;
1844     }
1845     /* Some cases can be optimized here.  */
1846     if (arg2 == 0) {
1847         tcg_gen_mov_i64(ret, arg1);
1848     } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
1849         /* Don't recurse with tcg_gen_not_i64.  */
1850         tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
1851     } else {
1852         tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
1853     }
1854 }
1855 
/*
 * Expand a 64-bit shift by immediate for a 32-bit host, operating on
 * the low/high 32-bit halves of the operands.  'right' selects shift
 * direction; 'arith' selects arithmetic (sign-filling) right shift.
 */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        /* Shift by zero: a plain move of both halves. */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        /* A whole word crosses between halves; only c % 32 bits shift. */
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        /* c in [1,31]: the low result mixes bits from both input halves. */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            /* Shift the low half down, then deposit the bits that fall
               in from the high half into the vacated top positions. */
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        /* Left shift by 1..31: the high result mixes both input halves. */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_ebb_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        /* Write the low half last, as ret may alias arg1. */
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}
1905 
1906 void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1907 {
1908     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
1909     if (TCG_TARGET_REG_BITS == 32) {
1910         tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
1911     } else if (arg2 == 0) {
1912         tcg_gen_mov_i64(ret, arg1);
1913     } else {
1914         tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
1915     }
1916 }
1917 
1918 void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1919 {
1920     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
1921     if (TCG_TARGET_REG_BITS == 32) {
1922         tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
1923     } else if (arg2 == 0) {
1924         tcg_gen_mov_i64(ret, arg1);
1925     } else {
1926         tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
1927     }
1928 }
1929 
1930 void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1931 {
1932     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
1933     if (TCG_TARGET_REG_BITS == 32) {
1934         tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
1935     } else if (arg2 == 0) {
1936         tcg_gen_mov_i64(ret, arg1);
1937     } else {
1938         tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
1939     }
1940 }
1941 
1942 void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
1943 {
1944     if (cond == TCG_COND_ALWAYS) {
1945         tcg_gen_br(l);
1946     } else if (cond != TCG_COND_NEVER) {
1947         TCGOp *op;
1948         if (TCG_TARGET_REG_BITS == 32) {
1949             op = tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
1950                                    TCGV_HIGH(arg1), TCGV_LOW(arg2),
1951                                    TCGV_HIGH(arg2), cond, label_arg(l));
1952         } else {
1953             op = tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
1954                                    label_arg(l));
1955         }
1956         add_as_label_use(l, op);
1957     }
1958 }
1959 
1960 void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
1961 {
1962     if (TCG_TARGET_REG_BITS == 64) {
1963         tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
1964     } else if (cond == TCG_COND_ALWAYS) {
1965         tcg_gen_br(l);
1966     } else if (cond != TCG_COND_NEVER) {
1967         TCGOp *op = tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
1968                                       TCGV_LOW(arg1), TCGV_HIGH(arg1),
1969                                       tcg_constant_i32(arg2),
1970                                       tcg_constant_i32(arg2 >> 32),
1971                                       cond, label_arg(l));
1972         add_as_label_use(l, op);
1973     }
1974 }
1975 
1976 void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
1977                          TCGv_i64 arg1, TCGv_i64 arg2)
1978 {
1979     if (cond == TCG_COND_ALWAYS) {
1980         tcg_gen_movi_i64(ret, 1);
1981     } else if (cond == TCG_COND_NEVER) {
1982         tcg_gen_movi_i64(ret, 0);
1983     } else {
1984         if (TCG_TARGET_REG_BITS == 32) {
1985             tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
1986                              TCGV_LOW(arg1), TCGV_HIGH(arg1),
1987                              TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
1988             tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1989         } else {
1990             tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
1991         }
1992     }
1993 }
1994 
1995 void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
1996                           TCGv_i64 arg1, int64_t arg2)
1997 {
1998     if (TCG_TARGET_REG_BITS == 64) {
1999         tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
2000     } else if (cond == TCG_COND_ALWAYS) {
2001         tcg_gen_movi_i64(ret, 1);
2002     } else if (cond == TCG_COND_NEVER) {
2003         tcg_gen_movi_i64(ret, 0);
2004     } else {
2005         tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
2006                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
2007                          tcg_constant_i32(arg2),
2008                          tcg_constant_i32(arg2 >> 32), cond);
2009         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2010     }
2011 }
2012 
2013 void tcg_gen_negsetcondi_i64(TCGCond cond, TCGv_i64 ret,
2014                              TCGv_i64 arg1, int64_t arg2)
2015 {
2016     tcg_gen_negsetcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
2017 }
2018 
/* Generate ret = -(arg1 cond arg2): all-ones if the condition holds, else 0. */
void tcg_gen_negsetcond_i64(TCGCond cond, TCGv_i64 ret,
                            TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i64) {
        tcg_gen_op4i_i64(INDEX_op_negsetcond_i64, ret, arg1, arg2, cond);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Compute the 0/1 result of the paired compare in the low half,
           negate it to 0/-1, and replicate into the high half. */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
        tcg_gen_neg_i32(TCGV_LOW(ret), TCGV_LOW(ret));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_LOW(ret));
    } else {
        /* Fallback: setcond produces 0/1, then negate to 0/-1. */
        tcg_gen_setcond_i64(cond, ret, arg1, arg2);
        tcg_gen_neg_i64(ret, ret);
    }
}
2039 
2040 void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2041 {
2042     if (arg2 == 0) {
2043         tcg_gen_movi_i64(ret, 0);
2044     } else if (is_power_of_2(arg2)) {
2045         tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
2046     } else {
2047         tcg_gen_mul_i64(ret, arg1, tcg_constant_i64(arg2));
2048     }
2049 }
2050 
2051 void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2052 {
2053     if (TCG_TARGET_HAS_div_i64) {
2054         tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
2055     } else if (TCG_TARGET_HAS_div2_i64) {
2056         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2057         tcg_gen_sari_i64(t0, arg1, 63);
2058         tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
2059         tcg_temp_free_i64(t0);
2060     } else {
2061         gen_helper_div_i64(ret, arg1, arg2);
2062     }
2063 }
2064 
/* Generate ret = arg1 % arg2 (signed remainder). */
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* r = a - (a / b) * b */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* Sign-extend arg1 into t0 for the dividend's high half; the
           outputs are swapped vs tcg_gen_div_i64 so that ret receives
           the remainder and the quotient is discarded via t0. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
2084 
2085 void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2086 {
2087     if (TCG_TARGET_HAS_div_i64) {
2088         tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
2089     } else if (TCG_TARGET_HAS_div2_i64) {
2090         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2091         TCGv_i64 zero = tcg_constant_i64(0);
2092         tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, zero, arg2);
2093         tcg_temp_free_i64(t0);
2094     } else {
2095         gen_helper_divu_i64(ret, arg1, arg2);
2096     }
2097 }
2098 
/* Generate ret = arg1 % arg2 (unsigned remainder). */
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* r = a - (a / b) * b */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* Zero high half for the unsigned dividend pair; outputs are
           swapped vs tcg_gen_divu_i64 so that ret gets the remainder. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, zero, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
2118 
2119 void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
2120 {
2121     if (TCG_TARGET_REG_BITS == 32) {
2122         tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2123         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2124     } else if (TCG_TARGET_HAS_ext8s_i64) {
2125         tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
2126     } else {
2127         tcg_gen_shli_i64(ret, arg, 56);
2128         tcg_gen_sari_i64(ret, ret, 56);
2129     }
2130 }
2131 
2132 void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
2133 {
2134     if (TCG_TARGET_REG_BITS == 32) {
2135         tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2136         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2137     } else if (TCG_TARGET_HAS_ext16s_i64) {
2138         tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
2139     } else {
2140         tcg_gen_shli_i64(ret, arg, 48);
2141         tcg_gen_sari_i64(ret, ret, 48);
2142     }
2143 }
2144 
2145 void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
2146 {
2147     if (TCG_TARGET_REG_BITS == 32) {
2148         tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2149         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2150     } else if (TCG_TARGET_HAS_ext32s_i64) {
2151         tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
2152     } else {
2153         tcg_gen_shli_i64(ret, arg, 32);
2154         tcg_gen_sari_i64(ret, ret, 32);
2155     }
2156 }
2157 
2158 void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
2159 {
2160     if (TCG_TARGET_REG_BITS == 32) {
2161         tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2162         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2163     } else if (TCG_TARGET_HAS_ext8u_i64) {
2164         tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
2165     } else {
2166         tcg_gen_andi_i64(ret, arg, 0xffu);
2167     }
2168 }
2169 
2170 void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
2171 {
2172     if (TCG_TARGET_REG_BITS == 32) {
2173         tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2174         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2175     } else if (TCG_TARGET_HAS_ext16u_i64) {
2176         tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
2177     } else {
2178         tcg_gen_andi_i64(ret, arg, 0xffffu);
2179     }
2180 }
2181 
2182 void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
2183 {
2184     if (TCG_TARGET_REG_BITS == 32) {
2185         tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2186         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2187     } else if (TCG_TARGET_HAS_ext32u_i64) {
2188         tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
2189     } else {
2190         tcg_gen_andi_i64(ret, arg, 0xffffffffu);
2191     }
2192 }
2193 
2194 /*
2195  * bswap16_i64: 16-bit byte swap on the low bits of a 64-bit value.
2196  *
2197  * Byte pattern: xxxxxxxxab -> yyyyyyyyba
2198  *
2199  * With TCG_BSWAP_IZ, x == zero, else undefined.
2200  * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
2201  */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap within the low half; high half is sign or zero per flags. */
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
    } else {
        /* Generic expansion with shifts and masks.  Byte 'a' is moved
           down into t0, byte 'b' is moved up into t1, then OR'd. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();

                                            /* arg = ......ab or xxxxxxab */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .......a or .xxxxxxa */
        if (!(flags & TCG_BSWAP_IZ)) {
            /* Input high bits unknown: clear them before combining. */
            tcg_gen_ext8u_i64(t0, t0);      /*  t0 = .......a */
        }

        if (flags & TCG_BSWAP_OS) {
            /* Position 'b' at the top to sign-extend on the way down. */
            tcg_gen_shli_i64(t1, arg, 56);  /*  t1 = b....... */
            tcg_gen_sari_i64(t1, t1, 48);   /*  t1 = ssssssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i64(t1, arg);     /*  t1 = .......b */
            tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = ......b. */
        } else {
            /* Neither output flag: bits above the result are undefined. */
            tcg_gen_shli_i64(t1, arg, 8);   /*  t1 = xxxxxab. */
        }

        tcg_gen_or_i64(ret, t0, t1);        /* ret = ......ba (OZ) */
                                            /*       ssssssba (OS) */
                                            /*       xxxxxaba (no flag) */
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2243 
2244 /*
2245  * bswap32_i64: 32-bit byte swap on the low bits of a 64-bit value.
2246  *
2247  * Byte pattern: xxxxabcd -> yyyydcba
2248  *
2249  * With TCG_BSWAP_IZ, x == zero, else undefined.
2250  * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
2251  */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap the low half; high half is sign or zero per flags. */
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
    } else {
        /* Generic expansion: swap adjacent bytes with the 0x00ff00ff
           mask, then swap the two halfwords of the 32-bit result. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);

                                            /* arg = xxxxabcd */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .xxxxabc */
        tcg_gen_and_i64(t1, arg, t2);       /*  t1 = .....b.d */
        tcg_gen_and_i64(t0, t0, t2);        /*  t0 = .....a.c */
        tcg_gen_shli_i64(t1, t1, 8);        /*  t1 = ....b.d. */
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ....badc */

        tcg_gen_shli_i64(t1, ret, 48);      /*  t1 = dc...... */
        tcg_gen_shri_i64(t0, ret, 16);      /*  t0 = ......ba */
        if (flags & TCG_BSWAP_OS) {
            /* Arithmetic shift propagates the sign of byte 'd'. */
            tcg_gen_sari_i64(t1, t1, 32);   /*  t1 = ssssdc.. */
        } else {
            tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
        }
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ssssdcba (OS) */
                                            /*       ....dcba (else) */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2292 
2293 /*
2294  * bswap64_i64: 64-bit byte swap on a 64-bit value.
2295  *
2296  * Byte pattern: abcdefgh -> hgfedcba
2297  */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Byte-swap each 32-bit half, then exchange the halves.
           Temporaries allow ret to alias arg. */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
    } else {
        /* Generic expansion: swap bytes, then halfwords, then words,
           doubling the swap granularity at each step. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();

                                        /* arg = abcdefgh */
        tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
        tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
        tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */

        tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
        tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
        tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
        tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */

        tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
        tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    }
}
2342 
2343 /*
2344  * hswap_i64: Swap 16-bit halfwords within a 64-bit value.
2345  * See also include/qemu/bitops.h, hswap64.
2346  *
2347  * Byte pattern: abcdefgh -> ghefcdab
2348  */
void tcg_gen_hswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    uint64_t m = 0x0000ffff0000ffffull;
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();

    /* Rotating by 32 first swaps the words; then the two halfwords
       within each word are exchanged with shifts and the mask m. */
                                        /* arg = abcdefgh */
    tcg_gen_rotli_i64(t1, arg, 32);     /*  t1 = efghabcd */
    tcg_gen_andi_i64(t0, t1, m);        /*  t0 = ..gh..cd */
    tcg_gen_shli_i64(t0, t0, 16);       /*  t0 = gh..cd.. */
    tcg_gen_shri_i64(t1, t1, 16);       /*  t1 = ..efghab */
    tcg_gen_andi_i64(t1, t1, m);        /*  t1 = ..ef..ab */
    tcg_gen_or_i64(ret, t0, t1);        /* ret = ghefcdab */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
2366 
2367 /*
2368  * wswap_i64: Swap 32-bit words within a 64-bit value.
2369  *
2370  * Byte pattern: abcdefgh -> efghabcd
2371  */
void tcg_gen_wswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /* Swapping 2 32-bit elements is a rotate (by half the width). */
    tcg_gen_rotli_i64(ret, arg, 32);
}
2377 
2378 void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
2379 {
2380     if (TCG_TARGET_REG_BITS == 32) {
2381         tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2382         tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
2383     } else if (TCG_TARGET_HAS_not_i64) {
2384         tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
2385     } else {
2386         tcg_gen_xori_i64(ret, arg, -1);
2387     }
2388 }
2389 
2390 void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2391 {
2392     if (TCG_TARGET_REG_BITS == 32) {
2393         tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2394         tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2395     } else if (TCG_TARGET_HAS_andc_i64) {
2396         tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
2397     } else {
2398         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2399         tcg_gen_not_i64(t0, arg2);
2400         tcg_gen_and_i64(ret, arg1, t0);
2401         tcg_temp_free_i64(t0);
2402     }
2403 }
2404 
2405 void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2406 {
2407     if (TCG_TARGET_REG_BITS == 32) {
2408         tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2409         tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2410     } else if (TCG_TARGET_HAS_eqv_i64) {
2411         tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
2412     } else {
2413         tcg_gen_xor_i64(ret, arg1, arg2);
2414         tcg_gen_not_i64(ret, ret);
2415     }
2416 }
2417 
2418 void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2419 {
2420     if (TCG_TARGET_REG_BITS == 32) {
2421         tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2422         tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2423     } else if (TCG_TARGET_HAS_nand_i64) {
2424         tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
2425     } else {
2426         tcg_gen_and_i64(ret, arg1, arg2);
2427         tcg_gen_not_i64(ret, ret);
2428     }
2429 }
2430 
2431 void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2432 {
2433     if (TCG_TARGET_REG_BITS == 32) {
2434         tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2435         tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2436     } else if (TCG_TARGET_HAS_nor_i64) {
2437         tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
2438     } else {
2439         tcg_gen_or_i64(ret, arg1, arg2);
2440         tcg_gen_not_i64(ret, ret);
2441     }
2442 }
2443 
2444 void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2445 {
2446     if (TCG_TARGET_REG_BITS == 32) {
2447         tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2448         tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2449     } else if (TCG_TARGET_HAS_orc_i64) {
2450         tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
2451     } else {
2452         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2453         tcg_gen_not_i64(t0, arg2);
2454         tcg_gen_or_i64(ret, arg1, t0);
2455         tcg_temp_free_i64(t0);
2456     }
2457 }
2458 
2459 void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2460 {
2461     if (TCG_TARGET_HAS_clz_i64) {
2462         tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
2463     } else {
2464         gen_helper_clz_i64(ret, arg1, arg2);
2465     }
2466 }
2467 
/* Count leading zeros of arg1; arg2 is the result for arg1 == 0. */
void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_clz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        /* t = clz32(low), or arg2 - 32 if low == 0; the "- 32" bias
           (32-bit wraparound) cancels against the "+ 32" below, so t
           becomes clz32(low) + 32 or arg2. */
        tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
        tcg_gen_addi_i32(t, t, 32);
        /* If the high half is non-zero its clz is the answer;
           otherwise fall back to t. */
        tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_clz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2483 
/* Count trailing zeros of arg1; arg2 is the result for arg1 == 0. */
void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_ctz_i64) {
        tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 z, t = tcg_temp_ebb_new_i64();

        if (TCG_TARGET_HAS_ctpop_i64) {
            /* ctz(x) == popcount((x - 1) & ~x): the mask of bits
               below the lowest set bit. */
            tcg_gen_subi_i64(t, arg1, 1);
            tcg_gen_andc_i64(t, t, arg1);
            tcg_gen_ctpop_i64(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 64, don't fight it.  */
            /* Isolate the lowest set bit, then ctz(x) == 63 - clz(bit),
               computed as clz ^ 63. */
            tcg_gen_neg_i64(t, arg1);
            tcg_gen_and_i64(t, t, arg1);
            tcg_gen_clzi_i64(t, t, 64);
            tcg_gen_xori_i64(t, t, 63);
        }
        /* Select arg2 when arg1 == 0, per the ctz contract. */
        z = tcg_constant_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i64(t);
        tcg_temp_free_i64(z);
    } else {
        gen_helper_ctz_i64(ret, arg1, arg2);
    }
}
2510 
/* Count trailing zeros of arg1; arg2 is the result for arg1 == 0. */
void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_ctz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t32 = tcg_temp_ebb_new_i32();
        /* t32 = ctz32(high) + 32, or arg2 if high == 0; the "- 32"
           bias wraps and cancels against the "+ 32". */
        tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
        tcg_gen_addi_i32(t32, t32, 32);
        /* If the low half is non-zero its ctz is the answer;
           otherwise fall back to t32. */
        tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t32);
    } else if (!TCG_TARGET_HAS_ctz_i64
               && TCG_TARGET_HAS_ctpop_i64
               && arg2 == 64) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        /* ctz(x, 64) == popcount((x - 1) & ~x), including x == 0. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(ret, t);
        tcg_temp_free_i64(t);
    } else {
        tcg_gen_ctz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2535 
2536 void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
2537 {
2538     if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
2539         TCGv_i64 t = tcg_temp_ebb_new_i64();
2540         tcg_gen_sari_i64(t, arg, 63);
2541         tcg_gen_xor_i64(t, t, arg);
2542         tcg_gen_clzi_i64(t, t, 64);
2543         tcg_gen_subi_i64(ret, t, 1);
2544         tcg_temp_free_i64(t);
2545     } else {
2546         gen_helper_clrsb_i64(ret, arg);
2547     }
2548 }
2549 
2550 void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
2551 {
2552     if (TCG_TARGET_HAS_ctpop_i64) {
2553         tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
2554     } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
2555         tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
2556         tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
2557         tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
2558         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2559     } else {
2560         gen_helper_ctpop_i64(ret, arg1);
2561     }
2562 }
2563 
2564 void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2565 {
2566     if (TCG_TARGET_HAS_rot_i64) {
2567         tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
2568     } else {
2569         TCGv_i64 t0, t1;
2570         t0 = tcg_temp_ebb_new_i64();
2571         t1 = tcg_temp_ebb_new_i64();
2572         tcg_gen_shl_i64(t0, arg1, arg2);
2573         tcg_gen_subfi_i64(t1, 64, arg2);
2574         tcg_gen_shr_i64(t1, arg1, t1);
2575         tcg_gen_or_i64(ret, t0, t1);
2576         tcg_temp_free_i64(t0);
2577         tcg_temp_free_i64(t1);
2578     }
2579 }
2580 
2581 void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2582 {
2583     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2584     /* some cases can be optimized here */
2585     if (arg2 == 0) {
2586         tcg_gen_mov_i64(ret, arg1);
2587     } else if (TCG_TARGET_HAS_rot_i64) {
2588         tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
2589     } else {
2590         TCGv_i64 t0, t1;
2591         t0 = tcg_temp_ebb_new_i64();
2592         t1 = tcg_temp_ebb_new_i64();
2593         tcg_gen_shli_i64(t0, arg1, arg2);
2594         tcg_gen_shri_i64(t1, arg1, 64 - arg2);
2595         tcg_gen_or_i64(ret, t0, t1);
2596         tcg_temp_free_i64(t0);
2597         tcg_temp_free_i64(t1);
2598     }
2599 }
2600 
2601 void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2602 {
2603     if (TCG_TARGET_HAS_rot_i64) {
2604         tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
2605     } else {
2606         TCGv_i64 t0, t1;
2607         t0 = tcg_temp_ebb_new_i64();
2608         t1 = tcg_temp_ebb_new_i64();
2609         tcg_gen_shr_i64(t0, arg1, arg2);
2610         tcg_gen_subfi_i64(t1, 64, arg2);
2611         tcg_gen_shl_i64(t1, arg1, t1);
2612         tcg_gen_or_i64(ret, t0, t1);
2613         tcg_temp_free_i64(t0);
2614         tcg_temp_free_i64(t1);
2615     }
2616 }
2617 
2618 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2619 {
2620     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2621     /* some cases can be optimized here */
2622     if (arg2 == 0) {
2623         tcg_gen_mov_i64(ret, arg1);
2624     } else {
2625         tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
2626     }
2627 }
2628 
/* Deposit the low 'len' bits of arg2 into arg1 at bit offset 'ofs'. */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (len == 64) {
        /* The whole word is replaced. */
        tcg_gen_mov_i64(ret, arg2);
        return;
    }

    if (TCG_TARGET_REG_BITS == 64) {
        if (TCG_TARGET_deposit_valid(TCG_TYPE_I64, ofs, len)) {
            /* The host supports this deposit directly. */
            tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
            return;
        }
    } else {
        /* 32-bit host: handle fields confined to one half. */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    t1 = tcg_temp_ebb_new_i64();

    if (TCG_TARGET_HAS_extract2_i64) {
        /* Fields touching either end of the word can be formed by
           concatenating arg1 and arg2 with a single extract2. */
        if (ofs + len == 64) {
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    /* Generic fallback: mask arg2 to 'len' bits, shift into place,
       clear the field in arg1, and OR together. */
    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* The shift itself discards the high bits; no masking needed. */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}
2692 
/*
 * Deposit the low LEN bits of ARG at bit offset OFS into a zero
 * background: ret = (arg & ((1 << len) - 1)) << ofs.
 */
void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        /* Field reaches bit 63: the shift discards the unwanted bits. */
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field at bit 0: a simple mask suffices. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_REG_BITS == 64 &&
               TCG_TARGET_deposit_valid(TCG_TYPE_I64, ofs, len)) {
        /* Native deposit into a constant-zero first operand. */
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: handle a field confined to one word. */
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        /* Last resort: explicit mask and shift. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}
2776 
/*
 * Zero-extract LEN bits from ARG at bit offset OFS:
 * ret = (arg >> ofs) & ((1 << len) - 1).
 */
void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        /* Field ends at bit 63: a logical shift is enough. */
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        /* Field starts at bit 0: a mask is enough. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts.  */
        goto do_shift_and;
    }

    /* Use the host's native extract when it supports this ofs/len. */
    if (TCG_TARGET_extract_valid(TCG_TYPE_I64, ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        /* Otherwise: shift the field to the top, then back down. */
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}
2857 
2858 void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
2859                           unsigned int ofs, unsigned int len)
2860 {
2861     tcg_debug_assert(ofs < 64);
2862     tcg_debug_assert(len > 0);
2863     tcg_debug_assert(len <= 64);
2864     tcg_debug_assert(ofs + len <= 64);
2865 
2866     /* Canonicalize certain special cases, even if sextract is supported.  */
2867     if (ofs + len == 64) {
2868         tcg_gen_sari_i64(ret, arg, 64 - len);
2869         return;
2870     }
2871     if (ofs == 0) {
2872         switch (len) {
2873         case 32:
2874             tcg_gen_ext32s_i64(ret, arg);
2875             return;
2876         case 16:
2877             tcg_gen_ext16s_i64(ret, arg);
2878             return;
2879         case 8:
2880             tcg_gen_ext8s_i64(ret, arg);
2881             return;
2882         }
2883     }
2884 
2885     if (TCG_TARGET_REG_BITS == 32) {
2886         /* Look for a 32-bit extract within one of the two words.  */
2887         if (ofs >= 32) {
2888             tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
2889         } else if (ofs + len <= 32) {
2890             tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2891         } else if (ofs == 0) {
2892             tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2893             tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
2894             return;
2895         } else if (len > 32) {
2896             TCGv_i32 t = tcg_temp_ebb_new_i32();
2897             /* Extract the bits for the high word normally.  */
2898             tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
2899             /* Shift the field down for the low part.  */
2900             tcg_gen_shri_i64(ret, arg, ofs);
2901             /* Overwrite the shift into the high part.  */
2902             tcg_gen_mov_i32(TCGV_HIGH(ret), t);
2903             tcg_temp_free_i32(t);
2904             return;
2905         } else {
2906             /* Shift the field down for the low part, such that the
2907                field sits at the MSB.  */
2908             tcg_gen_shri_i64(ret, arg, ofs + len - 32);
2909             /* Shift the field down from the MSB, sign extending.  */
2910             tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
2911         }
2912         /* Sign-extend the field from 32 bits.  */
2913         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2914         return;
2915     }
2916 
2917     if (TCG_TARGET_sextract_valid(TCG_TYPE_I64, ofs, len)) {
2918         tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
2919         return;
2920     }
2921 
2922     /* Assume that sign-extension, if available, is cheaper than a shift.  */
2923     switch (ofs + len) {
2924     case 32:
2925         if (TCG_TARGET_HAS_ext32s_i64) {
2926             tcg_gen_ext32s_i64(ret, arg);
2927             tcg_gen_sari_i64(ret, ret, ofs);
2928             return;
2929         }
2930         break;
2931     case 16:
2932         if (TCG_TARGET_HAS_ext16s_i64) {
2933             tcg_gen_ext16s_i64(ret, arg);
2934             tcg_gen_sari_i64(ret, ret, ofs);
2935             return;
2936         }
2937         break;
2938     case 8:
2939         if (TCG_TARGET_HAS_ext8s_i64) {
2940             tcg_gen_ext8s_i64(ret, arg);
2941             tcg_gen_sari_i64(ret, ret, ofs);
2942             return;
2943         }
2944         break;
2945     }
2946     switch (len) {
2947     case 32:
2948         if (TCG_TARGET_HAS_ext32s_i64) {
2949             tcg_gen_shri_i64(ret, arg, ofs);
2950             tcg_gen_ext32s_i64(ret, ret);
2951             return;
2952         }
2953         break;
2954     case 16:
2955         if (TCG_TARGET_HAS_ext16s_i64) {
2956             tcg_gen_shri_i64(ret, arg, ofs);
2957             tcg_gen_ext16s_i64(ret, ret);
2958             return;
2959         }
2960         break;
2961     case 8:
2962         if (TCG_TARGET_HAS_ext8s_i64) {
2963             tcg_gen_shri_i64(ret, arg, ofs);
2964             tcg_gen_ext8s_i64(ret, ret);
2965             return;
2966         }
2967         break;
2968     }
2969     tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
2970     tcg_gen_sari_i64(ret, ret, 64 - len);
2971 }
2972 
2973 /*
2974  * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
2975  * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
2976  */
2977 void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
2978                           unsigned int ofs)
2979 {
2980     tcg_debug_assert(ofs <= 64);
2981     if (ofs == 0) {
2982         tcg_gen_mov_i64(ret, al);
2983     } else if (ofs == 64) {
2984         tcg_gen_mov_i64(ret, ah);
2985     } else if (al == ah) {
2986         tcg_gen_rotri_i64(ret, al, ofs);
2987     } else if (TCG_TARGET_HAS_extract2_i64) {
2988         tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
2989     } else {
2990         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2991         tcg_gen_shri_i64(t0, al, ofs);
2992         tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
2993         tcg_temp_free_i64(t0);
2994     }
2995 }
2996 
/* Branchless select: ret = (c1 COND c2) ? v1 : v2. */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* 32-bit host: evaluate the 64-bit comparison into a flag,
           then select each 32-bit half on that flag being nonzero. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);

        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, zero,
                            TCGV_LOW(v1), TCGV_LOW(v2));
        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, zero,
                            TCGV_HIGH(v1), TCGV_HIGH(v2));

        tcg_temp_free_i32(t0);
    }
}
3022 
/* Double-word addition: rh:rl = ah:al + bh:bl. */
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        /* Carry out of the low word: unsigned sum wrapped iff sum < al. */
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        /* Write RL last so it may alias AL/BL without corrupting inputs. */
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
3040 
/* Double-word subtraction: rh:rl = ah:al - bh:bl. */
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        /* Borrow out of the low word: al < bl unsigned. */
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        /* Write RL last so it may alias AL/BL without corrupting inputs. */
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
3058 
/* Unsigned 64x64->128 multiply: rh:rl = arg1 * arg2. */
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        /* Separate low and high-half multiplies; compute the low part
           into a temp so RL may alias the inputs. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        /* No host support at all: call a helper for the high half. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
3077 
/* Signed 64x64->128 multiply: rh:rl = arg1 * arg2. */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        /* Derive the signed high half from the unsigned product:
           signed_hi = unsigned_hi - (arg1 < 0 ? arg2 : 0)
                                   - (arg2 < 0 ? arg1 : 0).  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        TCGv_i64 t3 = tcg_temp_ebb_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        /* Write RL last so it may alias the inputs. */
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* No host support at all: call a helper for the high half. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
3114 
/*
 * Mixed signed/unsigned 64x64->128 multiply: rh:rl = arg1 * arg2,
 * with ARG1 treated as signed and ARG2 as unsigned.
 */
void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();
    TCGv_i64 t2 = tcg_temp_ebb_new_i64();
    tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
    /* Adjust for negative input for the signed arg1.  */
    tcg_gen_sari_i64(t2, arg1, 63);
    tcg_gen_and_i64(t2, t2, arg2);
    tcg_gen_sub_i64(rh, t1, t2);
    /* Write RL last so it may alias the inputs. */
    tcg_gen_mov_i64(rl, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}
3130 
/* Signed minimum: ret = (a < b) ? a : b. */
void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}
3135 
/* Unsigned minimum: ret = (a <u b) ? a : b. */
void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}
3140 
/* Signed maximum: ret = (a < b) ? b : a. */
void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}
3145 
/* Unsigned maximum: ret = (a <u b) ? b : a. */
void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}
3150 
3151 void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
3152 {
3153     TCGv_i64 t = tcg_temp_ebb_new_i64();
3154 
3155     tcg_gen_sari_i64(t, a, 63);
3156     tcg_gen_xor_i64(ret, a, t);
3157     tcg_gen_sub_i64(ret, ret, t);
3158     tcg_temp_free_i64(t);
3159 }
3160 
3161 /* Size changing operations.  */
3162 
/* Truncate a 64-bit value to its low 32 bits. */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32, TCG_TYPE_I32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /* Reinterpret the 64-bit temp as 32-bit; the backend reads
           only the low half. */
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}
3174 
/* Extract the high 32 bits of a 64-bit value. */
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrh_i64_i32, TCG_TYPE_I32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /* Shift the high half down, then reinterpret the temp as i32. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, (TCGv_i32)t);
        tcg_temp_free_i64(t);
    }
}
3189 
/* Zero-extend a 32-bit value to 64 bits. */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64, TCG_TYPE_I64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
3200 
/* Sign-extend a 32-bit value to 64 bits. */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        /* Replicate the sign bit into the high word. */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64, TCG_TYPE_I64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
3211 
/* Build a 64-bit value from two 32-bit halves: dest = high:low. */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        /* The halves map directly onto the two host words. */
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_ebb_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_deposit_valid(TCG_TYPE_I64, 32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
3237 
/* Split a 64-bit value into its 32-bit halves: lo = arg[31:0], hi = arg[63:32]. */
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}
3248 
/* Split ARG into two i64 temps: lo = zero-extended low half, hi = high half. */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
3254 
/* Combine the low 32 bits of LO and HI into one i64: ret = hi:lo. */
void tcg_gen_concat32_i64(TCGv_i64 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_deposit_i64(ret, lo, hi, 32, 32);
}
3259 
/* Split a 128-bit value into its 64-bit halves. */
void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}
3265 
/* Build a 128-bit value from two 64-bit halves: ret = hi:lo. */
void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}
3271 
3272 void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
3273 {
3274     if (dst != src) {
3275         tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
3276         tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
3277     }
3278 }
3279 
3280 void tcg_gen_ld_i128(TCGv_i128 ret, TCGv_ptr base, tcg_target_long offset)
3281 {
3282     if (HOST_BIG_ENDIAN) {
3283         tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset);
3284         tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset + 8);
3285     } else {
3286         tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset);
3287         tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset + 8);
3288     }
3289 }
3290 
3291 void tcg_gen_st_i128(TCGv_i128 val, TCGv_ptr base, tcg_target_long offset)
3292 {
3293     if (HOST_BIG_ENDIAN) {
3294         tcg_gen_st_i64(TCGV128_HIGH(val), base, offset);
3295         tcg_gen_st_i64(TCGV128_LOW(val), base, offset + 8);
3296     } else {
3297         tcg_gen_st_i64(TCGV128_LOW(val), base, offset);
3298         tcg_gen_st_i64(TCGV128_HIGH(val), base, offset + 8);
3299     }
3300 }
3301 
3302 /* QEMU specific operations.  */
3303 
/*
 * Emit the op that exits the generated code back to the main loop,
 * encoding TB pointer and exit index IDX into a single value.
 * TB may be NULL (with idx == 0) for a plain exit.
 */
void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb.  */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label.  */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    tcg_gen_op1i(INDEX_op_exit_tb, 0, val);
}
3334 
/* Emit a direct-chaining jump slot for exit IDX of the current TB. */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, 0, idx);
}
3349 
/*
 * Look up the next TB by current CPU state and jump to it directly,
 * falling back to a plain exit when indirect chaining is disabled.
 */
void tcg_gen_lookup_and_goto_ptr(void)
{
    TCGv_ptr ptr;

    if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
        tcg_gen_exit_tb(NULL, 0);
        return;
    }

    plugin_gen_disable_mem_helpers();
    ptr = tcg_temp_ebb_new_ptr();
    /* The helper returns the host code pointer for the next TB (or the
       epilogue), which goto_ptr then jumps through. */
    gen_helper_lookup_tb_ptr(ptr, tcg_env);
    tcg_gen_op1i(INDEX_op_goto_ptr, TCG_TYPE_PTR, tcgv_ptr_arg(ptr));
    tcg_temp_free_ptr(ptr);
}
3365