xref: /qemu/tcg/tci.c (revision fb5c28e1955537228fe59a901e6cf6258da682d5)
1 /*
2  * Tiny Code Interpreter for QEMU
3  *
4  * Copyright (c) 2009, 2011, 2016 Stefan Weil
5  *
6  * This program is free software: you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation, either version 2 of the License, or
9  * (at your option) any later version.
10  *
11  * This program is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14  * GNU General Public License for more details.
15  *
16  * You should have received a copy of the GNU General Public License
17  * along with this program.  If not, see <http://www.gnu.org/licenses/>.
18  */
19 
20 #include "qemu/osdep.h"
21 #include "tcg/tcg.h"
22 #include "tcg/helper-info.h"
23 #include "tcg/tcg-ldst.h"
24 #include "disas/dis-asm.h"
25 #include <ffi.h>
26 
27 
28 /*
29  * Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
30  * Without assertions, the interpreter runs much faster.
31  */
32 #if defined(CONFIG_DEBUG_TCG)
33 # define tci_assert(cond) assert(cond)
34 #else
35 # define tci_assert(cond) ((void)(cond))
36 #endif
37 
/*
 * Per-thread copy of the current TB pointer, published just before
 * calling out to a helper (see INDEX_op_call in tcg_qemu_tb_exec) so
 * helpers can recover a "return address" for the interpreted code.
 */
__thread uintptr_t tci_tb_ptr;
39 
/*
 * Write a 64-bit value into a pair of 32-bit host registers:
 * the low 32 bits go to regs[low_index], the high 32 bits to
 * regs[high_index].  The low half is written first; callers pass
 * distinct indices for the two halves.
 */
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    regs[low_index] = (uint32_t)value;
    regs[high_index] = value >> 32;
}
46 
/* Combine two 32-bit halves into a single 64-bit value. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    uint64_t hi = high;

    return (hi << 32) | low;
}
52 
53 /*
54  * Load sets of arguments all at once.  The naming convention is:
55  *   tci_args_<arguments>
56  * where arguments is a sequence of
57  *
58  *   b = immediate (bit position)
59  *   c = condition (TCGCond)
60  *   i = immediate (uint32_t)
61  *   I = immediate (tcg_target_ulong)
62  *   l = label or pointer
63  *   m = immediate (MemOpIdx)
64  *   n = immediate (call return length)
65  *   r = register
66  *   s = signed ldst offset
67  */
68 
/*
 * Decode a label operand: a signed 20-bit displacement in bits [12,32)
 * applied relative to tb_ptr.  A zero displacement encodes NULL.
 */
static void tci_args_l(uint32_t insn, const void *tb_ptr, void **l0)
{
    int diff = sextract32(insn, 12, 20);
    *l0 = diff ? (void *)tb_ptr + diff : NULL;
}
74 
/* Decode a single register operand from bits [8,12). */
static void tci_args_r(uint32_t insn, TCGReg *r0)
{
    *r0 = extract32(insn, 8, 4);
}
79 
/*
 * Decode a 4-bit immediate (call return-length code) from bits [8,12)
 * and a label from a signed 20-bit displacement in bits [12,32),
 * relative to tb_ptr (no NULL special case here, unlike tci_args_l).
 */
static void tci_args_nl(uint32_t insn, const void *tb_ptr,
                        uint8_t *n0, void **l1)
{
    *n0 = extract32(insn, 8, 4);
    *l1 = sextract32(insn, 12, 20) + (void *)tb_ptr;
}
86 
/*
 * Decode a register from bits [8,12) and a label from a signed 20-bit
 * displacement in bits [12,32), relative to tb_ptr.
 */
static void tci_args_rl(uint32_t insn, const void *tb_ptr,
                        TCGReg *r0, void **l1)
{
    *r0 = extract32(insn, 8, 4);
    *l1 = sextract32(insn, 12, 20) + (void *)tb_ptr;
}
93 
/* Decode two register operands: r0 from bits [8,12), r1 from bits [12,16). */
static void tci_args_rr(uint32_t insn, TCGReg *r0, TCGReg *r1)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
}
99 
/*
 * Decode a register from bits [8,12) and a sign-extended 20-bit
 * immediate from bits [12,32).
 */
static void tci_args_ri(uint32_t insn, TCGReg *r0, tcg_target_ulong *i1)
{
    *r0 = extract32(insn, 8, 4);
    *i1 = sextract32(insn, 12, 20);
}
105 
/*
 * Decode two registers (bits [8,12) and [12,16)) and a 16-bit
 * MemOpIdx from bits [16,32).
 */
static void tci_args_rrm(uint32_t insn, TCGReg *r0,
                         TCGReg *r1, MemOpIdx *m2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *m2 = extract32(insn, 16, 16);
}
113 
/* Decode three register operands from consecutive 4-bit fields at bit 8. */
static void tci_args_rrr(uint32_t insn, TCGReg *r0, TCGReg *r1, TCGReg *r2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
}
120 
/*
 * Decode two registers (bits [8,12) and [12,16)) and a signed 16-bit
 * load/store offset from bits [16,32).
 */
static void tci_args_rrs(uint32_t insn, TCGReg *r0, TCGReg *r1, int32_t *i2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *i2 = sextract32(insn, 16, 16);
}
127 
/*
 * Decode two registers and two 6-bit immediates (bit position and
 * length, for extract/sextract) from bits [16,22) and [22,28).
 */
static void tci_args_rrbb(uint32_t insn, TCGReg *r0, TCGReg *r1,
                          uint8_t *i2, uint8_t *i3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *i2 = extract32(insn, 16, 6);
    *i3 = extract32(insn, 22, 6);
}
136 
/*
 * Decode three registers (4-bit fields starting at bit 8) and a
 * 4-bit condition code from bits [20,24).
 */
static void tci_args_rrrc(uint32_t insn,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGCond *c3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *c3 = extract32(insn, 20, 4);
}
145 
/*
 * Decode three registers and two 6-bit immediates (deposit position
 * and length) from bits [20,26) and [26,32).
 */
static void tci_args_rrrbb(uint32_t insn, TCGReg *r0, TCGReg *r1,
                           TCGReg *r2, uint8_t *i3, uint8_t *i4)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *i3 = extract32(insn, 20, 6);
    *i4 = extract32(insn, 26, 6);
}
155 
/* Decode five register operands from consecutive 4-bit fields at bit 8. */
static void tci_args_rrrrr(uint32_t insn, TCGReg *r0, TCGReg *r1,
                           TCGReg *r2, TCGReg *r3, TCGReg *r4)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
    *r4 = extract32(insn, 24, 4);
}
165 
/* Decode four register operands from consecutive 4-bit fields at bit 8. */
static void tci_args_rrrr(uint32_t insn,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGReg *r3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
}
174 
/*
 * Decode five registers (4-bit fields starting at bit 8) and a
 * 4-bit condition code from bits [28,32).
 */
static void tci_args_rrrrrc(uint32_t insn, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGCond *c5)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
    *r4 = extract32(insn, 24, 4);
    *c5 = extract32(insn, 28, 4);
}
185 
/* Decode six register operands from consecutive 4-bit fields at bit 8. */
static void tci_args_rrrrrr(uint32_t insn, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGReg *r5)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
    *r4 = extract32(insn, 24, 4);
    *r5 = extract32(insn, 28, 4);
}
196 
197 static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
198 {
199     bool result = false;
200     int32_t i0 = u0;
201     int32_t i1 = u1;
202     switch (condition) {
203     case TCG_COND_EQ:
204         result = (u0 == u1);
205         break;
206     case TCG_COND_NE:
207         result = (u0 != u1);
208         break;
209     case TCG_COND_LT:
210         result = (i0 < i1);
211         break;
212     case TCG_COND_GE:
213         result = (i0 >= i1);
214         break;
215     case TCG_COND_LE:
216         result = (i0 <= i1);
217         break;
218     case TCG_COND_GT:
219         result = (i0 > i1);
220         break;
221     case TCG_COND_LTU:
222         result = (u0 < u1);
223         break;
224     case TCG_COND_GEU:
225         result = (u0 >= u1);
226         break;
227     case TCG_COND_LEU:
228         result = (u0 <= u1);
229         break;
230     case TCG_COND_GTU:
231         result = (u0 > u1);
232         break;
233     case TCG_COND_TSTEQ:
234         result = (u0 & u1) == 0;
235         break;
236     case TCG_COND_TSTNE:
237         result = (u0 & u1) != 0;
238         break;
239     default:
240         g_assert_not_reached();
241     }
242     return result;
243 }
244 
245 static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
246 {
247     bool result = false;
248     int64_t i0 = u0;
249     int64_t i1 = u1;
250     switch (condition) {
251     case TCG_COND_EQ:
252         result = (u0 == u1);
253         break;
254     case TCG_COND_NE:
255         result = (u0 != u1);
256         break;
257     case TCG_COND_LT:
258         result = (i0 < i1);
259         break;
260     case TCG_COND_GE:
261         result = (i0 >= i1);
262         break;
263     case TCG_COND_LE:
264         result = (i0 <= i1);
265         break;
266     case TCG_COND_GT:
267         result = (i0 > i1);
268         break;
269     case TCG_COND_LTU:
270         result = (u0 < u1);
271         break;
272     case TCG_COND_GEU:
273         result = (u0 >= u1);
274         break;
275     case TCG_COND_LEU:
276         result = (u0 <= u1);
277         break;
278     case TCG_COND_GTU:
279         result = (u0 > u1);
280         break;
281     case TCG_COND_TSTEQ:
282         result = (u0 & u1) == 0;
283         break;
284     case TCG_COND_TSTNE:
285         result = (u0 & u1) != 0;
286         break;
287     default:
288         g_assert_not_reached();
289     }
290     return result;
291 }
292 
293 static uint64_t tci_qemu_ld(CPUArchState *env, uint64_t taddr,
294                             MemOpIdx oi, const void *tb_ptr)
295 {
296     MemOp mop = get_memop(oi);
297     uintptr_t ra = (uintptr_t)tb_ptr;
298 
299     switch (mop & MO_SSIZE) {
300     case MO_UB:
301         return helper_ldub_mmu(env, taddr, oi, ra);
302     case MO_SB:
303         return helper_ldsb_mmu(env, taddr, oi, ra);
304     case MO_UW:
305         return helper_lduw_mmu(env, taddr, oi, ra);
306     case MO_SW:
307         return helper_ldsw_mmu(env, taddr, oi, ra);
308     case MO_UL:
309         return helper_ldul_mmu(env, taddr, oi, ra);
310     case MO_SL:
311         return helper_ldsl_mmu(env, taddr, oi, ra);
312     case MO_UQ:
313         return helper_ldq_mmu(env, taddr, oi, ra);
314     default:
315         g_assert_not_reached();
316     }
317 }
318 
319 static void tci_qemu_st(CPUArchState *env, uint64_t taddr, uint64_t val,
320                         MemOpIdx oi, const void *tb_ptr)
321 {
322     MemOp mop = get_memop(oi);
323     uintptr_t ra = (uintptr_t)tb_ptr;
324 
325     switch (mop & MO_SIZE) {
326     case MO_UB:
327         helper_stb_mmu(env, taddr, val, oi, ra);
328         break;
329     case MO_UW:
330         helper_stw_mmu(env, taddr, val, oi, ra);
331         break;
332     case MO_UL:
333         helper_stl_mmu(env, taddr, val, oi, ra);
334         break;
335     case MO_UQ:
336         helper_stq_mmu(env, taddr, val, oi, ra);
337         break;
338     default:
339         g_assert_not_reached();
340     }
341 }
342 
/*
 * Case-label helpers for the interpreter switch: CASE_32_64(x) expands
 * to the _i32 case (plus the _i64 case on 64-bit hosts); CASE_64(x)
 * expands to the _i64 case only on 64-bit hosts, otherwise to nothing.
 */
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
354 
355 /* Interpret pseudo code in tb. */
/*
 * Disable CFI checks.
 * One possible operation in the pseudo code is a call to binary code,
 * so CFI checks must be disabled in the interpreter function.
 */
361 uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
362                                             const void *v_tb_ptr)
363 {
364     const uint32_t *tb_ptr = v_tb_ptr;
365     tcg_target_ulong regs[TCG_TARGET_NB_REGS];
366     uint64_t stack[(TCG_STATIC_CALL_ARGS_SIZE + TCG_STATIC_FRAME_SIZE)
367                    / sizeof(uint64_t)];
368 
369     regs[TCG_AREG0] = (tcg_target_ulong)env;
370     regs[TCG_REG_CALL_STACK] = (uintptr_t)stack;
371     tci_assert(tb_ptr);
372 
373     for (;;) {
374         uint32_t insn;
375         TCGOpcode opc;
376         TCGReg r0, r1, r2, r3, r4, r5;
377         tcg_target_ulong t1;
378         TCGCond condition;
379         uint8_t pos, len;
380         uint32_t tmp32;
381         uint64_t tmp64, taddr;
382         uint64_t T1, T2;
383         MemOpIdx oi;
384         int32_t ofs;
385         void *ptr;
386 
387         insn = *tb_ptr++;
388         opc = extract32(insn, 0, 8);
389 
390         switch (opc) {
391         case INDEX_op_call:
392             {
393                 void *call_slots[MAX_CALL_IARGS];
394                 ffi_cif *cif;
395                 void *func;
396                 unsigned i, s, n;
397 
398                 tci_args_nl(insn, tb_ptr, &len, &ptr);
399                 func = ((void **)ptr)[0];
400                 cif = ((void **)ptr)[1];
401 
402                 n = cif->nargs;
403                 for (i = s = 0; i < n; ++i) {
404                     ffi_type *t = cif->arg_types[i];
405                     call_slots[i] = &stack[s];
406                     s += DIV_ROUND_UP(t->size, 8);
407                 }
408 
409                 /* Helper functions may need to access the "return address" */
410                 tci_tb_ptr = (uintptr_t)tb_ptr;
411                 ffi_call(cif, func, stack, call_slots);
412             }
413 
414             switch (len) {
415             case 0: /* void */
416                 break;
417             case 1: /* uint32_t */
418                 /*
419                  * The result winds up "left-aligned" in the stack[0] slot.
420                  * Note that libffi has an odd special case in that it will
421                  * always widen an integral result to ffi_arg.
422                  */
423                 if (sizeof(ffi_arg) == 8) {
424                     regs[TCG_REG_R0] = (uint32_t)stack[0];
425                 } else {
426                     regs[TCG_REG_R0] = *(uint32_t *)stack;
427                 }
428                 break;
429             case 2: /* uint64_t */
430                 /*
431                  * For TCG_TARGET_REG_BITS == 32, the register pair
432                  * must stay in host memory order.
433                  */
434                 memcpy(&regs[TCG_REG_R0], stack, 8);
435                 break;
436             case 3: /* Int128 */
437                 memcpy(&regs[TCG_REG_R0], stack, 16);
438                 break;
439             default:
440                 g_assert_not_reached();
441             }
442             break;
443 
444         case INDEX_op_br:
445             tci_args_l(insn, tb_ptr, &ptr);
446             tb_ptr = ptr;
447             continue;
448         case INDEX_op_setcond_i32:
449             tci_args_rrrc(insn, &r0, &r1, &r2, &condition);
450             regs[r0] = tci_compare32(regs[r1], regs[r2], condition);
451             break;
452         case INDEX_op_movcond_i32:
453             tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
454             tmp32 = tci_compare32(regs[r1], regs[r2], condition);
455             regs[r0] = regs[tmp32 ? r3 : r4];
456             break;
457 #if TCG_TARGET_REG_BITS == 32
458         case INDEX_op_setcond2_i32:
459             tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
460             T1 = tci_uint64(regs[r2], regs[r1]);
461             T2 = tci_uint64(regs[r4], regs[r3]);
462             regs[r0] = tci_compare64(T1, T2, condition);
463             break;
464 #elif TCG_TARGET_REG_BITS == 64
465         case INDEX_op_setcond_i64:
466             tci_args_rrrc(insn, &r0, &r1, &r2, &condition);
467             regs[r0] = tci_compare64(regs[r1], regs[r2], condition);
468             break;
469         case INDEX_op_movcond_i64:
470             tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
471             tmp32 = tci_compare64(regs[r1], regs[r2], condition);
472             regs[r0] = regs[tmp32 ? r3 : r4];
473             break;
474 #endif
475         CASE_32_64(mov)
476             tci_args_rr(insn, &r0, &r1);
477             regs[r0] = regs[r1];
478             break;
479         case INDEX_op_tci_movi:
480             tci_args_ri(insn, &r0, &t1);
481             regs[r0] = t1;
482             break;
483         case INDEX_op_tci_movl:
484             tci_args_rl(insn, tb_ptr, &r0, &ptr);
485             regs[r0] = *(tcg_target_ulong *)ptr;
486             break;
487 
488             /* Load/store operations (32 bit). */
489 
490         CASE_32_64(ld8u)
491             tci_args_rrs(insn, &r0, &r1, &ofs);
492             ptr = (void *)(regs[r1] + ofs);
493             regs[r0] = *(uint8_t *)ptr;
494             break;
495         CASE_32_64(ld8s)
496             tci_args_rrs(insn, &r0, &r1, &ofs);
497             ptr = (void *)(regs[r1] + ofs);
498             regs[r0] = *(int8_t *)ptr;
499             break;
500         CASE_32_64(ld16u)
501             tci_args_rrs(insn, &r0, &r1, &ofs);
502             ptr = (void *)(regs[r1] + ofs);
503             regs[r0] = *(uint16_t *)ptr;
504             break;
505         CASE_32_64(ld16s)
506             tci_args_rrs(insn, &r0, &r1, &ofs);
507             ptr = (void *)(regs[r1] + ofs);
508             regs[r0] = *(int16_t *)ptr;
509             break;
510         case INDEX_op_ld_i32:
511         CASE_64(ld32u)
512             tci_args_rrs(insn, &r0, &r1, &ofs);
513             ptr = (void *)(regs[r1] + ofs);
514             regs[r0] = *(uint32_t *)ptr;
515             break;
516         CASE_32_64(st8)
517             tci_args_rrs(insn, &r0, &r1, &ofs);
518             ptr = (void *)(regs[r1] + ofs);
519             *(uint8_t *)ptr = regs[r0];
520             break;
521         CASE_32_64(st16)
522             tci_args_rrs(insn, &r0, &r1, &ofs);
523             ptr = (void *)(regs[r1] + ofs);
524             *(uint16_t *)ptr = regs[r0];
525             break;
526         case INDEX_op_st_i32:
527         CASE_64(st32)
528             tci_args_rrs(insn, &r0, &r1, &ofs);
529             ptr = (void *)(regs[r1] + ofs);
530             *(uint32_t *)ptr = regs[r0];
531             break;
532 
533             /* Arithmetic operations (mixed 32/64 bit). */
534 
535         CASE_32_64(add)
536             tci_args_rrr(insn, &r0, &r1, &r2);
537             regs[r0] = regs[r1] + regs[r2];
538             break;
539         CASE_32_64(sub)
540             tci_args_rrr(insn, &r0, &r1, &r2);
541             regs[r0] = regs[r1] - regs[r2];
542             break;
543         CASE_32_64(mul)
544             tci_args_rrr(insn, &r0, &r1, &r2);
545             regs[r0] = regs[r1] * regs[r2];
546             break;
547         CASE_32_64(and)
548             tci_args_rrr(insn, &r0, &r1, &r2);
549             regs[r0] = regs[r1] & regs[r2];
550             break;
551         CASE_32_64(or)
552             tci_args_rrr(insn, &r0, &r1, &r2);
553             regs[r0] = regs[r1] | regs[r2];
554             break;
555         CASE_32_64(xor)
556             tci_args_rrr(insn, &r0, &r1, &r2);
557             regs[r0] = regs[r1] ^ regs[r2];
558             break;
559 #if TCG_TARGET_HAS_andc_i32 || TCG_TARGET_HAS_andc_i64
560         CASE_32_64(andc)
561             tci_args_rrr(insn, &r0, &r1, &r2);
562             regs[r0] = regs[r1] & ~regs[r2];
563             break;
564 #endif
565 #if TCG_TARGET_HAS_orc_i32 || TCG_TARGET_HAS_orc_i64
566         CASE_32_64(orc)
567             tci_args_rrr(insn, &r0, &r1, &r2);
568             regs[r0] = regs[r1] | ~regs[r2];
569             break;
570 #endif
571 #if TCG_TARGET_HAS_eqv_i32 || TCG_TARGET_HAS_eqv_i64
572         CASE_32_64(eqv)
573             tci_args_rrr(insn, &r0, &r1, &r2);
574             regs[r0] = ~(regs[r1] ^ regs[r2]);
575             break;
576 #endif
577 #if TCG_TARGET_HAS_nand_i32 || TCG_TARGET_HAS_nand_i64
578         CASE_32_64(nand)
579             tci_args_rrr(insn, &r0, &r1, &r2);
580             regs[r0] = ~(regs[r1] & regs[r2]);
581             break;
582 #endif
583 #if TCG_TARGET_HAS_nor_i32 || TCG_TARGET_HAS_nor_i64
584         CASE_32_64(nor)
585             tci_args_rrr(insn, &r0, &r1, &r2);
586             regs[r0] = ~(regs[r1] | regs[r2]);
587             break;
588 #endif
589 
590             /* Arithmetic operations (32 bit). */
591 
592         case INDEX_op_div_i32:
593             tci_args_rrr(insn, &r0, &r1, &r2);
594             regs[r0] = (int32_t)regs[r1] / (int32_t)regs[r2];
595             break;
596         case INDEX_op_divu_i32:
597             tci_args_rrr(insn, &r0, &r1, &r2);
598             regs[r0] = (uint32_t)regs[r1] / (uint32_t)regs[r2];
599             break;
600         case INDEX_op_rem_i32:
601             tci_args_rrr(insn, &r0, &r1, &r2);
602             regs[r0] = (int32_t)regs[r1] % (int32_t)regs[r2];
603             break;
604         case INDEX_op_remu_i32:
605             tci_args_rrr(insn, &r0, &r1, &r2);
606             regs[r0] = (uint32_t)regs[r1] % (uint32_t)regs[r2];
607             break;
608 #if TCG_TARGET_HAS_clz_i32
609         case INDEX_op_clz_i32:
610             tci_args_rrr(insn, &r0, &r1, &r2);
611             tmp32 = regs[r1];
612             regs[r0] = tmp32 ? clz32(tmp32) : regs[r2];
613             break;
614 #endif
615 #if TCG_TARGET_HAS_ctz_i32
616         case INDEX_op_ctz_i32:
617             tci_args_rrr(insn, &r0, &r1, &r2);
618             tmp32 = regs[r1];
619             regs[r0] = tmp32 ? ctz32(tmp32) : regs[r2];
620             break;
621 #endif
622 #if TCG_TARGET_HAS_ctpop_i32
623         case INDEX_op_ctpop_i32:
624             tci_args_rr(insn, &r0, &r1);
625             regs[r0] = ctpop32(regs[r1]);
626             break;
627 #endif
628 
629             /* Shift/rotate operations (32 bit). */
630 
631         case INDEX_op_shl_i32:
632             tci_args_rrr(insn, &r0, &r1, &r2);
633             regs[r0] = (uint32_t)regs[r1] << (regs[r2] & 31);
634             break;
635         case INDEX_op_shr_i32:
636             tci_args_rrr(insn, &r0, &r1, &r2);
637             regs[r0] = (uint32_t)regs[r1] >> (regs[r2] & 31);
638             break;
639         case INDEX_op_sar_i32:
640             tci_args_rrr(insn, &r0, &r1, &r2);
641             regs[r0] = (int32_t)regs[r1] >> (regs[r2] & 31);
642             break;
643 #if TCG_TARGET_HAS_rot_i32
644         case INDEX_op_rotl_i32:
645             tci_args_rrr(insn, &r0, &r1, &r2);
646             regs[r0] = rol32(regs[r1], regs[r2] & 31);
647             break;
648         case INDEX_op_rotr_i32:
649             tci_args_rrr(insn, &r0, &r1, &r2);
650             regs[r0] = ror32(regs[r1], regs[r2] & 31);
651             break;
652 #endif
653 #if TCG_TARGET_HAS_deposit_i32
654         case INDEX_op_deposit_i32:
655             tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
656             regs[r0] = deposit32(regs[r1], pos, len, regs[r2]);
657             break;
658 #endif
659 #if TCG_TARGET_HAS_extract_i32
660         case INDEX_op_extract_i32:
661             tci_args_rrbb(insn, &r0, &r1, &pos, &len);
662             regs[r0] = extract32(regs[r1], pos, len);
663             break;
664 #endif
665 #if TCG_TARGET_HAS_sextract_i32
666         case INDEX_op_sextract_i32:
667             tci_args_rrbb(insn, &r0, &r1, &pos, &len);
668             regs[r0] = sextract32(regs[r1], pos, len);
669             break;
670 #endif
671         case INDEX_op_brcond_i32:
672             tci_args_rl(insn, tb_ptr, &r0, &ptr);
673             if ((uint32_t)regs[r0]) {
674                 tb_ptr = ptr;
675             }
676             break;
677 #if TCG_TARGET_REG_BITS == 32 || TCG_TARGET_HAS_add2_i32
678         case INDEX_op_add2_i32:
679             tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
680             T1 = tci_uint64(regs[r3], regs[r2]);
681             T2 = tci_uint64(regs[r5], regs[r4]);
682             tci_write_reg64(regs, r1, r0, T1 + T2);
683             break;
684 #endif
685 #if TCG_TARGET_REG_BITS == 32 || TCG_TARGET_HAS_sub2_i32
686         case INDEX_op_sub2_i32:
687             tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
688             T1 = tci_uint64(regs[r3], regs[r2]);
689             T2 = tci_uint64(regs[r5], regs[r4]);
690             tci_write_reg64(regs, r1, r0, T1 - T2);
691             break;
692 #endif
693 #if TCG_TARGET_HAS_mulu2_i32
694         case INDEX_op_mulu2_i32:
695             tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
696             tmp64 = (uint64_t)(uint32_t)regs[r2] * (uint32_t)regs[r3];
697             tci_write_reg64(regs, r1, r0, tmp64);
698             break;
699 #endif
700 #if TCG_TARGET_HAS_muls2_i32
701         case INDEX_op_muls2_i32:
702             tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
703             tmp64 = (int64_t)(int32_t)regs[r2] * (int32_t)regs[r3];
704             tci_write_reg64(regs, r1, r0, tmp64);
705             break;
706 #endif
707 #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
708         CASE_32_64(ext8s)
709             tci_args_rr(insn, &r0, &r1);
710             regs[r0] = (int8_t)regs[r1];
711             break;
712 #endif
713 #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64 || \
714     TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
715         CASE_32_64(ext16s)
716             tci_args_rr(insn, &r0, &r1);
717             regs[r0] = (int16_t)regs[r1];
718             break;
719 #endif
720 #if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
721         CASE_32_64(ext8u)
722             tci_args_rr(insn, &r0, &r1);
723             regs[r0] = (uint8_t)regs[r1];
724             break;
725 #endif
726 #if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
727         CASE_32_64(ext16u)
728             tci_args_rr(insn, &r0, &r1);
729             regs[r0] = (uint16_t)regs[r1];
730             break;
731 #endif
732 #if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
733         CASE_32_64(bswap16)
734             tci_args_rr(insn, &r0, &r1);
735             regs[r0] = bswap16(regs[r1]);
736             break;
737 #endif
738 #if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
739         CASE_32_64(bswap32)
740             tci_args_rr(insn, &r0, &r1);
741             regs[r0] = bswap32(regs[r1]);
742             break;
743 #endif
744 #if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
745         CASE_32_64(not)
746             tci_args_rr(insn, &r0, &r1);
747             regs[r0] = ~regs[r1];
748             break;
749 #endif
750         CASE_32_64(neg)
751             tci_args_rr(insn, &r0, &r1);
752             regs[r0] = -regs[r1];
753             break;
754 #if TCG_TARGET_REG_BITS == 64
755             /* Load/store operations (64 bit). */
756 
757         case INDEX_op_ld32s_i64:
758             tci_args_rrs(insn, &r0, &r1, &ofs);
759             ptr = (void *)(regs[r1] + ofs);
760             regs[r0] = *(int32_t *)ptr;
761             break;
762         case INDEX_op_ld_i64:
763             tci_args_rrs(insn, &r0, &r1, &ofs);
764             ptr = (void *)(regs[r1] + ofs);
765             regs[r0] = *(uint64_t *)ptr;
766             break;
767         case INDEX_op_st_i64:
768             tci_args_rrs(insn, &r0, &r1, &ofs);
769             ptr = (void *)(regs[r1] + ofs);
770             *(uint64_t *)ptr = regs[r0];
771             break;
772 
773             /* Arithmetic operations (64 bit). */
774 
775         case INDEX_op_div_i64:
776             tci_args_rrr(insn, &r0, &r1, &r2);
777             regs[r0] = (int64_t)regs[r1] / (int64_t)regs[r2];
778             break;
779         case INDEX_op_divu_i64:
780             tci_args_rrr(insn, &r0, &r1, &r2);
781             regs[r0] = (uint64_t)regs[r1] / (uint64_t)regs[r2];
782             break;
783         case INDEX_op_rem_i64:
784             tci_args_rrr(insn, &r0, &r1, &r2);
785             regs[r0] = (int64_t)regs[r1] % (int64_t)regs[r2];
786             break;
787         case INDEX_op_remu_i64:
788             tci_args_rrr(insn, &r0, &r1, &r2);
789             regs[r0] = (uint64_t)regs[r1] % (uint64_t)regs[r2];
790             break;
791 #if TCG_TARGET_HAS_clz_i64
792         case INDEX_op_clz_i64:
793             tci_args_rrr(insn, &r0, &r1, &r2);
794             regs[r0] = regs[r1] ? clz64(regs[r1]) : regs[r2];
795             break;
796 #endif
797 #if TCG_TARGET_HAS_ctz_i64
798         case INDEX_op_ctz_i64:
799             tci_args_rrr(insn, &r0, &r1, &r2);
800             regs[r0] = regs[r1] ? ctz64(regs[r1]) : regs[r2];
801             break;
802 #endif
803 #if TCG_TARGET_HAS_ctpop_i64
804         case INDEX_op_ctpop_i64:
805             tci_args_rr(insn, &r0, &r1);
806             regs[r0] = ctpop64(regs[r1]);
807             break;
808 #endif
809 #if TCG_TARGET_HAS_mulu2_i64
810         case INDEX_op_mulu2_i64:
811             tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
812             mulu64(&regs[r0], &regs[r1], regs[r2], regs[r3]);
813             break;
814 #endif
815 #if TCG_TARGET_HAS_muls2_i64
816         case INDEX_op_muls2_i64:
817             tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
818             muls64(&regs[r0], &regs[r1], regs[r2], regs[r3]);
819             break;
820 #endif
821 #if TCG_TARGET_HAS_add2_i64
822         case INDEX_op_add2_i64:
823             tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
824             T1 = regs[r2] + regs[r4];
825             T2 = regs[r3] + regs[r5] + (T1 < regs[r2]);
826             regs[r0] = T1;
827             regs[r1] = T2;
828             break;
829 #endif
830 #if TCG_TARGET_HAS_add2_i64
831         case INDEX_op_sub2_i64:
832             tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
833             T1 = regs[r2] - regs[r4];
834             T2 = regs[r3] - regs[r5] - (regs[r2] < regs[r4]);
835             regs[r0] = T1;
836             regs[r1] = T2;
837             break;
838 #endif
839 
840             /* Shift/rotate operations (64 bit). */
841 
842         case INDEX_op_shl_i64:
843             tci_args_rrr(insn, &r0, &r1, &r2);
844             regs[r0] = regs[r1] << (regs[r2] & 63);
845             break;
846         case INDEX_op_shr_i64:
847             tci_args_rrr(insn, &r0, &r1, &r2);
848             regs[r0] = regs[r1] >> (regs[r2] & 63);
849             break;
850         case INDEX_op_sar_i64:
851             tci_args_rrr(insn, &r0, &r1, &r2);
852             regs[r0] = (int64_t)regs[r1] >> (regs[r2] & 63);
853             break;
854 #if TCG_TARGET_HAS_rot_i64
855         case INDEX_op_rotl_i64:
856             tci_args_rrr(insn, &r0, &r1, &r2);
857             regs[r0] = rol64(regs[r1], regs[r2] & 63);
858             break;
859         case INDEX_op_rotr_i64:
860             tci_args_rrr(insn, &r0, &r1, &r2);
861             regs[r0] = ror64(regs[r1], regs[r2] & 63);
862             break;
863 #endif
864 #if TCG_TARGET_HAS_deposit_i64
865         case INDEX_op_deposit_i64:
866             tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
867             regs[r0] = deposit64(regs[r1], pos, len, regs[r2]);
868             break;
869 #endif
870 #if TCG_TARGET_HAS_extract_i64
871         case INDEX_op_extract_i64:
872             tci_args_rrbb(insn, &r0, &r1, &pos, &len);
873             regs[r0] = extract64(regs[r1], pos, len);
874             break;
875 #endif
876 #if TCG_TARGET_HAS_sextract_i64
877         case INDEX_op_sextract_i64:
878             tci_args_rrbb(insn, &r0, &r1, &pos, &len);
879             regs[r0] = sextract64(regs[r1], pos, len);
880             break;
881 #endif
882         case INDEX_op_brcond_i64:
883             tci_args_rl(insn, tb_ptr, &r0, &ptr);
884             if (regs[r0]) {
885                 tb_ptr = ptr;
886             }
887             break;
888         case INDEX_op_ext32s_i64:
889         case INDEX_op_ext_i32_i64:
890             tci_args_rr(insn, &r0, &r1);
891             regs[r0] = (int32_t)regs[r1];
892             break;
893         case INDEX_op_ext32u_i64:
894         case INDEX_op_extu_i32_i64:
895             tci_args_rr(insn, &r0, &r1);
896             regs[r0] = (uint32_t)regs[r1];
897             break;
898 #if TCG_TARGET_HAS_bswap64_i64
899         case INDEX_op_bswap64_i64:
900             tci_args_rr(insn, &r0, &r1);
901             regs[r0] = bswap64(regs[r1]);
902             break;
903 #endif
904 #endif /* TCG_TARGET_REG_BITS == 64 */
905 
906             /* QEMU specific operations. */
907 
908         case INDEX_op_exit_tb:
909             tci_args_l(insn, tb_ptr, &ptr);
910             return (uintptr_t)ptr;
911 
912         case INDEX_op_goto_tb:
913             tci_args_l(insn, tb_ptr, &ptr);
914             tb_ptr = *(void **)ptr;
915             break;
916 
917         case INDEX_op_goto_ptr:
918             tci_args_r(insn, &r0);
919             ptr = (void *)regs[r0];
920             if (!ptr) {
921                 return 0;
922             }
923             tb_ptr = ptr;
924             break;
925 
926         case INDEX_op_qemu_ld_a32_i32:
927             tci_args_rrm(insn, &r0, &r1, &oi);
928             taddr = (uint32_t)regs[r1];
929             goto do_ld_i32;
930         case INDEX_op_qemu_ld_a64_i32:
931             if (TCG_TARGET_REG_BITS == 64) {
932                 tci_args_rrm(insn, &r0, &r1, &oi);
933                 taddr = regs[r1];
934             } else {
935                 tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
936                 taddr = tci_uint64(regs[r2], regs[r1]);
937                 oi = regs[r3];
938             }
939         do_ld_i32:
940             regs[r0] = tci_qemu_ld(env, taddr, oi, tb_ptr);
941             break;
942 
943         case INDEX_op_qemu_ld_a32_i64:
944             if (TCG_TARGET_REG_BITS == 64) {
945                 tci_args_rrm(insn, &r0, &r1, &oi);
946                 taddr = (uint32_t)regs[r1];
947             } else {
948                 tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
949                 taddr = (uint32_t)regs[r2];
950                 oi = regs[r3];
951             }
952             goto do_ld_i64;
953         case INDEX_op_qemu_ld_a64_i64:
954             if (TCG_TARGET_REG_BITS == 64) {
955                 tci_args_rrm(insn, &r0, &r1, &oi);
956                 taddr = regs[r1];
957             } else {
958                 tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
959                 taddr = tci_uint64(regs[r3], regs[r2]);
960                 oi = regs[r4];
961             }
962         do_ld_i64:
963             tmp64 = tci_qemu_ld(env, taddr, oi, tb_ptr);
964             if (TCG_TARGET_REG_BITS == 32) {
965                 tci_write_reg64(regs, r1, r0, tmp64);
966             } else {
967                 regs[r0] = tmp64;
968             }
969             break;
970 
971         case INDEX_op_qemu_st_a32_i32:
972             tci_args_rrm(insn, &r0, &r1, &oi);
973             taddr = (uint32_t)regs[r1];
974             goto do_st_i32;
975         case INDEX_op_qemu_st_a64_i32:
976             if (TCG_TARGET_REG_BITS == 64) {
977                 tci_args_rrm(insn, &r0, &r1, &oi);
978                 taddr = regs[r1];
979             } else {
980                 tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
981                 taddr = tci_uint64(regs[r2], regs[r1]);
982                 oi = regs[r3];
983             }
984         do_st_i32:
985             tci_qemu_st(env, taddr, regs[r0], oi, tb_ptr);
986             break;
987 
988         case INDEX_op_qemu_st_a32_i64:
989             if (TCG_TARGET_REG_BITS == 64) {
990                 tci_args_rrm(insn, &r0, &r1, &oi);
991                 tmp64 = regs[r0];
992                 taddr = (uint32_t)regs[r1];
993             } else {
994                 tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
995                 tmp64 = tci_uint64(regs[r1], regs[r0]);
996                 taddr = (uint32_t)regs[r2];
997                 oi = regs[r3];
998             }
999             goto do_st_i64;
1000         case INDEX_op_qemu_st_a64_i64:
1001             if (TCG_TARGET_REG_BITS == 64) {
1002                 tci_args_rrm(insn, &r0, &r1, &oi);
1003                 tmp64 = regs[r0];
1004                 taddr = regs[r1];
1005             } else {
1006                 tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
1007                 tmp64 = tci_uint64(regs[r1], regs[r0]);
1008                 taddr = tci_uint64(regs[r3], regs[r2]);
1009                 oi = regs[r4];
1010             }
1011         do_st_i64:
1012             tci_qemu_st(env, taddr, tmp64, oi, tb_ptr);
1013             break;
1014 
1015         case INDEX_op_mb:
1016             /* Ensure ordering for all kinds */
1017             smp_mb();
1018             break;
1019         default:
1020             g_assert_not_reached();
1021         }
1022     }
1023 }
1024 
1025 /*
1026  * Disassembler that matches the interpreter
1027  */
1028 
1029 static const char *str_r(TCGReg r)
1030 {
1031     static const char regs[TCG_TARGET_NB_REGS][4] = {
1032         "r0", "r1", "r2",  "r3",  "r4",  "r5",  "r6",  "r7",
1033         "r8", "r9", "r10", "r11", "r12", "r13", "env", "sp"
1034     };
1035 
1036     QEMU_BUILD_BUG_ON(TCG_AREG0 != TCG_REG_R14);
1037     QEMU_BUILD_BUG_ON(TCG_REG_CALL_STACK != TCG_REG_R15);
1038 
1039     assert((unsigned)r < TCG_TARGET_NB_REGS);
1040     return regs[r];
1041 }
1042 
1043 static const char *str_c(TCGCond c)
1044 {
1045     static const char cond[16][8] = {
1046         [TCG_COND_NEVER] = "never",
1047         [TCG_COND_ALWAYS] = "always",
1048         [TCG_COND_EQ] = "eq",
1049         [TCG_COND_NE] = "ne",
1050         [TCG_COND_LT] = "lt",
1051         [TCG_COND_GE] = "ge",
1052         [TCG_COND_LE] = "le",
1053         [TCG_COND_GT] = "gt",
1054         [TCG_COND_LTU] = "ltu",
1055         [TCG_COND_GEU] = "geu",
1056         [TCG_COND_LEU] = "leu",
1057         [TCG_COND_GTU] = "gtu",
1058         [TCG_COND_TSTEQ] = "tsteq",
1059         [TCG_COND_TSTNE] = "tstne",
1060     };
1061 
1062     assert((unsigned)c < ARRAY_SIZE(cond));
1063     assert(cond[c][0] != 0);
1064     return cond[c];
1065 }
1066 
1067 /* Disassemble TCI bytecode. */
1068 int print_insn_tci(bfd_vma addr, disassemble_info *info)
1069 {
1070     const uint32_t *tb_ptr = (const void *)(uintptr_t)addr;
1071     const TCGOpDef *def;
1072     const char *op_name;
1073     uint32_t insn;
1074     TCGOpcode op;
1075     TCGReg r0, r1, r2, r3, r4, r5;
1076     tcg_target_ulong i1;
1077     int32_t s2;
1078     TCGCond c;
1079     MemOpIdx oi;
1080     uint8_t pos, len;
1081     void *ptr;
1082 
1083     /* TCI is always the host, so we don't need to load indirect. */
1084     insn = *tb_ptr++;
1085 
1086     info->fprintf_func(info->stream, "%08x  ", insn);
1087 
1088     op = extract32(insn, 0, 8);
1089     def = &tcg_op_defs[op];
1090     op_name = def->name;
1091 
1092     switch (op) {
1093     case INDEX_op_br:
1094     case INDEX_op_exit_tb:
1095     case INDEX_op_goto_tb:
1096         tci_args_l(insn, tb_ptr, &ptr);
1097         info->fprintf_func(info->stream, "%-12s  %p", op_name, ptr);
1098         break;
1099 
1100     case INDEX_op_goto_ptr:
1101         tci_args_r(insn, &r0);
1102         info->fprintf_func(info->stream, "%-12s  %s", op_name, str_r(r0));
1103         break;
1104 
1105     case INDEX_op_call:
1106         tci_args_nl(insn, tb_ptr, &len, &ptr);
1107         info->fprintf_func(info->stream, "%-12s  %d, %p", op_name, len, ptr);
1108         break;
1109 
1110     case INDEX_op_brcond_i32:
1111     case INDEX_op_brcond_i64:
1112         tci_args_rl(insn, tb_ptr, &r0, &ptr);
1113         info->fprintf_func(info->stream, "%-12s  %s, 0, ne, %p",
1114                            op_name, str_r(r0), ptr);
1115         break;
1116 
1117     case INDEX_op_setcond_i32:
1118     case INDEX_op_setcond_i64:
1119         tci_args_rrrc(insn, &r0, &r1, &r2, &c);
1120         info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s",
1121                            op_name, str_r(r0), str_r(r1), str_r(r2), str_c(c));
1122         break;
1123 
1124     case INDEX_op_tci_movi:
1125         tci_args_ri(insn, &r0, &i1);
1126         info->fprintf_func(info->stream, "%-12s  %s, 0x%" TCG_PRIlx,
1127                            op_name, str_r(r0), i1);
1128         break;
1129 
1130     case INDEX_op_tci_movl:
1131         tci_args_rl(insn, tb_ptr, &r0, &ptr);
1132         info->fprintf_func(info->stream, "%-12s  %s, %p",
1133                            op_name, str_r(r0), ptr);
1134         break;
1135 
1136     case INDEX_op_ld8u_i32:
1137     case INDEX_op_ld8u_i64:
1138     case INDEX_op_ld8s_i32:
1139     case INDEX_op_ld8s_i64:
1140     case INDEX_op_ld16u_i32:
1141     case INDEX_op_ld16u_i64:
1142     case INDEX_op_ld16s_i32:
1143     case INDEX_op_ld16s_i64:
1144     case INDEX_op_ld32u_i64:
1145     case INDEX_op_ld32s_i64:
1146     case INDEX_op_ld_i32:
1147     case INDEX_op_ld_i64:
1148     case INDEX_op_st8_i32:
1149     case INDEX_op_st8_i64:
1150     case INDEX_op_st16_i32:
1151     case INDEX_op_st16_i64:
1152     case INDEX_op_st32_i64:
1153     case INDEX_op_st_i32:
1154     case INDEX_op_st_i64:
1155         tci_args_rrs(insn, &r0, &r1, &s2);
1156         info->fprintf_func(info->stream, "%-12s  %s, %s, %d",
1157                            op_name, str_r(r0), str_r(r1), s2);
1158         break;
1159 
1160     case INDEX_op_mov_i32:
1161     case INDEX_op_mov_i64:
1162     case INDEX_op_ext8s_i32:
1163     case INDEX_op_ext8s_i64:
1164     case INDEX_op_ext8u_i32:
1165     case INDEX_op_ext8u_i64:
1166     case INDEX_op_ext16s_i32:
1167     case INDEX_op_ext16s_i64:
1168     case INDEX_op_ext16u_i32:
1169     case INDEX_op_ext32s_i64:
1170     case INDEX_op_ext32u_i64:
1171     case INDEX_op_ext_i32_i64:
1172     case INDEX_op_extu_i32_i64:
1173     case INDEX_op_bswap16_i32:
1174     case INDEX_op_bswap16_i64:
1175     case INDEX_op_bswap32_i32:
1176     case INDEX_op_bswap32_i64:
1177     case INDEX_op_bswap64_i64:
1178     case INDEX_op_not_i32:
1179     case INDEX_op_not_i64:
1180     case INDEX_op_neg_i32:
1181     case INDEX_op_neg_i64:
1182     case INDEX_op_ctpop_i32:
1183     case INDEX_op_ctpop_i64:
1184         tci_args_rr(insn, &r0, &r1);
1185         info->fprintf_func(info->stream, "%-12s  %s, %s",
1186                            op_name, str_r(r0), str_r(r1));
1187         break;
1188 
1189     case INDEX_op_add_i32:
1190     case INDEX_op_add_i64:
1191     case INDEX_op_sub_i32:
1192     case INDEX_op_sub_i64:
1193     case INDEX_op_mul_i32:
1194     case INDEX_op_mul_i64:
1195     case INDEX_op_and_i32:
1196     case INDEX_op_and_i64:
1197     case INDEX_op_or_i32:
1198     case INDEX_op_or_i64:
1199     case INDEX_op_xor_i32:
1200     case INDEX_op_xor_i64:
1201     case INDEX_op_andc_i32:
1202     case INDEX_op_andc_i64:
1203     case INDEX_op_orc_i32:
1204     case INDEX_op_orc_i64:
1205     case INDEX_op_eqv_i32:
1206     case INDEX_op_eqv_i64:
1207     case INDEX_op_nand_i32:
1208     case INDEX_op_nand_i64:
1209     case INDEX_op_nor_i32:
1210     case INDEX_op_nor_i64:
1211     case INDEX_op_div_i32:
1212     case INDEX_op_div_i64:
1213     case INDEX_op_rem_i32:
1214     case INDEX_op_rem_i64:
1215     case INDEX_op_divu_i32:
1216     case INDEX_op_divu_i64:
1217     case INDEX_op_remu_i32:
1218     case INDEX_op_remu_i64:
1219     case INDEX_op_shl_i32:
1220     case INDEX_op_shl_i64:
1221     case INDEX_op_shr_i32:
1222     case INDEX_op_shr_i64:
1223     case INDEX_op_sar_i32:
1224     case INDEX_op_sar_i64:
1225     case INDEX_op_rotl_i32:
1226     case INDEX_op_rotl_i64:
1227     case INDEX_op_rotr_i32:
1228     case INDEX_op_rotr_i64:
1229     case INDEX_op_clz_i32:
1230     case INDEX_op_clz_i64:
1231     case INDEX_op_ctz_i32:
1232     case INDEX_op_ctz_i64:
1233         tci_args_rrr(insn, &r0, &r1, &r2);
1234         info->fprintf_func(info->stream, "%-12s  %s, %s, %s",
1235                            op_name, str_r(r0), str_r(r1), str_r(r2));
1236         break;
1237 
1238     case INDEX_op_deposit_i32:
1239     case INDEX_op_deposit_i64:
1240         tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
1241         info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %d, %d",
1242                            op_name, str_r(r0), str_r(r1), str_r(r2), pos, len);
1243         break;
1244 
1245     case INDEX_op_extract_i32:
1246     case INDEX_op_extract_i64:
1247     case INDEX_op_sextract_i32:
1248     case INDEX_op_sextract_i64:
1249         tci_args_rrbb(insn, &r0, &r1, &pos, &len);
1250         info->fprintf_func(info->stream, "%-12s  %s,%s,%d,%d",
1251                            op_name, str_r(r0), str_r(r1), pos, len);
1252         break;
1253 
1254     case INDEX_op_movcond_i32:
1255     case INDEX_op_movcond_i64:
1256     case INDEX_op_setcond2_i32:
1257         tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &c);
1258         info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %s, %s",
1259                            op_name, str_r(r0), str_r(r1), str_r(r2),
1260                            str_r(r3), str_r(r4), str_c(c));
1261         break;
1262 
1263     case INDEX_op_mulu2_i32:
1264     case INDEX_op_mulu2_i64:
1265     case INDEX_op_muls2_i32:
1266     case INDEX_op_muls2_i64:
1267         tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
1268         info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s",
1269                            op_name, str_r(r0), str_r(r1),
1270                            str_r(r2), str_r(r3));
1271         break;
1272 
1273     case INDEX_op_add2_i32:
1274     case INDEX_op_add2_i64:
1275     case INDEX_op_sub2_i32:
1276     case INDEX_op_sub2_i64:
1277         tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
1278         info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %s, %s",
1279                            op_name, str_r(r0), str_r(r1), str_r(r2),
1280                            str_r(r3), str_r(r4), str_r(r5));
1281         break;
1282 
1283     case INDEX_op_qemu_ld_a32_i32:
1284     case INDEX_op_qemu_st_a32_i32:
1285         len = 1 + 1;
1286         goto do_qemu_ldst;
1287     case INDEX_op_qemu_ld_a32_i64:
1288     case INDEX_op_qemu_st_a32_i64:
1289     case INDEX_op_qemu_ld_a64_i32:
1290     case INDEX_op_qemu_st_a64_i32:
1291         len = 1 + DIV_ROUND_UP(64, TCG_TARGET_REG_BITS);
1292         goto do_qemu_ldst;
1293     case INDEX_op_qemu_ld_a64_i64:
1294     case INDEX_op_qemu_st_a64_i64:
1295         len = 2 * DIV_ROUND_UP(64, TCG_TARGET_REG_BITS);
1296         goto do_qemu_ldst;
1297     do_qemu_ldst:
1298         switch (len) {
1299         case 2:
1300             tci_args_rrm(insn, &r0, &r1, &oi);
1301             info->fprintf_func(info->stream, "%-12s  %s, %s, %x",
1302                                op_name, str_r(r0), str_r(r1), oi);
1303             break;
1304         case 3:
1305             tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
1306             info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s",
1307                                op_name, str_r(r0), str_r(r1),
1308                                str_r(r2), str_r(r3));
1309             break;
1310         case 4:
1311             tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
1312             info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %s",
1313                                op_name, str_r(r0), str_r(r1),
1314                                str_r(r2), str_r(r3), str_r(r4));
1315             break;
1316         default:
1317             g_assert_not_reached();
1318         }
1319         break;
1320 
1321     case 0:
1322         /* tcg_out_nop_fill uses zeros */
1323         if (insn == 0) {
1324             info->fprintf_func(info->stream, "align");
1325             break;
1326         }
1327         /* fall through */
1328 
1329     default:
1330         info->fprintf_func(info->stream, "illegal opcode %d", op);
1331         break;
1332     }
1333 
1334     return sizeof(insn);
1335 }
1336