xref: /qemu/target/ppc/translate.c (revision c0e6b8b798bee5d8772ca8db19638ec89b47c946)
1 /*
2  *  PowerPC emulation for qemu: main translation routines.
3  *
4  *  Copyright (c) 2003-2007 Jocelyn Mayer
5  *  Copyright (C) 2011 Freescale Semiconductor, Inc.
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "internal.h"
24 #include "exec/exec-all.h"
25 #include "tcg/tcg-op.h"
26 #include "tcg/tcg-op-gvec.h"
27 #include "qemu/host-utils.h"
28 
29 #include "exec/helper-proto.h"
30 #include "exec/helper-gen.h"
31 
32 #include "exec/translator.h"
33 #include "exec/translation-block.h"
34 #include "exec/log.h"
35 #include "qemu/atomic128.h"
36 #include "spr_common.h"
37 #include "power8-pmu.h"
38 
39 #include "qemu/qemu-print.h"
40 #include "qapi/error.h"
41 
42 #define HELPER_H "helper.h"
43 #include "exec/helper-info.c.inc"
44 #undef  HELPER_H
45 
46 #define CPU_SINGLE_STEP 0x1
47 #define CPU_BRANCH_STEP 0x2
48 
49 /* Include definitions for instruction classes and implementation flags */
50 /* #define PPC_DEBUG_DISAS */
51 
52 #ifdef PPC_DEBUG_DISAS
53 #  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
54 #else
55 #  define LOG_DISAS(...) do { } while (0)
56 #endif
57 /*****************************************************************************/
58 /* Code translation helpers                                                  */
59 
60 /* global register indexes */
61 static char cpu_reg_names[10 * 3 + 22 * 4   /* GPR */
62                           + 10 * 4 + 22 * 5 /* SPE GPRh */
63                           + 8 * 5           /* CRF */];
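/*
 * The buffer above holds the NUL-terminated register names created in
 * ppc_translate_init(): "r0".."r9" take 3 bytes each, "r10".."r31" 4,
 * the SPE high halves "r0H".."r9H" 4 and "r10H".."r31H" 5, and
 * "crf0".."crf7" 5 bytes each.
 */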
64 static TCGv cpu_gpr[32];
65 static TCGv cpu_gprh[32];
66 static TCGv_i32 cpu_crf[8];
67 static TCGv cpu_nip;
68 static TCGv cpu_msr;
69 static TCGv cpu_ctr;
70 static TCGv cpu_lr;
71 #if defined(TARGET_PPC64)
72 static TCGv cpu_cfar;
73 #endif
74 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
75 static TCGv cpu_reserve;
76 static TCGv cpu_reserve_length;
77 static TCGv cpu_reserve_val;
78 #if defined(TARGET_PPC64)
79 static TCGv cpu_reserve_val2;
80 #endif
81 static TCGv cpu_fpscr;
82 static TCGv_i32 cpu_access_type;
83 
84 void ppc_translate_init(void)
85 {
86     int i;
87     char *p;
88     size_t cpu_reg_names_size;
89 
90     p = cpu_reg_names;
91     cpu_reg_names_size = sizeof(cpu_reg_names);
92 
93     for (i = 0; i < 8; i++) {
94         snprintf(p, cpu_reg_names_size, "crf%d", i);
95         cpu_crf[i] = tcg_global_mem_new_i32(tcg_env,
96                                             offsetof(CPUPPCState, crf[i]), p);
97         p += 5;
98         cpu_reg_names_size -= 5;
99     }
100 
101     for (i = 0; i < 32; i++) {
102         snprintf(p, cpu_reg_names_size, "r%d", i);
103         cpu_gpr[i] = tcg_global_mem_new(tcg_env,
104                                         offsetof(CPUPPCState, gpr[i]), p);
105         p += (i < 10) ? 3 : 4;
106         cpu_reg_names_size -= (i < 10) ? 3 : 4;
107         snprintf(p, cpu_reg_names_size, "r%dH", i);
108         cpu_gprh[i] = tcg_global_mem_new(tcg_env,
109                                          offsetof(CPUPPCState, gprh[i]), p);
110         p += (i < 10) ? 4 : 5;
111         cpu_reg_names_size -= (i < 10) ? 4 : 5;
112     }
113 
114     cpu_nip = tcg_global_mem_new(tcg_env,
115                                  offsetof(CPUPPCState, nip), "nip");
116 
117     cpu_msr = tcg_global_mem_new(tcg_env,
118                                  offsetof(CPUPPCState, msr), "msr");
119 
120     cpu_ctr = tcg_global_mem_new(tcg_env,
121                                  offsetof(CPUPPCState, ctr), "ctr");
122 
123     cpu_lr = tcg_global_mem_new(tcg_env,
124                                 offsetof(CPUPPCState, lr), "lr");
125 
126 #if defined(TARGET_PPC64)
127     cpu_cfar = tcg_global_mem_new(tcg_env,
128                                   offsetof(CPUPPCState, cfar), "cfar");
129 #endif
130 
131     cpu_xer = tcg_global_mem_new(tcg_env,
132                                  offsetof(CPUPPCState, xer), "xer");
133     cpu_so = tcg_global_mem_new(tcg_env,
134                                 offsetof(CPUPPCState, so), "SO");
135     cpu_ov = tcg_global_mem_new(tcg_env,
136                                 offsetof(CPUPPCState, ov), "OV");
137     cpu_ca = tcg_global_mem_new(tcg_env,
138                                 offsetof(CPUPPCState, ca), "CA");
139     cpu_ov32 = tcg_global_mem_new(tcg_env,
140                                   offsetof(CPUPPCState, ov32), "OV32");
141     cpu_ca32 = tcg_global_mem_new(tcg_env,
142                                   offsetof(CPUPPCState, ca32), "CA32");
143 
144     cpu_reserve = tcg_global_mem_new(tcg_env,
145                                      offsetof(CPUPPCState, reserve_addr),
146                                      "reserve_addr");
147     cpu_reserve_length = tcg_global_mem_new(tcg_env,
148                                             offsetof(CPUPPCState,
149                                                      reserve_length),
150                                             "reserve_length");
151     cpu_reserve_val = tcg_global_mem_new(tcg_env,
152                                          offsetof(CPUPPCState, reserve_val),
153                                          "reserve_val");
154 #if defined(TARGET_PPC64)
155     cpu_reserve_val2 = tcg_global_mem_new(tcg_env,
156                                           offsetof(CPUPPCState, reserve_val2),
157                                           "reserve_val2");
158 #endif
159 
160     cpu_fpscr = tcg_global_mem_new(tcg_env,
161                                    offsetof(CPUPPCState, fpscr), "fpscr");
162 
163     cpu_access_type = tcg_global_mem_new_i32(tcg_env,
164                                              offsetof(CPUPPCState, access_type),
165                                              "access_type");
166 }
167 
168 /* internal defines */
169 struct DisasContext {
170     DisasContextBase base;
171     target_ulong cia;  /* current instruction address */
172     uint32_t opcode;
173     /* Routine used to access memory */
174     bool pr, hv, dr, le_mode;
175     bool lazy_tlb_flush;
176     bool need_access_type;
177     int mem_idx;
178     int access_type;
179     /* Translation flags */
180     MemOp default_tcg_memop_mask;
181 #if defined(TARGET_PPC64)
182     powerpc_excp_t excp_model;
183     bool sf_mode;
184     bool has_cfar;
185     bool has_bhrb;
186 #endif
187     bool fpu_enabled;
188     bool altivec_enabled;
189     bool vsx_enabled;
190     bool spe_enabled;
191     bool tm_enabled;
192     bool gtse;
193     bool hr;
194     bool mmcr0_pmcc0;
195     bool mmcr0_pmcc1;
196     bool mmcr0_pmcjce;
197     bool pmc_other;
198     bool pmu_insn_cnt;
199     bool bhrb_enable;
200     ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
201     int singlestep_enabled;
202     uint32_t flags;
203     uint64_t insns_flags;
204     uint64_t insns_flags2;
205 };
206 
207 #define DISAS_EXIT         DISAS_TARGET_0  /* exit to main loop, pc updated */
208 #define DISAS_EXIT_UPDATE  DISAS_TARGET_1  /* exit to main loop, pc stale */
209 #define DISAS_CHAIN        DISAS_TARGET_2  /* lookup next tb, pc updated */
210 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3  /* lookup next tb, pc stale */
211 
212 /* Return true iff byteswap is needed in a scalar memop */
213 static inline bool need_byteswap(const DisasContext *ctx)
214 {
215 #if TARGET_BIG_ENDIAN
216      return ctx->le_mode;
217 #else
218      return !ctx->le_mode;
219 #endif
220 }
221 
222 /* True when active word size < size of target_long.  */
223 #ifdef TARGET_PPC64
224 # define NARROW_MODE(C)  (!(C)->sf_mode)
225 #else
226 # define NARROW_MODE(C)  0
227 #endif
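/*
 * e.g. a 64-bit implementation running with MSR[SF] = 0: effective
 * addresses and the NIP are then truncated to 32 bits.
 */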
228 
229 struct opc_handler_t {
230     /* invalid bits for instruction 1 (Rc(opcode) == 0) */
231     uint32_t inval1;
232     /* invalid bits for instruction 2 (Rc(opcode) == 1) */
233     uint32_t inval2;
234     /* instruction type */
235     uint64_t type;
236     /* extended instruction type */
237     uint64_t type2;
238     /* handler */
239     void (*handler)(DisasContext *ctx);
240 };
241 
242 static inline bool gen_serialize(DisasContext *ctx)
243 {
244     if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
245         /* Restart with exclusive lock.  */
246         gen_helper_exit_atomic(tcg_env);
247         ctx->base.is_jmp = DISAS_NORETURN;
248         return false;
249     }
250     return true;
251 }
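/*
 * When vCPUs run in parallel (CF_PARALLEL), gen_serialize() aborts the
 * translation and restarts the instruction under the exclusive execution
 * mode, so that updates to state shared between SMT threads happen with
 * all other CPUs stopped.
 */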
252 
253 #if !defined(CONFIG_USER_ONLY)
254 #if defined(TARGET_PPC64)
255 static inline bool gen_serialize_core(DisasContext *ctx)
256 {
257     if (ctx->flags & POWERPC_FLAG_SMT) {
258         return gen_serialize(ctx);
259     }
260     return true;
261 }
262 #endif
263 
264 static inline bool gen_serialize_core_lpar(DisasContext *ctx)
265 {
266 #if defined(TARGET_PPC64)
267     if (ctx->flags & POWERPC_FLAG_SMT_1LPAR) {
268         return gen_serialize(ctx);
269     }
270 #endif
271     return true;
272 }
273 #endif
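/*
 * gen_serialize_core() serializes whenever the core runs more than one
 * thread (POWERPC_FLAG_SMT); gen_serialize_core_lpar() only when all
 * threads of the core share a single LPAR (POWERPC_FLAG_SMT_1LPAR), as
 * used for LPAR-scoped resources such as the time base.
 */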
274 
275 /* SPR load/store helpers */
276 static inline void gen_load_spr(TCGv t, int reg)
277 {
278     tcg_gen_ld_tl(t, tcg_env, offsetof(CPUPPCState, spr[reg]));
279 }
280 
281 static inline void gen_store_spr(int reg, TCGv t)
282 {
283     tcg_gen_st_tl(t, tcg_env, offsetof(CPUPPCState, spr[reg]));
284 }
285 
286 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
287 {
288     if (ctx->need_access_type && ctx->access_type != access_type) {
289         tcg_gen_movi_i32(cpu_access_type, access_type);
290         ctx->access_type = access_type;
291     }
292 }
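/*
 * The access type is tracked lazily: env->access_type is only rewritten
 * when it changes, since it only needs to be up to date if a memory
 * access ends up faulting.
 */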
293 
294 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
295 {
296     if (NARROW_MODE(ctx)) {
297         nip = (uint32_t)nip;
298     }
299     tcg_gen_movi_tl(cpu_nip, nip);
300 }
301 
302 static void gen_exception_err_nip(DisasContext *ctx, uint32_t excp,
303                                   uint32_t error, target_ulong nip)
304 {
305     TCGv_i32 t0, t1;
306 
307     gen_update_nip(ctx, nip);
308     t0 = tcg_constant_i32(excp);
309     t1 = tcg_constant_i32(error);
310     gen_helper_raise_exception_err(tcg_env, t0, t1);
311     ctx->base.is_jmp = DISAS_NORETURN;
312 }
313 
314 static inline void gen_exception_err(DisasContext *ctx, uint32_t excp,
315                                      uint32_t error)
316 {
317     /*
318      * These are all synchronous exceptions; we set the PC back to the
319      * faulting instruction.
320      */
321     gen_exception_err_nip(ctx, excp, error, ctx->cia);
322 }
323 
324 static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
325                               target_ulong nip)
326 {
327     TCGv_i32 t0;
328 
329     gen_update_nip(ctx, nip);
330     t0 = tcg_constant_i32(excp);
331     gen_helper_raise_exception(tcg_env, t0);
332     ctx->base.is_jmp = DISAS_NORETURN;
333 }
334 
335 static inline void gen_exception(DisasContext *ctx, uint32_t excp)
336 {
337     /*
338      * These are all synchronous exceptions; we set the PC back to the
339      * faulting instruction.
340      */
341     gen_exception_nip(ctx, excp, ctx->cia);
342 }
343 
344 #if !defined(CONFIG_USER_ONLY)
345 static void gen_ppc_maybe_interrupt(DisasContext *ctx)
346 {
347     translator_io_start(&ctx->base);
348     gen_helper_ppc_maybe_interrupt(tcg_env);
349 }
350 #endif
351 
352 /*
353  * Raise the appropriate debug exception and prepare the SPR registers for
354  * it.
355  *
356  * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
357  * POWERPC_EXCP_DEBUG (on BookE).
358  */
359 static void gen_debug_exception(DisasContext *ctx, bool rfi_type)
360 {
361 #if !defined(CONFIG_USER_ONLY)
362     if (ctx->flags & POWERPC_FLAG_DE) {
363         target_ulong dbsr = 0;
364         if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
365             dbsr = DBCR0_ICMP;
366         } else {
367             /* Must have been a branch */
368             dbsr = DBCR0_BRT;
369         }
370         TCGv t0 = tcg_temp_new();
371         gen_load_spr(t0, SPR_BOOKE_DBSR);
372         tcg_gen_ori_tl(t0, t0, dbsr);
373         gen_store_spr(SPR_BOOKE_DBSR, t0);
374         gen_helper_raise_exception(tcg_env,
375                                    tcg_constant_i32(POWERPC_EXCP_DEBUG));
376         ctx->base.is_jmp = DISAS_NORETURN;
377     } else {
378         if (!rfi_type) { /* BookS does not single-step rfi-type instructions */
379             TCGv t0 = tcg_temp_new();
380             tcg_gen_movi_tl(t0, ctx->cia);
381             gen_helper_book3s_trace(tcg_env, t0);
382             ctx->base.is_jmp = DISAS_NORETURN;
383         }
384     }
385 #endif
386 }
387 
388 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
389 {
390     /* Will be converted to program check if needed */
391     gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
392 }
393 
394 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
395 {
396     gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
397 }
398 
399 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
400 {
401     /* Will be converted to program check if needed */
402     gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
403 }
404 
405 /*****************************************************************************/
406 /* SPR READ/WRITE CALLBACKS */
407 
408 void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
409 {
410 #if 0
411     sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
412     printf("ERROR: try to access SPR %d !\n", sprn);
413 #endif
414 }
415 
416 /* #define PPC_DUMP_SPR_ACCESSES */
417 
418 /*
419  * Generic callbacks:
420  * do nothing but store/retrieve the SPR value
421  */
422 static void spr_load_dump_spr(int sprn)
423 {
424 #ifdef PPC_DUMP_SPR_ACCESSES
425     TCGv_i32 t0 = tcg_constant_i32(sprn);
426     gen_helper_load_dump_spr(tcg_env, t0);
427 #endif
428 }
429 
430 void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
431 {
432     gen_load_spr(cpu_gpr[gprn], sprn);
433     spr_load_dump_spr(sprn);
434 }
435 
436 static void spr_store_dump_spr(int sprn)
437 {
438 #ifdef PPC_DUMP_SPR_ACCESSES
439     TCGv_i32 t0 = tcg_constant_i32(sprn);
440     gen_helper_store_dump_spr(tcg_env, t0);
441 #endif
442 }
443 
444 void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
445 {
446     gen_store_spr(sprn, cpu_gpr[gprn]);
447     spr_store_dump_spr(sprn);
448 }
449 
450 void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
451 {
452 #ifdef TARGET_PPC64
453     TCGv t0 = tcg_temp_new();
454     tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
455     gen_store_spr(sprn, t0);
456     spr_store_dump_spr(sprn);
457 #else
458     spr_write_generic(ctx, sprn, gprn);
459 #endif
460 }
461 
462 void spr_core_write_generic(DisasContext *ctx, int sprn, int gprn)
463 {
464     if (!(ctx->flags & POWERPC_FLAG_SMT)) {
465         spr_write_generic(ctx, sprn, gprn);
466         return;
467     }
468 
469     if (!gen_serialize(ctx)) {
470         return;
471     }
472 
473     gen_helper_spr_core_write_generic(tcg_env, tcg_constant_i32(sprn),
474                                       cpu_gpr[gprn]);
475     spr_store_dump_spr(sprn);
476 }
477 
478 void spr_core_write_generic32(DisasContext *ctx, int sprn, int gprn)
479 {
480     TCGv t0;
481 
482     if (!(ctx->flags & POWERPC_FLAG_SMT)) {
483         spr_write_generic32(ctx, sprn, gprn);
484         return;
485     }
486 
487     if (!gen_serialize(ctx)) {
488         return;
489     }
490 
491     t0 = tcg_temp_new();
492     tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
493     gen_helper_spr_core_write_generic(tcg_env, tcg_constant_i32(sprn), t0);
494     spr_store_dump_spr(sprn);
495 }
496 
497 void spr_core_lpar_write_generic(DisasContext *ctx, int sprn, int gprn)
498 {
499     if (ctx->flags & POWERPC_FLAG_SMT_1LPAR) {
500         spr_core_write_generic(ctx, sprn, gprn);
501     } else {
502         spr_write_generic(ctx, sprn, gprn);
503     }
504 }
505 
506 static void spr_write_CTRL_ST(DisasContext *ctx, int sprn, int gprn)
507 {
508     /* This does not implement >1 thread */
509     TCGv t0 = tcg_temp_new();
510     TCGv t1 = tcg_temp_new();
511     tcg_gen_extract_tl(t0, cpu_gpr[gprn], 0, 1); /* Extract RUN field */
512     tcg_gen_shli_tl(t1, t0, 8); /* Duplicate the bit in TS */
513     tcg_gen_or_tl(t1, t1, t0);
514     gen_store_spr(sprn, t1);
515 }
516 
517 void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn)
518 {
519     if (!(ctx->flags & POWERPC_FLAG_SMT_1LPAR)) {
520         /* CTRL behaves as 1-thread in LPAR-per-thread mode */
521         spr_write_CTRL_ST(ctx, sprn, gprn);
522         goto out;
523     }
524 
525     if (!gen_serialize(ctx)) {
526         return;
527     }
528 
529     gen_helper_spr_write_CTRL(tcg_env, tcg_constant_i32(sprn),
530                               cpu_gpr[gprn]);
531 out:
532     spr_store_dump_spr(sprn);
533 
534     /*
535      * SPR_CTRL writes must force a new translation block,
536      * allowing the PMU to calculate the run latch events with
537      * more accuracy.
538      */
539     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
540 }
541 
542 #if !defined(CONFIG_USER_ONLY)
543 void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
544 {
545     TCGv t0 = tcg_temp_new();
546     TCGv t1 = tcg_temp_new();
547     gen_load_spr(t0, sprn);
548     tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
549     tcg_gen_and_tl(t0, t0, t1);
550     gen_store_spr(sprn, t0);
551 }
552 
553 void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
554 {
555 }
556 
557 #endif
558 
559 /* SPR common to all PowerPC */
560 /* XER */
561 void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
562 {
563     TCGv dst = cpu_gpr[gprn];
564     TCGv t0 = tcg_temp_new();
565     TCGv t1 = tcg_temp_new();
566     TCGv t2 = tcg_temp_new();
567     tcg_gen_mov_tl(dst, cpu_xer);
568     tcg_gen_shli_tl(t0, cpu_so, XER_SO);
569     tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
570     tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
571     tcg_gen_or_tl(t0, t0, t1);
572     tcg_gen_or_tl(dst, dst, t2);
573     tcg_gen_or_tl(dst, dst, t0);
574     if (is_isa300(ctx)) {
575         tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
576         tcg_gen_or_tl(dst, dst, t0);
577         tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
578         tcg_gen_or_tl(dst, dst, t0);
579     }
580 }
581 
582 void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
583 {
584     TCGv src = cpu_gpr[gprn];
585     /* Write all flags; only the read path checks for isa300 */
586     tcg_gen_andi_tl(cpu_xer, src,
587                     ~((1u << XER_SO) |
588                       (1u << XER_OV) | (1u << XER_OV32) |
589                       (1u << XER_CA) | (1u << XER_CA32)));
590     tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
591     tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
592     tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
593     tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
594     tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
595 }
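/*
 * Note that SO, OV, CA (and OV32/CA32 on ISA v3.0) live in dedicated TCG
 * globals rather than in cpu_xer: spr_read_xer() re-assembles the
 * architected value and spr_write_xer() splits it apart again.
 */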
596 
597 /* LR */
598 void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
599 {
600     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
601 }
602 
603 void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
604 {
605     tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
606 }
607 
608 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
609 /* Debug facilities */
610 /* CFAR */
611 void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
612 {
613     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
614 }
615 
616 void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
617 {
618     tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
619 }
620 
621 /* Breakpoint */
622 void spr_write_ciabr(DisasContext *ctx, int sprn, int gprn)
623 {
624     translator_io_start(&ctx->base);
625     gen_helper_store_ciabr(tcg_env, cpu_gpr[gprn]);
626 }
627 
628 /* Watchpoint */
629 void spr_write_dawr0(DisasContext *ctx, int sprn, int gprn)
630 {
631     translator_io_start(&ctx->base);
632     gen_helper_store_dawr0(tcg_env, cpu_gpr[gprn]);
633 }
634 
635 void spr_write_dawrx0(DisasContext *ctx, int sprn, int gprn)
636 {
637     translator_io_start(&ctx->base);
638     gen_helper_store_dawrx0(tcg_env, cpu_gpr[gprn]);
639 }
640 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
641 
642 /* CTR */
643 void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
644 {
645     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
646 }
647 
648 void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
649 {
650     tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
651 }
652 
653 /* User read access to SPR */
654 /* USPRx */
655 /* UMMCRx */
656 /* UPMCx */
657 /* USIA */
658 /* UDECR */
659 void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
660 {
661     gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
662 }
663 
664 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
665 void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
666 {
667     gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
668 }
669 #endif
670 
671 /* SPR common to all non-embedded PowerPC */
672 /* DECR */
673 #if !defined(CONFIG_USER_ONLY)
674 void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
675 {
676     translator_io_start(&ctx->base);
677     gen_helper_load_decr(cpu_gpr[gprn], tcg_env);
678 }
679 
680 void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
681 {
682     translator_io_start(&ctx->base);
683     gen_helper_store_decr(tcg_env, cpu_gpr[gprn]);
684 }
685 #endif
686 
687 /* SPR common to all non-embedded PowerPC, except 601 */
688 /* Time base */
689 void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
690 {
691     translator_io_start(&ctx->base);
692     gen_helper_load_tbl(cpu_gpr[gprn], tcg_env);
693 }
694 
695 void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
696 {
697     translator_io_start(&ctx->base);
698     gen_helper_load_tbu(cpu_gpr[gprn], tcg_env);
699 }
700 
701 void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
702 {
703     gen_helper_load_atbl(cpu_gpr[gprn], tcg_env);
704 }
705 
706 void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
707 {
708     gen_helper_load_atbu(cpu_gpr[gprn], tcg_env);
709 }
710 
711 #if !defined(CONFIG_USER_ONLY)
712 void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
713 {
714     if (!gen_serialize_core_lpar(ctx)) {
715         return;
716     }
717 
718     translator_io_start(&ctx->base);
719     gen_helper_store_tbl(tcg_env, cpu_gpr[gprn]);
720 }
721 
722 void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
723 {
724     if (!gen_serialize_core_lpar(ctx)) {
725         return;
726     }
727 
728     translator_io_start(&ctx->base);
729     gen_helper_store_tbu(tcg_env, cpu_gpr[gprn]);
730 }
731 
732 void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
733 {
734     gen_helper_store_atbl(tcg_env, cpu_gpr[gprn]);
735 }
736 
737 void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
738 {
739     gen_helper_store_atbu(tcg_env, cpu_gpr[gprn]);
740 }
741 
742 #if defined(TARGET_PPC64)
743 void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
744 {
745     translator_io_start(&ctx->base);
746     gen_helper_load_purr(cpu_gpr[gprn], tcg_env);
747 }
748 
749 void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
750 {
751     if (!gen_serialize_core_lpar(ctx)) {
752         return;
753     }
754     translator_io_start(&ctx->base);
755     gen_helper_store_purr(tcg_env, cpu_gpr[gprn]);
756 }
757 
758 /* HDECR */
759 void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
760 {
761     translator_io_start(&ctx->base);
762     gen_helper_load_hdecr(cpu_gpr[gprn], tcg_env);
763 }
764 
765 void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
766 {
767     if (!gen_serialize_core_lpar(ctx)) {
768         return;
769     }
770     translator_io_start(&ctx->base);
771     gen_helper_store_hdecr(tcg_env, cpu_gpr[gprn]);
772 }
773 
774 void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
775 {
776     translator_io_start(&ctx->base);
777     gen_helper_load_vtb(cpu_gpr[gprn], tcg_env);
778 }
779 
780 void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
781 {
782     if (!gen_serialize_core_lpar(ctx)) {
783         return;
784     }
785     translator_io_start(&ctx->base);
786     gen_helper_store_vtb(tcg_env, cpu_gpr[gprn]);
787 }
788 
789 void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
790 {
791     if (!gen_serialize_core_lpar(ctx)) {
792         return;
793     }
794     translator_io_start(&ctx->base);
795     gen_helper_store_tbu40(tcg_env, cpu_gpr[gprn]);
796 }
797 
798 #endif
799 #endif
800 
801 #if !defined(CONFIG_USER_ONLY)
802 /* IBAT0U...IBAT7U */
803 /* IBAT0L...IBAT7L */
804 void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
805 {
806     tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env,
807                   offsetof(CPUPPCState,
808                            IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
809 }
810 
811 void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
812 {
813     tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env,
814                   offsetof(CPUPPCState,
815                            IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
816 }
817 
818 void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
819 {
820     TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_IBAT0U) / 2);
821     gen_helper_store_ibatu(tcg_env, t0, cpu_gpr[gprn]);
822 }
823 
824 void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
825 {
826     TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_IBAT4U) / 2) + 4);
827     gen_helper_store_ibatu(tcg_env, t0, cpu_gpr[gprn]);
828 }
829 
830 void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
831 {
832     TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_IBAT0L) / 2);
833     gen_helper_store_ibatl(tcg_env, t0, cpu_gpr[gprn]);
834 }
835 
836 void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
837 {
838     TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_IBAT4L) / 2) + 4);
839     gen_helper_store_ibatl(tcg_env, t0, cpu_gpr[gprn]);
840 }
841 
842 /* DBAT0U...DBAT7U */
843 /* DBAT0L...DBAT7L */
844 void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
845 {
846     tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env,
847                   offsetof(CPUPPCState,
848                            DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
849 }
850 
851 void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
852 {
853     tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env,
854                   offsetof(CPUPPCState,
855                            DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
856 }
857 
858 void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
859 {
860     TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_DBAT0U) / 2);
861     gen_helper_store_dbatu(tcg_env, t0, cpu_gpr[gprn]);
862 }
863 
864 void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
865 {
866     TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_DBAT4U) / 2) + 4);
867     gen_helper_store_dbatu(tcg_env, t0, cpu_gpr[gprn]);
868 }
869 
870 void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
871 {
872     TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_DBAT0L) / 2);
873     gen_helper_store_dbatl(tcg_env, t0, cpu_gpr[gprn]);
874 }
875 
876 void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
877 {
878     TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_DBAT4L) / 2) + 4);
879     gen_helper_store_dbatl(tcg_env, t0, cpu_gpr[gprn]);
880 }
881 
882 /* SDR1 */
883 void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
884 {
885     gen_helper_store_sdr1(tcg_env, cpu_gpr[gprn]);
886 }
887 
888 #if defined(TARGET_PPC64)
889 /* 64-bit PowerPC specific SPRs */
890 /* PIDR */
891 void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
892 {
893     gen_helper_store_pidr(tcg_env, cpu_gpr[gprn]);
894 }
895 
896 void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
897 {
898     gen_helper_store_lpidr(tcg_env, cpu_gpr[gprn]);
899 }
900 
901 void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
902 {
903     tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env, offsetof(CPUPPCState, excp_prefix));
904 }
905 
906 void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
907 {
908     TCGv t0 = tcg_temp_new();
909     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
910     tcg_gen_st_tl(t0, tcg_env, offsetof(CPUPPCState, excp_prefix));
911 }
912 void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
913 {
914     if (!gen_serialize_core(ctx)) {
915         return;
916     }
917 
918     gen_helper_store_ptcr(tcg_env, cpu_gpr[gprn]);
919 }
920 
921 void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
922 {
923     gen_helper_store_pcr(tcg_env, cpu_gpr[gprn]);
924 }
925 
926 /* DPDES */
927 void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
928 {
929     if (!gen_serialize_core_lpar(ctx)) {
930         return;
931     }
932 
933     gen_helper_load_dpdes(cpu_gpr[gprn], tcg_env);
934 }
935 
936 void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
937 {
938     if (!gen_serialize_core_lpar(ctx)) {
939         return;
940     }
941 
942     gen_helper_store_dpdes(tcg_env, cpu_gpr[gprn]);
943 }
944 #endif
945 #endif
946 
947 /* PowerPC 40x specific registers */
948 #if !defined(CONFIG_USER_ONLY)
949 void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
950 {
951     translator_io_start(&ctx->base);
952     gen_helper_load_40x_pit(cpu_gpr[gprn], tcg_env);
953 }
954 
955 void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
956 {
957     translator_io_start(&ctx->base);
958     gen_helper_store_40x_pit(tcg_env, cpu_gpr[gprn]);
959 }
960 
961 void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
962 {
963     translator_io_start(&ctx->base);
964     gen_store_spr(sprn, cpu_gpr[gprn]);
965     gen_helper_store_40x_dbcr0(tcg_env, cpu_gpr[gprn]);
966     /* We must stop translation as we may have rebooted */
967     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
968 }
969 
970 void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
971 {
972     translator_io_start(&ctx->base);
973     gen_helper_store_40x_sler(tcg_env, cpu_gpr[gprn]);
974 }
975 
976 void spr_write_40x_tcr(DisasContext *ctx, int sprn, int gprn)
977 {
978     translator_io_start(&ctx->base);
979     gen_helper_store_40x_tcr(tcg_env, cpu_gpr[gprn]);
980 }
981 
982 void spr_write_40x_tsr(DisasContext *ctx, int sprn, int gprn)
983 {
984     translator_io_start(&ctx->base);
985     gen_helper_store_40x_tsr(tcg_env, cpu_gpr[gprn]);
986 }
987 
988 void spr_write_40x_pid(DisasContext *ctx, int sprn, int gprn)
989 {
990     TCGv t0 = tcg_temp_new();
991     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xFF);
992     gen_helper_store_40x_pid(tcg_env, t0);
993 }
994 
995 void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
996 {
997     translator_io_start(&ctx->base);
998     gen_helper_store_booke_tcr(tcg_env, cpu_gpr[gprn]);
999 }
1000 
1001 void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
1002 {
1003     translator_io_start(&ctx->base);
1004     gen_helper_store_booke_tsr(tcg_env, cpu_gpr[gprn]);
1005 }
1006 #endif
1007 
1008 /* PIR */
1009 #if !defined(CONFIG_USER_ONLY)
1010 void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
1011 {
1012     TCGv t0 = tcg_temp_new();
1013     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
1014     gen_store_spr(SPR_PIR, t0);
1015 }
1016 #endif
1017 
1018 /* SPE specific registers */
1019 void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
1020 {
1021     TCGv_i32 t0 = tcg_temp_new_i32();
1022     tcg_gen_ld_i32(t0, tcg_env, offsetof(CPUPPCState, spe_fscr));
1023     tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
1024 }
1025 
1026 void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
1027 {
1028     TCGv_i32 t0 = tcg_temp_new_i32();
1029     tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
1030     tcg_gen_st_i32(t0, tcg_env, offsetof(CPUPPCState, spe_fscr));
1031 }
1032 
1033 #if !defined(CONFIG_USER_ONLY)
1034 /* Callback used to write the exception vector base */
1035 void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
1036 {
1037     TCGv t0 = tcg_temp_new();
1038     tcg_gen_ld_tl(t0, tcg_env, offsetof(CPUPPCState, ivpr_mask));
1039     tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
1040     tcg_gen_st_tl(t0, tcg_env, offsetof(CPUPPCState, excp_prefix));
1041     gen_store_spr(sprn, t0);
1042 }
1043 
1044 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
1045 {
1046     int sprn_offs;
1047 
1048     if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
1049         sprn_offs = sprn - SPR_BOOKE_IVOR0;
1050     } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
1051         sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
1052     } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
1053         sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
1054     } else {
1055         qemu_log_mask(LOG_GUEST_ERROR, "Trying to write an unknown exception"
1056                       " vector 0x%03x\n", sprn);
1057         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
1058         return;
1059     }
1060 
1061     TCGv t0 = tcg_temp_new();
1062     tcg_gen_ld_tl(t0, tcg_env, offsetof(CPUPPCState, ivor_mask));
1063     tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
1064     tcg_gen_st_tl(t0, tcg_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
1065     gen_store_spr(sprn, t0);
1066 }
1067 #endif
1068 
1069 #ifdef TARGET_PPC64
1070 #ifndef CONFIG_USER_ONLY
1071 void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
1072 {
1073     TCGv t0 = tcg_temp_new();
1074     TCGv t1 = tcg_temp_new();
1075     TCGv t2 = tcg_temp_new();
1076 
1077     /*
1078      * Note, the HV=1 PR=0 case is handled earlier by simply using
1079      * spr_write_generic for HV mode in the SPR table
1080      */
1081 
1082     /* Build insertion mask into t1 based on context */
1083     if (ctx->pr) {
1084         gen_load_spr(t1, SPR_UAMOR);
1085     } else {
1086         gen_load_spr(t1, SPR_AMOR);
1087     }
1088 
1089     /* Mask new bits into t2 */
1090     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1091 
1092     /* Load AMR and clear new bits in t0 */
1093     gen_load_spr(t0, SPR_AMR);
1094     tcg_gen_andc_tl(t0, t0, t1);
1095 
1096     /* OR in the new bits and write it out */
1097     tcg_gen_or_tl(t0, t0, t2);
1098     gen_store_spr(SPR_AMR, t0);
1099     spr_store_dump_spr(SPR_AMR);
1100 }
1101 
1102 void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
1103 {
1104     TCGv t0 = tcg_temp_new();
1105     TCGv t1 = tcg_temp_new();
1106     TCGv t2 = tcg_temp_new();
1107 
1108     /*
1109      * Note, the HV=1 case is handled earlier by simply using
1110      * spr_write_generic for HV mode in the SPR table
1111      */
1112 
1113     /* Build insertion mask into t1 based on context */
1114     gen_load_spr(t1, SPR_AMOR);
1115 
1116     /* Mask new bits into t2 */
1117     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1118 
1119     /* Load AMR and clear new bits in t0 */
1120     gen_load_spr(t0, SPR_UAMOR);
1121     tcg_gen_andc_tl(t0, t0, t1);
1122 
1123     /* OR in the new bits and write it out */
1124     tcg_gen_or_tl(t0, t0, t2);
1125     gen_store_spr(SPR_UAMOR, t0);
1126     spr_store_dump_spr(SPR_UAMOR);
1127 }
1128 
1129 void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
1130 {
1131     TCGv t0 = tcg_temp_new();
1132     TCGv t1 = tcg_temp_new();
1133     TCGv t2 = tcg_temp_new();
1134 
1135     /*
1136      * Note, the HV=1 case is handled earlier by simply using
1137      * spr_write_generic for HV mode in the SPR table
1138      */
1139 
1140     /* Build insertion mask into t1 based on context */
1141     gen_load_spr(t1, SPR_AMOR);
1142 
1143     /* Mask new bits into t2 */
1144     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1145 
1146     /* Load AMR and clear new bits in t0 */
1147     gen_load_spr(t0, SPR_IAMR);
1148     tcg_gen_andc_tl(t0, t0, t1);
1149 
1150     /* OR in the new bits and write it out */
1151     tcg_gen_or_tl(t0, t0, t2);
1152     gen_store_spr(SPR_IAMR, t0);
1153     spr_store_dump_spr(SPR_IAMR);
1154 }
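/*
 * In the three helpers above the new value is merged under a mask owned
 * by the next-higher privilege level (UAMOR for problem-state AMR writes,
 * AMOR otherwise), so less privileged code can only change the authority
 * key bits it has been granted.
 */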
1155 #endif
1156 #endif
1157 
1158 #ifndef CONFIG_USER_ONLY
1159 void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
1160 {
1161     gen_helper_fixup_thrm(tcg_env);
1162     gen_load_spr(cpu_gpr[gprn], sprn);
1163     spr_load_dump_spr(sprn);
1164 }
1165 #endif /* !CONFIG_USER_ONLY */
1166 
1167 #if !defined(CONFIG_USER_ONLY)
1168 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
1169 {
1170     TCGv t0 = tcg_temp_new();
1171 
1172     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
1173     gen_store_spr(sprn, t0);
1174 }
1175 
1176 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
1177 {
1178     TCGv t0 = tcg_temp_new();
1179 
1180     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
1181     gen_store_spr(sprn, t0);
1182 }
1183 
1184 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
1185 {
1186     TCGv t0 = tcg_temp_new();
1187 
1188     tcg_gen_andi_tl(t0, cpu_gpr[gprn],
1189                     ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
1190     gen_store_spr(sprn, t0);
1191 }
1192 
1193 void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
1194 {
1195     gen_helper_booke206_tlbflush(tcg_env, cpu_gpr[gprn]);
1196 }
1197 
1198 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
1199 {
1200     TCGv_i32 t0 = tcg_constant_i32(sprn);
1201     gen_helper_booke_setpid(tcg_env, t0, cpu_gpr[gprn]);
1202 }
1203 
1204 void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
1205 {
1206     gen_helper_booke_set_eplc(tcg_env, cpu_gpr[gprn]);
1207 }
1208 
1209 void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
1210 {
1211     gen_helper_booke_set_epsc(tcg_env, cpu_gpr[gprn]);
1212 }
1213 
1214 #endif
1215 
1216 #if !defined(CONFIG_USER_ONLY)
1217 void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
1218 {
1219     TCGv val = tcg_temp_new();
1220     tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
1221     gen_store_spr(SPR_BOOKE_MAS3, val);
1222     tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
1223     gen_store_spr(SPR_BOOKE_MAS7, val);
1224 }
1225 
1226 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
1227 {
1228     TCGv mas7 = tcg_temp_new();
1229     TCGv mas3 = tcg_temp_new();
1230     gen_load_spr(mas7, SPR_BOOKE_MAS7);
1231     tcg_gen_shli_tl(mas7, mas7, 32);
1232     gen_load_spr(mas3, SPR_BOOKE_MAS3);
1233     tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
1234 }
1235 
1236 #endif
1237 
1238 #ifdef TARGET_PPC64
1239 static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
1240                                     int bit, int sprn, int cause)
1241 {
1242     TCGv_i32 t1 = tcg_constant_i32(bit);
1243     TCGv_i32 t2 = tcg_constant_i32(sprn);
1244     TCGv_i32 t3 = tcg_constant_i32(cause);
1245 
1246     gen_helper_fscr_facility_check(tcg_env, t1, t2, t3);
1247 }
1248 
1249 static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
1250                                    int bit, int sprn, int cause)
1251 {
1252     TCGv_i32 t1 = tcg_constant_i32(bit);
1253     TCGv_i32 t2 = tcg_constant_i32(sprn);
1254     TCGv_i32 t3 = tcg_constant_i32(cause);
1255 
1256     gen_helper_msr_facility_check(tcg_env, t1, t2, t3);
1257 }
1258 
1259 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
1260 {
1261     TCGv spr_up = tcg_temp_new();
1262     TCGv spr = tcg_temp_new();
1263 
1264     gen_load_spr(spr, sprn - 1);
1265     tcg_gen_shri_tl(spr_up, spr, 32);
1266     tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
1267 }
1268 
1269 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
1270 {
1271     TCGv spr = tcg_temp_new();
1272 
1273     gen_load_spr(spr, sprn - 1);
1274     tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
1275     gen_store_spr(sprn - 1, spr);
1276 }
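/*
 * The two helpers above back the 32-bit "upper" SPR aliases (e.g.
 * TEXASRU), which map to the high 32 bits of the 64-bit SPR at sprn - 1.
 */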
1277 
1278 #if !defined(CONFIG_USER_ONLY)
1279 void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
1280 {
1281     TCGv hmer = tcg_temp_new();
1282 
1283     gen_load_spr(hmer, sprn);
1284     tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
1285     gen_store_spr(sprn, hmer);
1286     spr_store_dump_spr(sprn);
1287 }
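/*
 * The written value is ANDed into HMER, so writing a 0 clears a bit and
 * writing a 1 preserves it; software can never set HMER bits this way.
 */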
1288 
1289 void spr_read_tfmr(DisasContext *ctx, int gprn, int sprn)
1290 {
1291     /* Reading TFMR can cause it to be updated, so serialize threads here too */
1292     if (!gen_serialize_core(ctx)) {
1293         return;
1294     }
1295     gen_helper_load_tfmr(cpu_gpr[gprn], tcg_env);
1296 }
1297 
1298 void spr_write_tfmr(DisasContext *ctx, int sprn, int gprn)
1299 {
1300     if (!gen_serialize_core(ctx)) {
1301         return;
1302     }
1303     gen_helper_store_tfmr(tcg_env, cpu_gpr[gprn]);
1304 }
1305 
1306 void spr_write_sprc(DisasContext *ctx, int sprn, int gprn)
1307 {
1308     gen_helper_store_sprc(tcg_env, cpu_gpr[gprn]);
1309 }
1310 
1311 void spr_read_sprd(DisasContext *ctx, int gprn, int sprn)
1312 {
1313     gen_helper_load_sprd(cpu_gpr[gprn], tcg_env);
1314 }
1315 
1316 void spr_write_sprd(DisasContext *ctx, int sprn, int gprn)
1317 {
1318     if (!gen_serialize_core(ctx)) {
1319         return;
1320     }
1321     gen_helper_store_sprd(tcg_env, cpu_gpr[gprn]);
1322 }
1323 
1324 void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
1325 {
1326     translator_io_start(&ctx->base);
1327     gen_helper_store_lpcr(tcg_env, cpu_gpr[gprn]);
1328 }
1329 #endif /* !defined(CONFIG_USER_ONLY) */
1330 
1331 void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
1332 {
1333     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1334     spr_read_generic(ctx, gprn, sprn);
1335 }
1336 
1337 void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
1338 {
1339     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1340     spr_write_generic(ctx, sprn, gprn);
1341 }
1342 
1343 void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
1344 {
1345     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1346     spr_read_generic(ctx, gprn, sprn);
1347 }
1348 
1349 void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
1350 {
1351     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1352     spr_write_generic(ctx, sprn, gprn);
1353 }
1354 
1355 void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
1356 {
1357     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1358     spr_read_prev_upper32(ctx, gprn, sprn);
1359 }
1360 
1361 void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
1362 {
1363     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1364     spr_write_prev_upper32(ctx, sprn, gprn);
1365 }
1366 
1367 void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
1368 {
1369     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1370     spr_read_generic(ctx, gprn, sprn);
1371 }
1372 
1373 void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
1374 {
1375     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1376     spr_write_generic(ctx, sprn, gprn);
1377 }
1378 
1379 void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
1380 {
1381     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1382     spr_read_prev_upper32(ctx, gprn, sprn);
1383 }
1384 
1385 void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
1386 {
1387     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1388     spr_write_prev_upper32(ctx, sprn, gprn);
1389 }
1390 
1391 void spr_read_dexcr_ureg(DisasContext *ctx, int gprn, int sprn)
1392 {
1393     TCGv t0 = tcg_temp_new();
1394 
1395     /*
1396      * Access to the (H)DEXCR in problem state is done using separate
1397      * SPR indexes, 16 below the indexes that give full access to the
1398      * (H)DEXCR in privileged state. Problem state can only read
1399      * bits 32:63; bits 0:31 return 0.
1400      *
1401      * See section 9.3.1-9.3.2 of PowerISA v3.1B
1402      */
1403 
1404     gen_load_spr(t0, sprn + 16);
1405     tcg_gen_ext32u_tl(cpu_gpr[gprn], t0);
1406 }
1407 
1408 /* The PPR32 SPR accesses the upper 32 bits of the PPR */
1409 void spr_read_ppr32(DisasContext *ctx, int gprn, int sprn)
1410 {
1411     gen_load_spr(cpu_gpr[gprn], SPR_PPR);
1412     tcg_gen_shri_tl(cpu_gpr[gprn], cpu_gpr[gprn], 32);
1413     spr_load_dump_spr(SPR_PPR);
1414 }
1415 
1416 void spr_write_ppr32(DisasContext *ctx, int sprn, int gprn)
1417 {
1418     TCGv t0 = tcg_temp_new();
1419 
1420     /*
1421      * Don't clobber the low 32 bits of the PPR. These are all reserved bits
1422      * but TCG does implement them, so it would be surprising to zero them
1423      * here. "Priority nops" are similarly careful not to clobber reserved
1424      * bits.
1425      */
1426     gen_load_spr(t0, SPR_PPR);
1427     tcg_gen_deposit_tl(t0, t0, cpu_gpr[gprn], 32, 32);
1428     gen_store_spr(SPR_PPR, t0);
1429     spr_store_dump_spr(SPR_PPR);
1430 }
1431 #endif
1432 
1433 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
1434 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
1435 
1436 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
1437 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
1438 
1439 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
1440 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
1441 
1442 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
1443 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
1444 
1445 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
1446 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)
1447 
1448 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
1449 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
1450 
1451 typedef struct opcode_t {
1452     unsigned char opc1, opc2, opc3, opc4;
1453 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
1454     unsigned char pad[4];
1455 #endif
1456     opc_handler_t handler;
1457     const char *oname;
1458 } opcode_t;
1459 
1460 static void gen_priv_opc(DisasContext *ctx)
1461 {
1462     gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
1463 }
1464 
1465 /* Helpers for priv. check */
1466 #define GEN_PRIV(CTX)              \
1467     do {                           \
1468         gen_priv_opc(CTX); return; \
1469     } while (0)
1470 
1471 #if defined(CONFIG_USER_ONLY)
1472 #define CHK_HV(CTX) GEN_PRIV(CTX)
1473 #define CHK_SV(CTX) GEN_PRIV(CTX)
1474 #define CHK_HVRM(CTX) GEN_PRIV(CTX)
1475 #else
1476 #define CHK_HV(CTX)                         \
1477     do {                                    \
1478         if (unlikely(ctx->pr || !ctx->hv)) {\
1479             GEN_PRIV(CTX);                  \
1480         }                                   \
1481     } while (0)
1482 #define CHK_SV(CTX)              \
1483     do {                         \
1484         if (unlikely(ctx->pr)) { \
1485             GEN_PRIV(CTX);       \
1486         }                        \
1487     } while (0)
1488 #define CHK_HVRM(CTX)                                   \
1489     do {                                                \
1490         if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
1491             GEN_PRIV(CTX);                              \
1492         }                                               \
1493     } while (0)
1494 #endif
1495 
1496 #define CHK_NONE(CTX)
1497 
1498 /*****************************************************************************/
1499 /* PowerPC instructions table                                                */
1500 
1501 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
1502 {                                                                             \
1503     .opc1 = op1,                                                              \
1504     .opc2 = op2,                                                              \
1505     .opc3 = op3,                                                              \
1506     .opc4 = 0xff,                                                             \
1507     .handler = {                                                              \
1508         .inval1  = invl,                                                      \
1509         .type = _typ,                                                         \
1510         .type2 = _typ2,                                                       \
1511         .handler = &gen_##name,                                               \
1512     },                                                                        \
1513     .oname = stringify(name),                                                 \
1514 }
1515 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
1516 {                                                                             \
1517     .opc1 = op1,                                                              \
1518     .opc2 = op2,                                                              \
1519     .opc3 = op3,                                                              \
1520     .opc4 = 0xff,                                                             \
1521     .handler = {                                                              \
1522         .inval1  = invl1,                                                     \
1523         .inval2  = invl2,                                                     \
1524         .type = _typ,                                                         \
1525         .type2 = _typ2,                                                       \
1526         .handler = &gen_##name,                                               \
1527     },                                                                        \
1528     .oname = stringify(name),                                                 \
1529 }
1530 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
1531 {                                                                             \
1532     .opc1 = op1,                                                              \
1533     .opc2 = op2,                                                              \
1534     .opc3 = op3,                                                              \
1535     .opc4 = 0xff,                                                             \
1536     .handler = {                                                              \
1537         .inval1  = invl,                                                      \
1538         .type = _typ,                                                         \
1539         .type2 = _typ2,                                                       \
1540         .handler = &gen_##name,                                               \
1541     },                                                                        \
1542     .oname = onam,                                                            \
1543 }
1544 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
1545 {                                                                             \
1546     .opc1 = op1,                                                              \
1547     .opc2 = op2,                                                              \
1548     .opc3 = op3,                                                              \
1549     .opc4 = op4,                                                              \
1550     .handler = {                                                              \
1551         .inval1  = invl,                                                      \
1552         .type = _typ,                                                         \
1553         .type2 = _typ2,                                                       \
1554         .handler = &gen_##name,                                               \
1555     },                                                                        \
1556     .oname = stringify(name),                                                 \
1557 }
1558 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
1559 {                                                                             \
1560     .opc1 = op1,                                                              \
1561     .opc2 = op2,                                                              \
1562     .opc3 = op3,                                                              \
1563     .opc4 = op4,                                                              \
1564     .handler = {                                                              \
1565         .inval1  = invl,                                                      \
1566         .type = _typ,                                                         \
1567         .type2 = _typ2,                                                       \
1568         .handler = &gen_##name,                                               \
1569     },                                                                        \
1570     .oname = onam,                                                            \
1571 }
1572 
1573 /* Invalid instruction */
1574 static void gen_invalid(DisasContext *ctx)
1575 {
1576     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
1577 }
1578 
1579 static opc_handler_t invalid_handler = {
1580     .inval1  = 0xFFFFFFFF,
1581     .inval2  = 0xFFFFFFFF,
1582     .type    = PPC_NONE,
1583     .type2   = PPC_NONE,
1584     .handler = gen_invalid,
1585 };
1586 
1587 /***                           Integer comparison                          ***/
1588 
1589 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
1590 {
1591     TCGv t0 = tcg_temp_new();
1592     TCGv_i32 t = tcg_temp_new_i32();
1593 
1594     tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
1595                        t0, arg0, arg1,
1596                        tcg_constant_tl(CRF_LT), tcg_constant_tl(CRF_EQ));
1597     tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
1598                        t0, arg0, arg1, tcg_constant_tl(CRF_GT), t0);
1599 
1600     tcg_gen_trunc_tl_i32(t, t0);
1601     tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
1602     tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);
1603 }
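/*
 * Two movconds select CRF_LT, CRF_GT or CRF_EQ; the current SO value,
 * kept as 0/1 and thus already in the CRF_SO position, is then ORed in
 * to form the 4-bit CR field.
 */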
1604 
1605 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
1606 {
1607     TCGv t0 = tcg_constant_tl(arg1);
1608     gen_op_cmp(arg0, t0, s, crf);
1609 }
1610 
1611 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
1612 {
1613     TCGv t0, t1;
1614     t0 = tcg_temp_new();
1615     t1 = tcg_temp_new();
1616     if (s) {
1617         tcg_gen_ext32s_tl(t0, arg0);
1618         tcg_gen_ext32s_tl(t1, arg1);
1619     } else {
1620         tcg_gen_ext32u_tl(t0, arg0);
1621         tcg_gen_ext32u_tl(t1, arg1);
1622     }
1623     gen_op_cmp(t0, t1, s, crf);
1624 }
1625 
1626 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
1627 {
1628     TCGv t0 = tcg_constant_tl(arg1);
1629     gen_op_cmp32(arg0, t0, s, crf);
1630 }
1631 
1632 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
1633 {
1634     if (NARROW_MODE(ctx)) {
1635         gen_op_cmpi32(reg, 0, 1, 0);
1636     } else {
1637         gen_op_cmpi(reg, 0, 1, 0);
1638     }
1639 }
1640 
1641 /***                           Integer arithmetic                          ***/
1642 
1643 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
1644                                            TCGv arg1, TCGv arg2, int sub)
1645 {
1646     TCGv t0 = tcg_temp_new();
1647 
1648     tcg_gen_xor_tl(cpu_ov, arg0, arg2);
1649     tcg_gen_xor_tl(t0, arg1, arg2);
1650     if (sub) {
1651         tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
1652     } else {
1653         tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
1654     }
1655     if (NARROW_MODE(ctx)) {
1656         tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
1657         if (is_isa300(ctx)) {
1658             tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1659         }
1660     } else {
1661         if (is_isa300(ctx)) {
1662             tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
1663         }
1664         tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
1665     }
1666     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1667 }
1668 
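/*
 * ISA v3.0 CA32: bit 32 of (a ^ b ^ result) is the carry out of the low
 * word for an addition; for a subtraction the operand XOR is inverted,
 * matching the ~a + b + 1 formulation used by subf.
 */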
1669 static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
1670                                              TCGv res, TCGv arg0, TCGv arg1,
1671                                              TCGv ca32, int sub)
1672 {
1673     TCGv t0;
1674 
1675     if (!is_isa300(ctx)) {
1676         return;
1677     }
1678 
1679     t0 = tcg_temp_new();
1680     if (sub) {
1681         tcg_gen_eqv_tl(t0, arg0, arg1);
1682     } else {
1683         tcg_gen_xor_tl(t0, arg0, arg1);
1684     }
1685     tcg_gen_xor_tl(t0, t0, res);
1686     tcg_gen_extract_tl(ca32, t0, 32, 1);
1687 }
1688 
1689 /* Common add function */
1690 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
1691                                     TCGv arg2, TCGv ca, TCGv ca32,
1692                                     bool add_ca, bool compute_ca,
1693                                     bool compute_ov, bool compute_rc0)
1694 {
1695     TCGv t0 = ret;
1696 
1697     if (compute_ca || compute_ov) {
1698         t0 = tcg_temp_new();
1699     }
1700 
1701     if (compute_ca) {
1702         if (NARROW_MODE(ctx)) {
1703             /*
1704              * Caution: a non-obvious corner case of the spec is that
1705              * we must produce the *entire* 64-bit addition, but the
1706              * carry flag is taken from the carry into bit 32.
1707              */
1708             TCGv t1 = tcg_temp_new();
1709             tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
1710             tcg_gen_add_tl(t0, arg1, arg2);
1711             if (add_ca) {
1712                 tcg_gen_add_tl(t0, t0, ca);
1713             }
1714             tcg_gen_xor_tl(ca, t0, t1);        /* bits changed w/ carry */
1715             tcg_gen_extract_tl(ca, ca, 32, 1);
1716             if (is_isa300(ctx)) {
1717                 tcg_gen_mov_tl(ca32, ca);
1718             }
1719         } else {
1720             TCGv zero = tcg_constant_tl(0);
1721             if (add_ca) {
1722                 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
1723                 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
1724             } else {
1725                 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
1726             }
1727             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
1728         }
1729     } else {
1730         tcg_gen_add_tl(t0, arg1, arg2);
1731         if (add_ca) {
1732             tcg_gen_add_tl(t0, t0, ca);
1733         }
1734     }
1735 
1736     if (compute_ov) {
1737         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1738     }
1739     if (unlikely(compute_rc0)) {
1740         gen_set_Rc0(ctx, t0);
1741     }
1742 
1743     if (t0 != ret) {
1744         tcg_gen_mov_tl(ret, t0);
1745     }
1746 }
1747 
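/*
 * 32-bit divide: t2 flags the cases the ISA leaves undefined (division
 * by zero, and INT_MIN / -1 for the signed form).  When flagged, the
 * divisor is replaced by the non-zero flag value so the host division
 * cannot trap; OV/OV32 are then set from the flag when requested.
 */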
1748 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret,
1749                                      TCGv arg1, TCGv arg2, bool sign,
1750                                      bool compute_ov, bool compute_rc0)
1751 {
1752     TCGv_i32 t0 = tcg_temp_new_i32();
1753     TCGv_i32 t1 = tcg_temp_new_i32();
1754     TCGv_i32 t2 = tcg_temp_new_i32();
1755     TCGv_i32 t3 = tcg_temp_new_i32();
1756 
1757     tcg_gen_trunc_tl_i32(t0, arg1);
1758     tcg_gen_trunc_tl_i32(t1, arg2);
1759     if (sign) {
1760         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1761         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1762         tcg_gen_and_i32(t2, t2, t3);
1763         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1764         tcg_gen_or_i32(t2, t2, t3);
1765         tcg_gen_movi_i32(t3, 0);
1766         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1767         tcg_gen_div_i32(t3, t0, t1);
1768         tcg_gen_extu_i32_tl(ret, t3);
1769     } else {
1770         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
1771         tcg_gen_movi_i32(t3, 0);
1772         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1773         tcg_gen_divu_i32(t3, t0, t1);
1774         tcg_gen_extu_i32_tl(ret, t3);
1775     }
1776     if (compute_ov) {
1777         tcg_gen_extu_i32_tl(cpu_ov, t2);
1778         if (is_isa300(ctx)) {
1779             tcg_gen_extu_i32_tl(cpu_ov32, t2);
1780         }
1781         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1782     }
1783 
1784     if (unlikely(compute_rc0)) {
1785         gen_set_Rc0(ctx, ret);
1786     }
1787 }
1788 
1789 #if defined(TARGET_PPC64)
1790 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret,
1791                                      TCGv arg1, TCGv arg2, bool sign,
1792                                      bool compute_ov, bool compute_rc0)
1793 {
1794     TCGv_i64 t0 = tcg_temp_new_i64();
1795     TCGv_i64 t1 = tcg_temp_new_i64();
1796     TCGv_i64 t2 = tcg_temp_new_i64();
1797     TCGv_i64 t3 = tcg_temp_new_i64();
1798 
1799     tcg_gen_mov_i64(t0, arg1);
1800     tcg_gen_mov_i64(t1, arg2);
1801     if (sign) {
1802         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1803         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1804         tcg_gen_and_i64(t2, t2, t3);
1805         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1806         tcg_gen_or_i64(t2, t2, t3);
1807         tcg_gen_movi_i64(t3, 0);
1808         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1809         tcg_gen_div_i64(ret, t0, t1);
1810     } else {
1811         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
1812         tcg_gen_movi_i64(t3, 0);
1813         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1814         tcg_gen_divu_i64(ret, t0, t1);
1815     }
1816     if (compute_ov) {
1817         tcg_gen_mov_tl(cpu_ov, t2);
1818         if (is_isa300(ctx)) {
1819             tcg_gen_mov_tl(cpu_ov32, t2);
1820         }
1821         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1822     }
1823 
1824     if (unlikely(compute_rc0)) {
1825         gen_set_Rc0(ctx, ret);
1826     }
1827 }
1828 #endif
1829 
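/*
 * Modulo uses the same trick as divide: when the operation would be
 * undefined (zero divisor, or INT_MIN % -1 in the signed case) the
 * divisor is quietly replaced so the host remainder op cannot trap.
 * These helpers set no flags.
 */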
1830 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
1831                                      TCGv arg2, int sign)
1832 {
1833     TCGv_i32 t0 = tcg_temp_new_i32();
1834     TCGv_i32 t1 = tcg_temp_new_i32();
1835 
1836     tcg_gen_trunc_tl_i32(t0, arg1);
1837     tcg_gen_trunc_tl_i32(t1, arg2);
1838     if (sign) {
1839         TCGv_i32 t2 = tcg_temp_new_i32();
1840         TCGv_i32 t3 = tcg_temp_new_i32();
1841         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1842         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1843         tcg_gen_and_i32(t2, t2, t3);
1844         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1845         tcg_gen_or_i32(t2, t2, t3);
1846         tcg_gen_movi_i32(t3, 0);
1847         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1848         tcg_gen_rem_i32(t3, t0, t1);
1849         tcg_gen_ext_i32_tl(ret, t3);
1850     } else {
1851         TCGv_i32 t2 = tcg_constant_i32(1);
1852         TCGv_i32 t3 = tcg_constant_i32(0);
1853         tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
1854         tcg_gen_remu_i32(t0, t0, t1);
1855         tcg_gen_extu_i32_tl(ret, t0);
1856     }
1857 }
1858 
1859 #if defined(TARGET_PPC64)
1860 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
1861                                      TCGv arg2, int sign)
1862 {
1863     TCGv_i64 t0 = tcg_temp_new_i64();
1864     TCGv_i64 t1 = tcg_temp_new_i64();
1865 
1866     tcg_gen_mov_i64(t0, arg1);
1867     tcg_gen_mov_i64(t1, arg2);
1868     if (sign) {
1869         TCGv_i64 t2 = tcg_temp_new_i64();
1870         TCGv_i64 t3 = tcg_temp_new_i64();
1871         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1872         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1873         tcg_gen_and_i64(t2, t2, t3);
1874         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1875         tcg_gen_or_i64(t2, t2, t3);
1876         tcg_gen_movi_i64(t3, 0);
1877         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1878         tcg_gen_rem_i64(ret, t0, t1);
1879     } else {
1880         TCGv_i64 t2 = tcg_constant_i64(1);
1881         TCGv_i64 t3 = tcg_constant_i64(0);
1882         tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
1883         tcg_gen_remu_i64(ret, t0, t1);
1884     }
1885 }
1886 #endif
1887 
1888 /* Common subf function */
1889 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1890                                      TCGv arg2, bool add_ca, bool compute_ca,
1891                                      bool compute_ov, bool compute_rc0)
1892 {
1893     TCGv t0 = ret;
1894 
1895     if (compute_ca || compute_ov) {
1896         t0 = tcg_temp_new();
1897     }
1898 
1899     if (compute_ca) {
1900         /* dest = ~arg1 + arg2 [+ ca].  */
1901         if (NARROW_MODE(ctx)) {
1902             /*
1903              * Caution: a non-obvious corner case of the spec is that
1904              * we must produce the *entire* 64-bit addition, but the
1905              * carry flag is taken from the carry into bit 32.
1906              */
1907             TCGv inv1 = tcg_temp_new();
1908             TCGv t1 = tcg_temp_new();
1909             tcg_gen_not_tl(inv1, arg1);
1910             if (add_ca) {
1911                 tcg_gen_add_tl(t0, arg2, cpu_ca);
1912             } else {
1913                 tcg_gen_addi_tl(t0, arg2, 1);
1914             }
1915             tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
1916             tcg_gen_add_tl(t0, t0, inv1);
1917             tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changed w/ carry */
1918             tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
1919             if (is_isa300(ctx)) {
1920                 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
1921             }
1922         } else if (add_ca) {
1923             TCGv zero, inv1 = tcg_temp_new();
1924             tcg_gen_not_tl(inv1, arg1);
1925             zero = tcg_constant_tl(0);
1926             tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
1927             tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
1928             gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
1929         } else {
1930             tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
1931             tcg_gen_sub_tl(t0, arg2, arg1);
1932             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
1933         }
1934     } else if (add_ca) {
1935         /*
1936          * Since we're ignoring carry-out, we can simplify the
1937          * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
1938          */
1939         tcg_gen_sub_tl(t0, arg2, arg1);
1940         tcg_gen_add_tl(t0, t0, cpu_ca);
1941         tcg_gen_subi_tl(t0, t0, 1);
1942     } else {
1943         tcg_gen_sub_tl(t0, arg2, arg1);
1944     }
1945 
1946     if (compute_ov) {
1947         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1948     }
1949     if (unlikely(compute_rc0)) {
1950         gen_set_Rc0(ctx, t0);
1951     }
1952 
1953     if (t0 != ret) {
1954         tcg_gen_mov_tl(ret, t0);
1955     }
1956 }
1957 
1958 /***                            Integer logical                            ***/
1959 
1960 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
1961 static void gen_pause(DisasContext *ctx)
1962 {
1963     TCGv_i32 t0 = tcg_constant_i32(0);
1964     tcg_gen_st_i32(t0, tcg_env,
1965                    -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
1966 
1967     /* Stop translation; this gives other CPUs a chance to run */
1968     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
1969 }
1970 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
1971 
1972 /***                             Integer rotate                            ***/
1973 
1974 /* rlwimi & rlwimi. */
1975 static void gen_rlwimi(DisasContext *ctx)
1976 {
1977     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
1978     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
1979     uint32_t sh = SH(ctx->opcode);
1980     uint32_t mb = MB(ctx->opcode);
1981     uint32_t me = ME(ctx->opcode);
1982 
1983     if (sh == (31 - me) && mb <= me) {
1984         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
1985     } else {
1986         target_ulong mask;
1987         bool mask_in_32b = true;
1988         TCGv t1;
1989 
1990 #if defined(TARGET_PPC64)
1991         mb += 32;
1992         me += 32;
1993 #endif
1994         mask = MASK(mb, me);
1995 
1996 #if defined(TARGET_PPC64)
1997         if (mask > 0xffffffffu) {
1998             mask_in_32b = false;
1999         }
2000 #endif
2001         t1 = tcg_temp_new();
2002         if (mask_in_32b) {
2003             TCGv_i32 t0 = tcg_temp_new_i32();
2004             tcg_gen_trunc_tl_i32(t0, t_rs);
2005             tcg_gen_rotli_i32(t0, t0, sh);
2006             tcg_gen_extu_i32_tl(t1, t0);
2007         } else {
2008 #if defined(TARGET_PPC64)
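            /*
             * Replicate the low 32 bits into the high half so the
             * 64-bit rotate below behaves like a 32-bit rotate.
             */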
2009             tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
2010             tcg_gen_rotli_i64(t1, t1, sh);
2011 #else
2012             g_assert_not_reached();
2013 #endif
2014         }
2015 
2016         tcg_gen_andi_tl(t1, t1, mask);
2017         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2018         tcg_gen_or_tl(t_ra, t_ra, t1);
2019     }
2020     if (unlikely(Rc(ctx->opcode) != 0)) {
2021         gen_set_Rc0(ctx, t_ra);
2022     }
2023 }
2024 
2025 /* rlwinm & rlwinm. */
2026 static void gen_rlwinm(DisasContext *ctx)
2027 {
2028     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2029     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2030     int sh = SH(ctx->opcode);
2031     int mb = MB(ctx->opcode);
2032     int me = ME(ctx->opcode);
2033     int len = me - mb + 1;
2034     int rsh = (32 - sh) & 31;
2035 
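    /*
     * Fast paths: when me == 31 - sh the rotate-and-mask is just the low
     * len bits of rS shifted left by sh (deposit into zero); when the
     * mask reaches bit 0 and no wrapped-around bits survive, it is a
     * plain right-shift field extract.
     */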
2036     if (sh != 0 && len > 0 && me == (31 - sh)) {
2037         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2038     } else if (me == 31 && rsh + len <= 32) {
2039         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2040     } else {
2041         target_ulong mask;
2042         bool mask_in_32b = true;
2043 #if defined(TARGET_PPC64)
2044         mb += 32;
2045         me += 32;
2046 #endif
2047         mask = MASK(mb, me);
2048 #if defined(TARGET_PPC64)
2049         if (mask > 0xffffffffu) {
2050             mask_in_32b = false;
2051         }
2052 #endif
2053         if (mask_in_32b) {
2054             if (sh == 0) {
2055                 tcg_gen_andi_tl(t_ra, t_rs, mask);
2056             } else {
2057                 TCGv_i32 t0 = tcg_temp_new_i32();
2058                 tcg_gen_trunc_tl_i32(t0, t_rs);
2059                 tcg_gen_rotli_i32(t0, t0, sh);
2060                 tcg_gen_andi_i32(t0, t0, mask);
2061                 tcg_gen_extu_i32_tl(t_ra, t0);
2062             }
2063         } else {
2064 #if defined(TARGET_PPC64)
2065             tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2066             tcg_gen_rotli_i64(t_ra, t_ra, sh);
2067             tcg_gen_andi_i64(t_ra, t_ra, mask);
2068 #else
2069             g_assert_not_reached();
2070 #endif
2071         }
2072     }
2073     if (unlikely(Rc(ctx->opcode) != 0)) {
2074         gen_set_Rc0(ctx, t_ra);
2075     }
2076 }
2077 
2078 /* rlwnm & rlwnm. */
2079 static void gen_rlwnm(DisasContext *ctx)
2080 {
2081     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2082     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2083     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2084     uint32_t mb = MB(ctx->opcode);
2085     uint32_t me = ME(ctx->opcode);
2086     target_ulong mask;
2087     bool mask_in_32b = true;
2088 
2089 #if defined(TARGET_PPC64)
2090     mb += 32;
2091     me += 32;
2092 #endif
2093     mask = MASK(mb, me);
2094 
2095 #if defined(TARGET_PPC64)
2096     if (mask > 0xffffffffu) {
2097         mask_in_32b = false;
2098     }
2099 #endif
2100     if (mask_in_32b) {
2101         TCGv_i32 t0 = tcg_temp_new_i32();
2102         TCGv_i32 t1 = tcg_temp_new_i32();
2103         tcg_gen_trunc_tl_i32(t0, t_rb);
2104         tcg_gen_trunc_tl_i32(t1, t_rs);
2105         tcg_gen_andi_i32(t0, t0, 0x1f);
2106         tcg_gen_rotl_i32(t1, t1, t0);
2107         tcg_gen_extu_i32_tl(t_ra, t1);
2108     } else {
2109 #if defined(TARGET_PPC64)
2110         TCGv_i64 t0 = tcg_temp_new_i64();
2111         tcg_gen_andi_i64(t0, t_rb, 0x1f);
2112         tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2113         tcg_gen_rotl_i64(t_ra, t_ra, t0);
2114 #else
2115         g_assert_not_reached();
2116 #endif
2117     }
2118 
2119     tcg_gen_andi_tl(t_ra, t_ra, mask);
2120 
2121     if (unlikely(Rc(ctx->opcode) != 0)) {
2122         gen_set_Rc0(ctx, t_ra);
2123     }
2124 }
2125 
2126 #if defined(TARGET_PPC64)
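/*
 * The rld* instructions carry the high bit of SH and/or MB/ME outside
 * the regular fields, so each one occupies two or four adjacent opcode
 * slots; these macros expand a parameterised generator into one handler
 * per slot.
 */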
2127 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
2128 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2129 {                                                                             \
2130     gen_##name(ctx, 0);                                                       \
2131 }                                                                             \
2132                                                                               \
2133 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2134 {                                                                             \
2135     gen_##name(ctx, 1);                                                       \
2136 }
2137 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
2138 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2139 {                                                                             \
2140     gen_##name(ctx, 0, 0);                                                    \
2141 }                                                                             \
2142                                                                               \
2143 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2144 {                                                                             \
2145     gen_##name(ctx, 0, 1);                                                    \
2146 }                                                                             \
2147                                                                               \
2148 static void glue(gen_, name##2)(DisasContext *ctx)                            \
2149 {                                                                             \
2150     gen_##name(ctx, 1, 0);                                                    \
2151 }                                                                             \
2152                                                                               \
2153 static void glue(gen_, name##3)(DisasContext *ctx)                            \
2154 {                                                                             \
2155     gen_##name(ctx, 1, 1);                                                    \
2156 }
2157 
2158 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
2159 {
2160     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2161     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2162     int len = me - mb + 1;
2163     int rsh = (64 - sh) & 63;
2164 
2165     if (sh != 0 && len > 0 && me == (63 - sh)) {
2166         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2167     } else if (me == 63 && rsh + len <= 64) {
2168         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2169     } else {
2170         tcg_gen_rotli_tl(t_ra, t_rs, sh);
2171         tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2172     }
2173     if (unlikely(Rc(ctx->opcode) != 0)) {
2174         gen_set_Rc0(ctx, t_ra);
2175     }
2176 }
2177 
2178 /* rldicl - rldicl. */
2179 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
2180 {
2181     uint32_t sh, mb;
2182 
2183     sh = SH(ctx->opcode) | (shn << 5);
2184     mb = MB(ctx->opcode) | (mbn << 5);
2185     gen_rldinm(ctx, mb, 63, sh);
2186 }
2187 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
2188 
2189 /* rldicr - rldicr. */
2190 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
2191 {
2192     uint32_t sh, me;
2193 
2194     sh = SH(ctx->opcode) | (shn << 5);
2195     me = MB(ctx->opcode) | (men << 5);
2196     gen_rldinm(ctx, 0, me, sh);
2197 }
2198 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2199 
2200 /* rldic - rldic. */
2201 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
2202 {
2203     uint32_t sh, mb;
2204 
2205     sh = SH(ctx->opcode) | (shn << 5);
2206     mb = MB(ctx->opcode) | (mbn << 5);
2207     gen_rldinm(ctx, mb, 63 - sh, sh);
2208 }
2209 GEN_PPC64_R4(rldic, 0x1E, 0x04);
2210 
2211 static void gen_rldnm(DisasContext *ctx, int mb, int me)
2212 {
2213     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2214     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2215     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2216     TCGv t0;
2217 
2218     t0 = tcg_temp_new();
2219     tcg_gen_andi_tl(t0, t_rb, 0x3f);
2220     tcg_gen_rotl_tl(t_ra, t_rs, t0);
2221 
2222     tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2223     if (unlikely(Rc(ctx->opcode) != 0)) {
2224         gen_set_Rc0(ctx, t_ra);
2225     }
2226 }
2227 
2228 /* rldcl - rldcl. */
2229 static inline void gen_rldcl(DisasContext *ctx, int mbn)
2230 {
2231     uint32_t mb;
2232 
2233     mb = MB(ctx->opcode) | (mbn << 5);
2234     gen_rldnm(ctx, mb, 63);
2235 }
2236 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2237 
2238 /* rldcr - rldcr. */
2239 static inline void gen_rldcr(DisasContext *ctx, int men)
2240 {
2241     uint32_t me;
2242 
2243     me = MB(ctx->opcode) | (men << 5);
2244     gen_rldnm(ctx, 0, me);
2245 }
2246 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2247 
2248 /* rldimi - rldimi. */
2249 static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
2250 {
2251     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2252     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2253     uint32_t sh = SH(ctx->opcode) | (shn << 5);
2254     uint32_t mb = MB(ctx->opcode) | (mbn << 5);
2255     uint32_t me = 63 - sh;
2256 
2257     if (mb <= me) {
2258         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2259     } else {
2260         target_ulong mask = MASK(mb, me);
2261         TCGv t1 = tcg_temp_new();
2262 
2263         tcg_gen_rotli_tl(t1, t_rs, sh);
2264         tcg_gen_andi_tl(t1, t1, mask);
2265         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2266         tcg_gen_or_tl(t_ra, t_ra, t1);
2267     }
2268     if (unlikely(Rc(ctx->opcode) != 0)) {
2269         gen_set_Rc0(ctx, t_ra);
2270     }
2271 }
2272 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
2273 #endif
2274 
2275 /***                             Integer shift                             ***/
2276 
2277 /* slw & slw. */
2278 static void gen_slw(DisasContext *ctx)
2279 {
2280     TCGv t0, t1;
2281 
2282     t0 = tcg_temp_new();
2283     /* AND rS with a mask that is 0 when rB >= 0x20 */
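    /*
     * Shift rB's bit 5 up to the sign position and arithmetically shift
     * it back: t0 becomes all-ones whenever the shift amount is >= 32,
     * and the andc below then clears rS completely.
     */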
2284 #if defined(TARGET_PPC64)
2285     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2286     tcg_gen_sari_tl(t0, t0, 0x3f);
2287 #else
2288     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2289     tcg_gen_sari_tl(t0, t0, 0x1f);
2290 #endif
2291     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2292     t1 = tcg_temp_new();
2293     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2294     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2295     tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2296     if (unlikely(Rc(ctx->opcode) != 0)) {
2297         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2298     }
2299 }
2300 
2301 /* sraw & sraw. */
2302 static void gen_sraw(DisasContext *ctx)
2303 {
2304     gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], tcg_env,
2305                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2306     if (unlikely(Rc(ctx->opcode) != 0)) {
2307         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2308     }
2309 }
2310 
2311 /* srawi & srawi. */
2312 static void gen_srawi(DisasContext *ctx)
2313 {
2314     int sh = SH(ctx->opcode);
2315     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2316     TCGv src = cpu_gpr[rS(ctx->opcode)];
2317     if (sh == 0) {
2318         tcg_gen_ext32s_tl(dst, src);
2319         tcg_gen_movi_tl(cpu_ca, 0);
2320         if (is_isa300(ctx)) {
2321             tcg_gen_movi_tl(cpu_ca32, 0);
2322         }
2323     } else {
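        /*
         * CA is set only when the source is negative and at least one
         * 1 bit is shifted out: mask the low sh bits, AND with the
         * smeared sign bit, and reduce to 0 or 1.
         */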
2324         TCGv t0;
2325         tcg_gen_ext32s_tl(dst, src);
2326         tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
2327         t0 = tcg_temp_new();
2328         tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
2329         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2330         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2331         if (is_isa300(ctx)) {
2332             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2333         }
2334         tcg_gen_sari_tl(dst, dst, sh);
2335     }
2336     if (unlikely(Rc(ctx->opcode) != 0)) {
2337         gen_set_Rc0(ctx, dst);
2338     }
2339 }
2340 
2341 /* srw & srw. */
2342 static void gen_srw(DisasContext *ctx)
2343 {
2344     TCGv t0, t1;
2345 
2346     t0 = tcg_temp_new();
2347     /* AND rS with a mask that is 0 when rB >= 0x20 */
2348 #if defined(TARGET_PPC64)
2349     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2350     tcg_gen_sari_tl(t0, t0, 0x3f);
2351 #else
2352     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2353     tcg_gen_sari_tl(t0, t0, 0x1f);
2354 #endif
2355     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2356     tcg_gen_ext32u_tl(t0, t0);
2357     t1 = tcg_temp_new();
2358     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2359     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2360     if (unlikely(Rc(ctx->opcode) != 0)) {
2361         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2362     }
2363 }
2364 
2365 #if defined(TARGET_PPC64)
2366 /* sld & sld. */
2367 static void gen_sld(DisasContext *ctx)
2368 {
2369     TCGv t0, t1;
2370 
2371     t0 = tcg_temp_new();
2372     /* AND rS with a mask that is 0 when rB >= 0x40 */
2373     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2374     tcg_gen_sari_tl(t0, t0, 0x3f);
2375     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2376     t1 = tcg_temp_new();
2377     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2378     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2379     if (unlikely(Rc(ctx->opcode) != 0)) {
2380         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2381     }
2382 }
2383 
2384 /* srad & srad. */
2385 static void gen_srad(DisasContext *ctx)
2386 {
2387     gen_helper_srad(cpu_gpr[rA(ctx->opcode)], tcg_env,
2388                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2389     if (unlikely(Rc(ctx->opcode) != 0)) {
2390         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2391     }
2392 }
2393 /* sradi & sradi. */
2394 static inline void gen_sradi(DisasContext *ctx, int n)
2395 {
2396     int sh = SH(ctx->opcode) + (n << 5);
2397     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2398     TCGv src = cpu_gpr[rS(ctx->opcode)];
2399     if (sh == 0) {
2400         tcg_gen_mov_tl(dst, src);
2401         tcg_gen_movi_tl(cpu_ca, 0);
2402         if (is_isa300(ctx)) {
2403             tcg_gen_movi_tl(cpu_ca32, 0);
2404         }
2405     } else {
2406         TCGv t0;
2407         tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
2408         t0 = tcg_temp_new();
2409         tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
2410         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2411         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2412         if (is_isa300(ctx)) {
2413             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2414         }
2415         tcg_gen_sari_tl(dst, src, sh);
2416     }
2417     if (unlikely(Rc(ctx->opcode) != 0)) {
2418         gen_set_Rc0(ctx, dst);
2419     }
2420 }
2421 
2422 static void gen_sradi0(DisasContext *ctx)
2423 {
2424     gen_sradi(ctx, 0);
2425 }
2426 
2427 static void gen_sradi1(DisasContext *ctx)
2428 {
2429     gen_sradi(ctx, 1);
2430 }
2431 
2432 /* extswsli & extswsli. */
2433 static inline void gen_extswsli(DisasContext *ctx, int n)
2434 {
2435     int sh = SH(ctx->opcode) + (n << 5);
2436     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2437     TCGv src = cpu_gpr[rS(ctx->opcode)];
2438 
2439     tcg_gen_ext32s_tl(dst, src);
2440     tcg_gen_shli_tl(dst, dst, sh);
2441     if (unlikely(Rc(ctx->opcode) != 0)) {
2442         gen_set_Rc0(ctx, dst);
2443     }
2444 }
2445 
2446 static void gen_extswsli0(DisasContext *ctx)
2447 {
2448     gen_extswsli(ctx, 0);
2449 }
2450 
2451 static void gen_extswsli1(DisasContext *ctx)
2452 {
2453     gen_extswsli(ctx, 1);
2454 }
2455 
2456 /* srd & srd. */
2457 static void gen_srd(DisasContext *ctx)
2458 {
2459     TCGv t0, t1;
2460 
2461     t0 = tcg_temp_new();
2462     /* AND rS with a mask that is 0 when rB >= 0x40 */
2463     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2464     tcg_gen_sari_tl(t0, t0, 0x3f);
2465     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2466     t1 = tcg_temp_new();
2467     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2468     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2469     if (unlikely(Rc(ctx->opcode) != 0)) {
2470         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2471     }
2472 }
2473 #endif
2474 
2475 /***                           Addressing modes                            ***/
2476 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
2477 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2478                                       target_long maskl)
2479 {
2480     target_long simm = SIMM(ctx->opcode);
2481 
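    /*
     * maskl clears low displacement bits that DS/DQ-form encodings reuse
     * as part of the opcode.
     */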
2482     simm &= ~maskl;
2483     if (rA(ctx->opcode) == 0) {
2484         if (NARROW_MODE(ctx)) {
2485             simm = (uint32_t)simm;
2486         }
2487         tcg_gen_movi_tl(EA, simm);
2488     } else if (likely(simm != 0)) {
2489         tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2490         if (NARROW_MODE(ctx)) {
2491             tcg_gen_ext32u_tl(EA, EA);
2492         }
2493     } else {
2494         if (NARROW_MODE(ctx)) {
2495             tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2496         } else {
2497             tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2498         }
2499     }
2500 }
2501 
2502 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
2503 {
2504     if (rA(ctx->opcode) == 0) {
2505         if (NARROW_MODE(ctx)) {
2506             tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2507         } else {
2508             tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2509         }
2510     } else {
2511         tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2512         if (NARROW_MODE(ctx)) {
2513             tcg_gen_ext32u_tl(EA, EA);
2514         }
2515     }
2516 }
2517 
2518 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
2519 {
2520     if (rA(ctx->opcode) == 0) {
2521         tcg_gen_movi_tl(EA, 0);
2522     } else if (NARROW_MODE(ctx)) {
2523         tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2524     } else {
2525         tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2526     }
2527 }
2528 
2529 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
2530                                 target_long val)
2531 {
2532     tcg_gen_addi_tl(ret, arg1, val);
2533     if (NARROW_MODE(ctx)) {
2534         tcg_gen_ext32u_tl(ret, ret);
2535     }
2536 }
2537 
2538 static inline void gen_align_no_le(DisasContext *ctx)
2539 {
2540     gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
2541                       (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
2542 }
2543 
2544 /* EA <- {(ra == 0) ? 0 : GPR[ra]} + displ */
2545 static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ)
2546 {
2547     TCGv ea = tcg_temp_new();
2548     if (ra) {
2549         tcg_gen_add_tl(ea, cpu_gpr[ra], displ);
2550     } else {
2551         tcg_gen_mov_tl(ea, displ);
2552     }
2553     if (NARROW_MODE(ctx)) {
2554         tcg_gen_ext32u_tl(ea, ea);
2555     }
2556     return ea;
2557 }
2558 
2559 #if defined(TARGET_PPC64)
2560 /* EA <- (ra == 0) ? 0 : GPR[ra] */
2561 static TCGv do_ea_calc_ra(DisasContext *ctx, int ra)
2562 {
2563     TCGv EA = tcg_temp_new();
2564     if (!ra) {
2565         tcg_gen_movi_tl(EA, 0);
2566     } else if (NARROW_MODE(ctx)) {
2567         tcg_gen_ext32u_tl(EA, cpu_gpr[ra]);
2568     } else {
2569         tcg_gen_mov_tl(EA, cpu_gpr[ra]);
2570     }
2571     return EA;
2572 }
2573 #endif
2574 
2575 /***                             Integer load                              ***/
2576 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
2577 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
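/*
 * DEF_MEMOP applies the current default endianness to a memory op;
 * BSWAP_MEMOP flips it, which is what the byte-reversed load/store
 * instructions need.
 */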
2578 
2579 #define GEN_QEMU_LOAD_TL(ldop, op)                                      \
2580 static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
2581                                   TCGv val,                             \
2582                                   TCGv addr)                            \
2583 {                                                                       \
2584     tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
2585 }
2586 
2587 GEN_QEMU_LOAD_TL(ld8u,  DEF_MEMOP(MO_UB))
2588 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
2589 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
2590 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
2591 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
2592 
2593 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
2594 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
2595 
2596 #define GEN_QEMU_LOAD_64(ldop, op)                                  \
2597 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,    \
2598                                              TCGv_i64 val,          \
2599                                              TCGv addr)             \
2600 {                                                                   \
2601     tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);               \
2602 }
2603 
2604 GEN_QEMU_LOAD_64(ld8u,  DEF_MEMOP(MO_UB))
2605 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
2606 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
2607 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
2608 GEN_QEMU_LOAD_64(ld64,  DEF_MEMOP(MO_UQ))
2609 
2610 #if defined(TARGET_PPC64)
2611 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_UQ))
2612 #endif
2613 
2614 #define GEN_QEMU_STORE_TL(stop, op)                                     \
2615 static void glue(gen_qemu_, stop)(DisasContext *ctx,                    \
2616                                   TCGv val,                             \
2617                                   TCGv addr)                            \
2618 {                                                                       \
2619     tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                    \
2620 }
2621 
2622 #if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY)
2623 GEN_QEMU_STORE_TL(st8,  DEF_MEMOP(MO_UB))
2624 #endif
2625 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
2626 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
2627 
2628 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
2629 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
2630 
2631 #define GEN_QEMU_STORE_64(stop, op)                               \
2632 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,  \
2633                                               TCGv_i64 val,       \
2634                                               TCGv addr)          \
2635 {                                                                 \
2636     tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);             \
2637 }
2638 
2639 GEN_QEMU_STORE_64(st8,  DEF_MEMOP(MO_UB))
2640 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
2641 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
2642 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_UQ))
2643 
2644 #if defined(TARGET_PPC64)
2645 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_UQ))
2646 #endif
2647 
2648 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
2649 static void glue(gen_, name##x)(DisasContext *ctx)                            \
2650 {                                                                             \
2651     TCGv EA;                                                                  \
2652     chk(ctx);                                                                 \
2653     gen_set_access_type(ctx, ACCESS_INT);                                     \
2654     EA = tcg_temp_new();                                                      \
2655     gen_addr_reg_index(ctx, EA);                                              \
2656     gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
2657 }
2658 
2659 #define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
2660     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
2661 
2662 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                            \
2663     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
2664 
2665 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
2666 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
2667 {                                                                             \
2668     TCGv EA;                                                                  \
2669     CHK_SV(ctx);                                                              \
2670     gen_set_access_type(ctx, ACCESS_INT);                                     \
2671     EA = tcg_temp_new();                                                      \
2672     gen_addr_reg_index(ctx, EA);                                              \
2673     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
2674 }
2675 
2676 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
2677 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
2678 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
2679 #if defined(TARGET_PPC64)
2680 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
2681 #endif
2682 
2683 #if defined(TARGET_PPC64)
2684 /* CI load/store variants */
2685 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
2686 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
2687 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
2688 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
2689 #endif
2690 
2691 /***                              Integer store                            ***/
2692 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
2693 static void glue(gen_, name##x)(DisasContext *ctx)                            \
2694 {                                                                             \
2695     TCGv EA;                                                                  \
2696     chk(ctx);                                                                 \
2697     gen_set_access_type(ctx, ACCESS_INT);                                     \
2698     EA = tcg_temp_new();                                                      \
2699     gen_addr_reg_index(ctx, EA);                                              \
2700     gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
2701 }
2702 #define GEN_STX(name, stop, opc2, opc3, type)                                 \
2703     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
2704 
2705 #define GEN_STX_HVRM(name, stop, opc2, opc3, type)                            \
2706     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
2707 
2708 #define GEN_STEPX(name, stop, opc2, opc3)                                     \
2709 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
2710 {                                                                             \
2711     TCGv EA;                                                                  \
2712     CHK_SV(ctx);                                                              \
2713     gen_set_access_type(ctx, ACCESS_INT);                                     \
2714     EA = tcg_temp_new();                                                      \
2715     gen_addr_reg_index(ctx, EA);                                              \
2716     tcg_gen_qemu_st_tl(                                                       \
2717         cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop);              \
2718 }
2719 
2720 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
2721 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
2722 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
2723 #if defined(TARGET_PPC64)
2724 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1d, 0x04)
2725 #endif
2726 
2727 #if defined(TARGET_PPC64)
2728 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
2729 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
2730 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
2731 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
2732 #endif
2733 /***                Integer load and store with byte reverse               ***/
2734 
2735 /* lhbrx */
2736 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
2737 
2738 /* lwbrx */
2739 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
2740 
2741 #if defined(TARGET_PPC64)
2742 /* ldbrx */
2743 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
2744 /* stdbrx */
2745 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
2746 #endif  /* TARGET_PPC64 */
2747 
2748 /* sthbrx */
2749 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
2750 /* stwbrx */
2751 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
2752 
2753 /***                    Integer load and store multiple                    ***/
2754 
2755 /* lmw */
2756 static void gen_lmw(DisasContext *ctx)
2757 {
2758     TCGv t0;
2759     TCGv_i32 t1;
2760 
2761     if (ctx->le_mode) {
2762         gen_align_no_le(ctx);
2763         return;
2764     }
2765     gen_set_access_type(ctx, ACCESS_INT);
2766     t0 = tcg_temp_new();
2767     t1 = tcg_constant_i32(rD(ctx->opcode));
2768     gen_addr_imm_index(ctx, t0, 0);
2769     gen_helper_lmw(tcg_env, t0, t1);
2770 }
2771 
2772 /* stmw */
2773 static void gen_stmw(DisasContext *ctx)
2774 {
2775     TCGv t0;
2776     TCGv_i32 t1;
2777 
2778     if (ctx->le_mode) {
2779         gen_align_no_le(ctx);
2780         return;
2781     }
2782     gen_set_access_type(ctx, ACCESS_INT);
2783     t0 = tcg_temp_new();
2784     t1 = tcg_constant_i32(rS(ctx->opcode));
2785     gen_addr_imm_index(ctx, t0, 0);
2786     gen_helper_stmw(tcg_env, t0, t1);
2787 }
2788 
2789 /***                    Integer load and store strings                     ***/
2790 
2791 /* lswi */
2792 /*
2793  * The PowerPC32 specification says we must generate an exception if rA is
2794  * in the range of registers to be loaded.  On the other hand, IBM says
2795  * this is valid, but rA won't be loaded.  For now, I'll follow the
2796  * spec...
2797  */
2798 static void gen_lswi(DisasContext *ctx)
2799 {
2800     TCGv t0;
2801     TCGv_i32 t1, t2;
2802     int nb = NB(ctx->opcode);
2803     int start = rD(ctx->opcode);
2804     int ra = rA(ctx->opcode);
2805     int nr;
2806 
2807     if (ctx->le_mode) {
2808         gen_align_no_le(ctx);
2809         return;
2810     }
2811     if (nb == 0) {
2812         nb = 32;
2813     }
2814     nr = DIV_ROUND_UP(nb, 4);
2815     if (unlikely(lsw_reg_in_range(start, nr, ra))) {
2816         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
2817         return;
2818     }
2819     gen_set_access_type(ctx, ACCESS_INT);
2820     t0 = tcg_temp_new();
2821     gen_addr_register(ctx, t0);
2822     t1 = tcg_constant_i32(nb);
2823     t2 = tcg_constant_i32(start);
2824     gen_helper_lsw(tcg_env, t0, t1, t2);
2825 }
2826 
2827 /* lswx */
2828 static void gen_lswx(DisasContext *ctx)
2829 {
2830     TCGv t0;
2831     TCGv_i32 t1, t2, t3;
2832 
2833     if (ctx->le_mode) {
2834         gen_align_no_le(ctx);
2835         return;
2836     }
2837     gen_set_access_type(ctx, ACCESS_INT);
2838     t0 = tcg_temp_new();
2839     gen_addr_reg_index(ctx, t0);
2840     t1 = tcg_constant_i32(rD(ctx->opcode));
2841     t2 = tcg_constant_i32(rA(ctx->opcode));
2842     t3 = tcg_constant_i32(rB(ctx->opcode));
2843     gen_helper_lswx(tcg_env, t0, t1, t2, t3);
2844 }
2845 
2846 /* stswi */
2847 static void gen_stswi(DisasContext *ctx)
2848 {
2849     TCGv t0;
2850     TCGv_i32 t1, t2;
2851     int nb = NB(ctx->opcode);
2852 
2853     if (ctx->le_mode) {
2854         gen_align_no_le(ctx);
2855         return;
2856     }
2857     gen_set_access_type(ctx, ACCESS_INT);
2858     t0 = tcg_temp_new();
2859     gen_addr_register(ctx, t0);
2860     if (nb == 0) {
2861         nb = 32;
2862     }
2863     t1 = tcg_constant_i32(nb);
2864     t2 = tcg_constant_i32(rS(ctx->opcode));
2865     gen_helper_stsw(tcg_env, t0, t1, t2);
2866 }
2867 
2868 /* stswx */
2869 static void gen_stswx(DisasContext *ctx)
2870 {
2871     TCGv t0;
2872     TCGv_i32 t1, t2;
2873 
2874     if (ctx->le_mode) {
2875         gen_align_no_le(ctx);
2876         return;
2877     }
2878     gen_set_access_type(ctx, ACCESS_INT);
2879     t0 = tcg_temp_new();
2880     gen_addr_reg_index(ctx, t0);
2881     t1 = tcg_temp_new_i32();
2882     tcg_gen_trunc_tl_i32(t1, cpu_xer);
2883     tcg_gen_andi_i32(t1, t1, 0x7F);
2884     t2 = tcg_constant_i32(rS(ctx->opcode));
2885     gen_helper_stsw(tcg_env, t0, t1, t2);
2886 }
2887 
2888 #if !defined(CONFIG_USER_ONLY)
2889 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
2890 {
2891     TCGv_i32 t;
2892     TCGLabel *l;
2893 
2894     if (!ctx->lazy_tlb_flush) {
2895         return;
2896     }
2897     l = gen_new_label();
2898     t = tcg_temp_new_i32();
2899     tcg_gen_ld_i32(t, tcg_env, offsetof(CPUPPCState, tlb_need_flush));
2900     tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
2901     if (global) {
2902         gen_helper_check_tlb_flush_global(tcg_env);
2903     } else {
2904         gen_helper_check_tlb_flush_local(tcg_env);
2905     }
2906     gen_set_label(l);
2907     if (global) {
2908         /*
2909          * Global TLB flush uses async-work which must run before the
2910          * next instruction, so this must be the last in the TB.
2911          */
2912         ctx->base.is_jmp = DISAS_EXIT_UPDATE;
2913     }
2914 }
2915 #else
2916 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
2917 #endif
2918 
2919 /* isync */
2920 static void gen_isync(DisasContext *ctx)
2921 {
2922     /*
2923      * We need to check for a pending TLB flush. This can only happen in
2924      * kernel mode, however, so check MSR_PR.
2925      */
2926     if (!ctx->pr) {
2927         gen_check_tlb_flush(ctx, false);
2928     }
2929     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
2930     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
2931 }
2932 
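/*
 * Load-and-reserve: besides the load itself, record the reservation
 * address, its length and the value that was loaded, so the matching
 * store-conditional can be implemented as a compare-and-swap against
 * that value.
 */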
2933 static void gen_load_locked(DisasContext *ctx, MemOp memop)
2934 {
2935     TCGv gpr = cpu_gpr[rD(ctx->opcode)];
2936     TCGv t0 = tcg_temp_new();
2937 
2938     gen_set_access_type(ctx, ACCESS_RES);
2939     gen_addr_reg_index(ctx, t0);
2940     tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, DEF_MEMOP(memop) | MO_ALIGN);
2941     tcg_gen_mov_tl(cpu_reserve, t0);
2942     tcg_gen_movi_tl(cpu_reserve_length, memop_size(memop));
2943     tcg_gen_mov_tl(cpu_reserve_val, gpr);
2944 }
2945 
2946 #define LARX(name, memop)                  \
2947 static void gen_##name(DisasContext *ctx)  \
2948 {                                          \
2949     gen_load_locked(ctx, memop);           \
2950 }
2951 
2952 /* lbarx, lharx & lwarx */
2953 LARX(lbarx, MO_UB)
2954 LARX(lharx, MO_UW)
2955 LARX(lwarx, MO_UL)
2956 
2957 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
2958                                       TCGv EA, TCGCond cond, int addend)
2959 {
2960     TCGv t = tcg_temp_new();
2961     TCGv t2 = tcg_temp_new();
2962     TCGv u = tcg_temp_new();
2963 
2964     tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
2965     tcg_gen_addi_tl(t2, EA, memop_size(memop));
2966     tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
2967     tcg_gen_addi_tl(u, t, addend);
2968 
2969     /* E.g. for fetch and increment bounded... */
2970     /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
2971     tcg_gen_movcond_tl(cond, u, t, t2, u, t);
2972     tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);
2973 
2974     /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
2975     /* RT = (t != t2 ? t : 1 << (s * 8 - 1)) */
2976                        tcg_constant_tl(1 << (memop_size(memop) * 8 - 1)));
2977 }
2978 
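/*
 * lwat/ldat: the FC field selects the atomic read-modify-write
 * operation.  Most variants map directly onto TCG atomic ops; the
 * compare-and-swap-not-equal and fetch-and-increment/decrement forms
 * have no TCG primitive, so with parallel TBs they bail out through
 * gen_helper_exit_atomic() and are re-run under the exclusive lock.
 */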
2979 static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
2980 {
2981     uint32_t gpr_FC = FC(ctx->opcode);
2982     TCGv EA = tcg_temp_new();
2983     int rt = rD(ctx->opcode);
2984     bool need_serial;
2985     TCGv src, dst;
2986 
2987     gen_addr_register(ctx, EA);
2988     dst = cpu_gpr[rt];
2989     src = cpu_gpr[(rt + 1) & 31];
2990 
2991     need_serial = false;
2992     memop |= MO_ALIGN;
2993     switch (gpr_FC) {
2994     case 0: /* Fetch and add */
2995         tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
2996         break;
2997     case 1: /* Fetch and xor */
2998         tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
2999         break;
3000     case 2: /* Fetch and or */
3001         tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
3002         break;
3003     case 3: /* Fetch and 'and' */
3004         tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
3005         break;
3006     case 4:  /* Fetch and max unsigned */
3007         tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
3008         break;
3009     case 5:  /* Fetch and max signed */
3010         tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
3011         break;
3012     case 6:  /* Fetch and min unsigned */
3013         tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
3014         break;
3015     case 7:  /* Fetch and min signed */
3016         tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
3017         break;
3018     case 8: /* Swap */
3019         tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
3020         break;
3021 
3022     case 16: /* Compare and swap not equal */
3023         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3024             need_serial = true;
3025         } else {
3026             TCGv t0 = tcg_temp_new();
3027             TCGv t1 = tcg_temp_new();
3028 
3029             tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
3030             if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
3031                 tcg_gen_mov_tl(t1, src);
3032             } else {
3033                 tcg_gen_ext32u_tl(t1, src);
3034             }
3035             tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
3036                                cpu_gpr[(rt + 2) & 31], t0);
3037             tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
3038             tcg_gen_mov_tl(dst, t0);
3039         }
3040         break;
3041 
3042     case 24: /* Fetch and increment bounded */
3043         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3044             need_serial = true;
3045         } else {
3046             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
3047         }
3048         break;
3049     case 25: /* Fetch and increment equal */
3050         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3051             need_serial = true;
3052         } else {
3053             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
3054         }
3055         break;
3056     case 28: /* Fetch and decrement bounded */
3057         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3058             need_serial = true;
3059         } else {
3060             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
3061         }
3062         break;
3063 
3064     default:
3065         /* invoke data storage error handler */
3066         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3067     }
3068 
3069     if (need_serial) {
3070         /* Restart with exclusive lock.  */
3071         gen_helper_exit_atomic(tcg_env);
3072         ctx->base.is_jmp = DISAS_NORETURN;
3073     }
3074 }
3075 
3076 static void gen_lwat(DisasContext *ctx)
3077 {
3078     gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
3079 }
3080 
3081 #ifdef TARGET_PPC64
3082 static void gen_ldat(DisasContext *ctx)
3083 {
3084     gen_ld_atomic(ctx, DEF_MEMOP(MO_UQ));
3085 }
3086 #endif
3087 
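/*
 * stwat/stdat: as for the loads, FC selects the operation; the value
 * fetched back by the TCG atomic op is discarded.  The "store twin"
 * form stores the source to both locations only when the two currently
 * hold equal values, and needs the exclusive lock when TBs run in
 * parallel.
 */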
3088 static void gen_st_atomic(DisasContext *ctx, MemOp memop)
3089 {
3090     uint32_t gpr_FC = FC(ctx->opcode);
3091     TCGv EA = tcg_temp_new();
3092     TCGv src, discard;
3093 
3094     gen_addr_register(ctx, EA);
3095     src = cpu_gpr[rD(ctx->opcode)];
3096     discard = tcg_temp_new();
3097 
3098     memop |= MO_ALIGN;
3099     switch (gpr_FC) {
3100     case 0: /* add and Store */
3101         tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3102         break;
3103     case 1: /* xor and Store */
3104         tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3105         break;
3106     case 2: /* Or and Store */
3107         tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3108         break;
3109     case 3: /* 'and' and Store */
3110         tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3111         break;
3112     case 4:  /* Store max unsigned */
3113         tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3114         break;
3115     case 5:  /* Store max signed */
3116         tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3117         break;
3118     case 6:  /* Store min unsigned */
3119         tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3120         break;
3121     case 7:  /* Store min signed */
3122         tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3123         break;
3124     case 24: /* Store twin  */
3125         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3126             /* Restart with exclusive lock.  */
3127             gen_helper_exit_atomic(tcg_env);
3128             ctx->base.is_jmp = DISAS_NORETURN;
3129         } else {
3130             TCGv t = tcg_temp_new();
3131             TCGv t2 = tcg_temp_new();
3132             TCGv s = tcg_temp_new();
3133             TCGv s2 = tcg_temp_new();
3134             TCGv ea_plus_s = tcg_temp_new();
3135 
3136             tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3137             tcg_gen_addi_tl(ea_plus_s, EA, memop_size(memop));
3138             tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
3139             tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
3140             tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
3141             tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
3142             tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);
3143         }
3144         break;
3145     default:
3146         /* invoke data storage error handler */
3147         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3148     }
3149 }
3150 
3151 static void gen_stwat(DisasContext *ctx)
3152 {
3153     gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
3154 }
3155 
3156 #ifdef TARGET_PPC64
3157 static void gen_stdat(DisasContext *ctx)
3158 {
3159     gen_st_atomic(ctx, DEF_MEMOP(MO_UQ));
3160 }
3161 #endif
3162 
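/*
 * Store-conditional: fail straight away if the effective address or the
 * access size does not match the current reservation; otherwise do a
 * cmpxchg against the value remembered at reservation time.  CR0 is set
 * to SO plus EQ on success, and the reservation is cleared either way.
 */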
3163 static void gen_conditional_store(DisasContext *ctx, MemOp memop)
3164 {
3165     TCGLabel *lfail;
3166     TCGv EA;
3167     TCGv cr0;
3168     TCGv t0;
3169     int rs = rS(ctx->opcode);
3170 
3171     lfail = gen_new_label();
3172     EA = tcg_temp_new();
3173     cr0 = tcg_temp_new();
3174     t0 = tcg_temp_new();
3175 
3176     tcg_gen_mov_tl(cr0, cpu_so);
3177     gen_set_access_type(ctx, ACCESS_RES);
3178     gen_addr_reg_index(ctx, EA);
3179     tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lfail);
3180     tcg_gen_brcondi_tl(TCG_COND_NE, cpu_reserve_length, memop_size(memop), lfail);
3181 
3182     tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
3183                               cpu_gpr[rs], ctx->mem_idx,
3184                               DEF_MEMOP(memop) | MO_ALIGN);
3185     tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
3186     tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
3187     tcg_gen_or_tl(cr0, cr0, t0);
3188 
3189     gen_set_label(lfail);
3190     tcg_gen_trunc_tl_i32(cpu_crf[0], cr0);
3191     tcg_gen_movi_tl(cpu_reserve, -1);
3192 }
3193 
3194 #define STCX(name, memop)                  \
3195 static void gen_##name(DisasContext *ctx)  \
3196 {                                          \
3197     gen_conditional_store(ctx, memop);     \
3198 }
3199 
3200 STCX(stbcx_, MO_UB)
3201 STCX(sthcx_, MO_UW)
3202 STCX(stwcx_, MO_UL)
3203 
3204 #if defined(TARGET_PPC64)
3205 /* ldarx */
3206 LARX(ldarx, MO_UQ)
3207 /* stdcx. */
3208 STCX(stdcx_, MO_UQ)
3209 
3210 /* lqarx */
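/*
 * Load quadword and reserve: load an aligned 16-byte value into the
 * even/odd GPR pair RD/RD+1 and establish a 16-byte reservation on the
 * address. RD must be even and must differ from RA and RB, otherwise the
 * form is invalid.
 */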
3211 static void gen_lqarx(DisasContext *ctx)
3212 {
3213     int rd = rD(ctx->opcode);
3214     TCGv EA, hi, lo;
3215     TCGv_i128 t16;
3216 
3217     if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
3218                  (rd == rB(ctx->opcode)))) {
3219         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3220         return;
3221     }
3222 
3223     gen_set_access_type(ctx, ACCESS_RES);
3224     EA = tcg_temp_new();
3225     gen_addr_reg_index(ctx, EA);
3226 
3227     /* Note that the low part is always in RD+1, even in LE mode.  */
3228     lo = cpu_gpr[rd + 1];
3229     hi = cpu_gpr[rd];
3230 
3231     t16 = tcg_temp_new_i128();
3232     tcg_gen_qemu_ld_i128(t16, EA, ctx->mem_idx, DEF_MEMOP(MO_128 | MO_ALIGN));
3233     tcg_gen_extr_i128_i64(lo, hi, t16);
3234 
3235     tcg_gen_mov_tl(cpu_reserve, EA);
3236     tcg_gen_movi_tl(cpu_reserve_length, 16);
3237     tcg_gen_st_tl(hi, tcg_env, offsetof(CPUPPCState, reserve_val));
3238     tcg_gen_st_tl(lo, tcg_env, offsetof(CPUPPCState, reserve_val2));
3239 }
3240 
3241 /* stqcx. */
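/*
 * Store quadword conditional: RS must be even. The store succeeds only if
 * the 16-byte reservation from lqarx is still valid and memory still holds
 * the reserved value (checked with a 128-bit cmpxchg). CR0[EQ] reports
 * success and the reservation is cleared either way.
 */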
3242 static void gen_stqcx_(DisasContext *ctx)
3243 {
3244     TCGLabel *lfail;
3245     TCGv EA, t0, t1;
3246     TCGv cr0;
3247     TCGv_i128 cmp, val;
3248     int rs = rS(ctx->opcode);
3249 
3250     if (unlikely(rs & 1)) {
3251         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3252         return;
3253     }
3254 
3255     lfail = gen_new_label();
3256     EA = tcg_temp_new();
3257     cr0 = tcg_temp_new();
3258 
3259     tcg_gen_mov_tl(cr0, cpu_so);
3260     gen_set_access_type(ctx, ACCESS_RES);
3261     gen_addr_reg_index(ctx, EA);
3262     tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lfail);
3263     tcg_gen_brcondi_tl(TCG_COND_NE, cpu_reserve_length, 16, lfail);
3264 
3265     cmp = tcg_temp_new_i128();
3266     val = tcg_temp_new_i128();
3267 
3268     tcg_gen_concat_i64_i128(cmp, cpu_reserve_val2, cpu_reserve_val);
3269 
3270     /* Note that the low part is always in RS+1, even in LE mode.  */
3271     tcg_gen_concat_i64_i128(val, cpu_gpr[rs + 1], cpu_gpr[rs]);
3272 
3273     tcg_gen_atomic_cmpxchg_i128(val, cpu_reserve, cmp, val, ctx->mem_idx,
3274                                 DEF_MEMOP(MO_128 | MO_ALIGN));
3275 
3276     t0 = tcg_temp_new();
3277     t1 = tcg_temp_new();
3278     tcg_gen_extr_i128_i64(t1, t0, val);
3279 
3280     tcg_gen_xor_tl(t1, t1, cpu_reserve_val2);
3281     tcg_gen_xor_tl(t0, t0, cpu_reserve_val);
3282     tcg_gen_or_tl(t0, t0, t1);
3283 
3284     tcg_gen_setcondi_tl(TCG_COND_EQ, t0, t0, 0);
3285     tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
3286     tcg_gen_or_tl(cr0, cr0, t0);
3287 
3288     gen_set_label(lfail);
3289     tcg_gen_trunc_tl_i32(cpu_crf[0], cr0);
3290     tcg_gen_movi_tl(cpu_reserve, -1);
3291 }
3292 #endif /* defined(TARGET_PPC64) */
3293 
3294 /* wait */
3295 static void gen_wait(DisasContext *ctx)
3296 {
3297     uint32_t wc;
3298 
3299     if (ctx->insns_flags & PPC_WAIT) {
3300         /* v2.03-v2.07 define an older incompatible 'wait' encoding. */
3301 
3302         if (ctx->insns_flags2 & PPC2_PM_ISA206) {
3303             /* v2.06 introduced the WC field. WC > 0 may be treated as no-op. */
3304             wc = WC(ctx->opcode);
3305         } else {
3306             wc = 0;
3307         }
3308 
3309     } else if (ctx->insns_flags2 & PPC2_ISA300) {
3310         /* v3.0 defines a new 'wait' encoding. */
3311         wc = WC(ctx->opcode);
3312         if (ctx->insns_flags2 & PPC2_ISA310) {
3313             uint32_t pl = PL(ctx->opcode);
3314 
3315             /* WC 1,2 may be treated as no-op. WC 3 is reserved. */
3316             if (wc == 3) {
3317                 gen_invalid(ctx);
3318                 return;
3319             }
3320 
3321             /* PL 1-3 are reserved. If WC=2 then the insn is treated as no-op. */
3322             if (pl > 0 && wc != 2) {
3323                 gen_invalid(ctx);
3324                 return;
3325             }
3326 
3327         } else { /* ISA300 */
3328             /* WC 1-3 are reserved */
3329             if (wc > 0) {
3330                 gen_invalid(ctx);
3331                 return;
3332             }
3333         }
3334 
3335     } else {
3336         warn_report("wait instruction decoded with wrong ISA flags.");
3337         gen_invalid(ctx);
3338         return;
3339     }
3340 
3341     /*
3342      * wait without WC field or with WC=0 waits for an exception / interrupt
3343      * to occur.
3344      */
3345     if (wc == 0) {
3346         TCGv_i32 t0 = tcg_constant_i32(1);
3347         tcg_gen_st_i32(t0, tcg_env,
3348                        -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
3349         /* Stop translation, as the CPU is supposed to sleep from now */
3350         gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3351     }
3352 
3353     /*
3354      * Other wait types must not just wait until an exception occurs because
3355      * ignoring their other wake-up conditions could cause a hang.
3356      *
3357      * For v2.06 and 2.07, wc=1,2,3 are architected but may be implemented as
3358      * no-ops.
3359      *
3360      * wc=1 and wc=3 explicitly allow the instruction to be treated as a no-op.
3361      *
3362      * wc=2 waits for an implementation-specific condition, which could be
3363      * always true, so it can be implemented as a no-op.
3364      *
3365      * For v3.1, wc=1,2 are architected but may be implemented as no-ops.
3366      *
3367      * wc=1 (waitrsv) waits for an exception or a reservation to be lost.
3368      * Reservation-loss may have implementation-specific conditions, so it
3369      * can be implemented as a no-op.
3370      *
3371      * wc=2 waits for an exception or an amount of time to pass. This
3372      * amount is implementation-specific so it can be implemented as a
3373      * no-op.
3374      *
3375      * ISA v3.1 allows for execution to resume "in the rare case of
3376      * an implementation-dependent event", so software must not depend
3377      * on the architected resumption condition ever becoming true, and
3378      * no-op implementations should therefore be architecturally correct
3379      * (if suboptimal).
3380      */
3381 }
3382 
3383 #if defined(TARGET_PPC64)
3384 static void gen_doze(DisasContext *ctx)
3385 {
3386 #if defined(CONFIG_USER_ONLY)
3387     GEN_PRIV(ctx);
3388 #else
3389     TCGv_i32 t;
3390 
3391     CHK_HV(ctx);
3392     translator_io_start(&ctx->base);
3393     t = tcg_constant_i32(PPC_PM_DOZE);
3394     gen_helper_pminsn(tcg_env, t);
3395     /* Stop translation, as the CPU is supposed to sleep from now */
3396     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3397 #endif /* defined(CONFIG_USER_ONLY) */
3398 }
3399 
3400 static void gen_nap(DisasContext *ctx)
3401 {
3402 #if defined(CONFIG_USER_ONLY)
3403     GEN_PRIV(ctx);
3404 #else
3405     TCGv_i32 t;
3406 
3407     CHK_HV(ctx);
3408     translator_io_start(&ctx->base);
3409     t = tcg_constant_i32(PPC_PM_NAP);
3410     gen_helper_pminsn(tcg_env, t);
3411     /* Stop translation, as the CPU is supposed to sleep from now */
3412     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3413 #endif /* defined(CONFIG_USER_ONLY) */
3414 }
3415 
3416 static void gen_stop(DisasContext *ctx)
3417 {
3418 #if defined(CONFIG_USER_ONLY)
3419     GEN_PRIV(ctx);
3420 #else
3421     TCGv_i32 t;
3422 
3423     CHK_HV(ctx);
3424     translator_io_start(&ctx->base);
3425     t = tcg_constant_i32(PPC_PM_STOP);
3426     gen_helper_pminsn(tcg_env, t);
3427     /* Stop translation, as the CPU is supposed to sleep from now */
3428     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3429 #endif /* defined(CONFIG_USER_ONLY) */
3430 }
3431 
3432 static void gen_sleep(DisasContext *ctx)
3433 {
3434 #if defined(CONFIG_USER_ONLY)
3435     GEN_PRIV(ctx);
3436 #else
3437     TCGv_i32 t;
3438 
3439     CHK_HV(ctx);
3440     translator_io_start(&ctx->base);
3441     t = tcg_constant_i32(PPC_PM_SLEEP);
3442     gen_helper_pminsn(tcg_env, t);
3443     /* Stop translation, as the CPU is supposed to sleep from now */
3444     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3445 #endif /* defined(CONFIG_USER_ONLY) */
3446 }
3447 
3448 static void gen_rvwinkle(DisasContext *ctx)
3449 {
3450 #if defined(CONFIG_USER_ONLY)
3451     GEN_PRIV(ctx);
3452 #else
3453     TCGv_i32 t;
3454 
3455     CHK_HV(ctx);
3456     translator_io_start(&ctx->base);
3457     t = tcg_constant_i32(PPC_PM_RVWINKLE);
3458     gen_helper_pminsn(tcg_env, t);
3459     /* Stop translation, as the CPU is supposed to sleep from now */
3460     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3461 #endif /* defined(CONFIG_USER_ONLY) */
3462 }
3463 
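/*
 * Append one entry to the Branch History Rolling Buffer: store 'value' at
 * 'base' + 'offset', advance the offset by one 8-byte entry and wrap it
 * with 'mask'. The updated offset is returned so callers can chain writes.
 */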
3464 static inline TCGv gen_write_bhrb(TCGv_ptr base, TCGv offset, TCGv mask, TCGv value)
3465 {
3466     TCGv_ptr tmp = tcg_temp_new_ptr();
3467 
3468     /* add base and offset to get address of bhrb entry */
3469     tcg_gen_add_ptr(tmp, base, (TCGv_ptr)offset);
3470 
3471     /* store value into bhrb at bhrb_offset */
3472     tcg_gen_st_i64(value, tmp, 0);
3473 
3474     /* add 8 to current bhrb_offset */
3475     tcg_gen_addi_tl(offset, offset, 8);
3476 
3477     /* apply offset mask */
3478     tcg_gen_and_tl(offset, offset, mask);
3479 
3480     return offset;
3481 }
3482 #endif /* #if defined(TARGET_PPC64) */
3483 
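/*
 * Record a branch for CFAR and the BHRB. When the CPU has a CFAR it is
 * updated with the branch address; a BHRB entry is written only if BHRB
 * recording is enabled and the instruction type passes bhrb_filter.
 * XL-form branches additionally record the target address with the 'T'
 * bit set.
 */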
3484 static inline void gen_update_branch_history(DisasContext *ctx,
3485                                              target_ulong nip,
3486                                              TCGv target,
3487                                              target_long inst_type)
3488 {
3489 #if defined(TARGET_PPC64)
3490     TCGv_ptr base;
3491     TCGv tmp;
3492     TCGv offset;
3493     TCGv mask;
3494     TCGLabel *no_update;
3495 
3496     if (ctx->has_cfar) {
3497         tcg_gen_movi_tl(cpu_cfar, nip);
3498     }
3499 
3500     if (!ctx->has_bhrb ||
3501         !ctx->bhrb_enable ||
3502         inst_type == BHRB_TYPE_NORECORD) {
3503         return;
3504     }
3505 
3506     tmp = tcg_temp_new();
3507     no_update = gen_new_label();
3508 
3509     /* check for bhrb filtering */
3510     tcg_gen_ld_tl(tmp, tcg_env, offsetof(CPUPPCState, bhrb_filter));
3511     tcg_gen_andi_tl(tmp, tmp, inst_type);
3512     tcg_gen_brcondi_tl(TCG_COND_EQ, tmp, 0, no_update);
3513 
3514     base = tcg_temp_new_ptr();
3515     offset = tcg_temp_new();
3516     mask = tcg_temp_new();
3517 
3518     /* load bhrb base address */
3519     tcg_gen_ld_ptr(base, tcg_env, offsetof(CPUPPCState, bhrb_base));
3520 
3521     /* load current bhrb_offset */
3522     tcg_gen_ld_tl(offset, tcg_env, offsetof(CPUPPCState, bhrb_offset));
3523 
3524     /* load a BHRB offset mask */
3525     tcg_gen_ld_tl(mask, tcg_env, offsetof(CPUPPCState, bhrb_offset_mask));
3526 
3527     offset = gen_write_bhrb(base, offset, mask, tcg_constant_i64(nip));
3528 
3529     /* Also record the target address for XL-Form branches */
3530     if (inst_type & BHRB_TYPE_XL_FORM) {
3531 
3532         /* Set the 'T' bit for target entries */
3533         tcg_gen_ori_tl(tmp, target, 0x2);
3534 
3535         offset = gen_write_bhrb(base, offset, mask, tmp);
3536     }
3537 
3538     /* save updated bhrb_offset for next time */
3539     tcg_gen_st_tl(offset, tcg_env, offsetof(CPUPPCState, bhrb_offset));
3540 
3541     gen_set_label(no_update);
3542 #endif
3543 }
3544 
3545 #if defined(TARGET_PPC64)
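/*
 * Account the instructions of this translation block in the PMU: when only
 * PMC5/PMC6 are counting, PMC5 is incremented inline (with an overflow
 * check if enabled), otherwise the insns_inc() helper does the full update.
 */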
3546 static void pmu_count_insns(DisasContext *ctx)
3547 {
3548     /*
3549      * Do not bother calling the helper if the PMU isn't counting
3550      * instructions.
3551      */
3552     if (!ctx->pmu_insn_cnt) {
3553         return;
3554     }
3555 
3556  #if !defined(CONFIG_USER_ONLY)
3557     TCGLabel *l;
3558     TCGv t0;
3559 
3560     /*
3561      * The PMU insns_inc() helper stops the internal PMU timer if a
3562      * counter overflow happens. In that case, if the guest is
3563      * running with icount and we do not handle it beforehand,
3564      * the helper can trigger a 'bad icount read'.
3565      */
3566     translator_io_start(&ctx->base);
3567 
3568     /* Avoid helper calls when only PMC5-6 are enabled. */
3569     if (!ctx->pmc_other) {
3570         l = gen_new_label();
3571         t0 = tcg_temp_new();
3572 
3573         gen_load_spr(t0, SPR_POWER_PMC5);
3574         tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
3575         gen_store_spr(SPR_POWER_PMC5, t0);
3576         /* Check for overflow, if it's enabled */
3577         if (ctx->mmcr0_pmcjce) {
3578             tcg_gen_brcondi_tl(TCG_COND_LT, t0, PMC_COUNTER_NEGATIVE_VAL, l);
3579             gen_helper_handle_pmc5_overflow(tcg_env);
3580         }
3581 
3582         gen_set_label(l);
3583     } else {
3584         gen_helper_insns_inc(tcg_env, tcg_constant_i32(ctx->base.num_insns));
3585     }
3586   #else
3587     /*
3588      * User mode can read (but not write) PMC5 and start/stop
3589      * the PMU via MMCR0_FC. In this case just increment
3590      * PMC5 with base.num_insns.
3591      */
3592     TCGv t0 = tcg_temp_new();
3593 
3594     gen_load_spr(t0, SPR_POWER_PMC5);
3595     tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
3596     gen_store_spr(SPR_POWER_PMC5, t0);
3597   #endif /* #if !defined(CONFIG_USER_ONLY) */
3598 }
3599 #else
3600 static void pmu_count_insns(DisasContext *ctx)
3601 {
3602     return;
3603 }
3604 #endif /* #if defined(TARGET_PPC64) */
3605 
3606 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
3607 {
3608     if (unlikely(ctx->singlestep_enabled)) {
3609         return false;
3610     }
3611     return translator_use_goto_tb(&ctx->base, dest);
3612 }
3613 
3614 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
3615 {
3616     if (unlikely(ctx->singlestep_enabled)) {
3617         gen_debug_exception(ctx, false);
3618     } else {
3619         /*
3620          * tcg_gen_lookup_and_goto_ptr will exit the TB if
3621          * CF_NO_GOTO_PTR is set. Count insns now.
3622          */
3623         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
3624             pmu_count_insns(ctx);
3625         }
3626 
3627         tcg_gen_lookup_and_goto_ptr();
3628     }
3629 }
3630 
3631 /***                                Branch                                 ***/
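/*
 * Jump to 'dest': use a direct TB link when use_goto_tb() allows it,
 * otherwise update NIP and fall back to an indirect TB lookup. The two
 * low bits of the destination are always cleared.
 */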
3632 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
3633 {
3634     if (NARROW_MODE(ctx)) {
3635         dest = (uint32_t) dest;
3636     }
3637     if (use_goto_tb(ctx, dest)) {
3638         pmu_count_insns(ctx);
3639         tcg_gen_goto_tb(n);
3640         tcg_gen_movi_tl(cpu_nip, dest & ~3);
3641         tcg_gen_exit_tb(ctx->base.tb, n);
3642     } else {
3643         tcg_gen_movi_tl(cpu_nip, dest & ~3);
3644         gen_lookup_and_goto_ptr(ctx);
3645     }
3646 }
3647 
3648 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
3649 {
3650     if (NARROW_MODE(ctx)) {
3651         nip = (uint32_t)nip;
3652     }
3653     tcg_gen_movi_tl(cpu_lr, nip);
3654 }
3655 
3656 /* b ba bl bla */
3657 static void gen_b(DisasContext *ctx)
3658 {
3659     target_ulong li, target;
3660 
3661     /* sign extend LI */
3662     li = LI(ctx->opcode);
3663     li = (li ^ 0x02000000) - 0x02000000;
3664     if (likely(AA(ctx->opcode) == 0)) {
3665         target = ctx->cia + li;
3666     } else {
3667         target = li;
3668     }
3669     if (LK(ctx->opcode)) {
3670         gen_setlr(ctx, ctx->base.pc_next);
3671         gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_CALL);
3672     } else {
3673         gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_OTHER);
3674     }
3675     gen_goto_tb(ctx, 0, target);
3676     ctx->base.is_jmp = DISAS_NORETURN;
3677 }
3678 
3679 #define BCOND_IM  0
3680 #define BCOND_LR  1
3681 #define BCOND_CTR 2
3682 #define BCOND_TAR 3
3683 
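/*
 * Conditional branches (bc/bclr/bcctr/bctar). The BO field selects the
 * tests: when BO & 0x4 is clear, CTR is decremented and tested against
 * zero (BO & 0x2 selects branching on CTR == 0 rather than CTR != 0);
 * when BO & 0x10 is clear, CR bit BI is tested (BO & 0x8 selects branching
 * on the bit being set rather than clear). The branch is taken only if
 * every selected test passes.
 */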
3684 static void gen_bcond(DisasContext *ctx, int type)
3685 {
3686     uint32_t bo = BO(ctx->opcode);
3687     TCGLabel *l1;
3688     TCGv target;
3689     target_long bhrb_type = BHRB_TYPE_OTHER;
3690 
3691     if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
3692         target = tcg_temp_new();
3693         if (type == BCOND_CTR) {
3694             tcg_gen_mov_tl(target, cpu_ctr);
3695         } else if (type == BCOND_TAR) {
3696             gen_load_spr(target, SPR_TAR);
3697         } else {
3698             tcg_gen_mov_tl(target, cpu_lr);
3699         }
3700         if (!LK(ctx->opcode)) {
3701             bhrb_type |= BHRB_TYPE_INDIRECT;
3702         }
3703         bhrb_type |= BHRB_TYPE_XL_FORM;
3704     } else {
3705         target = NULL;
3706     }
3707     if (LK(ctx->opcode)) {
3708         gen_setlr(ctx, ctx->base.pc_next);
3709         bhrb_type |= BHRB_TYPE_CALL;
3710     }
3711     l1 = gen_new_label();
3712     if ((bo & 0x4) == 0) {
3713         /* Decrement and test CTR */
3714         TCGv temp = tcg_temp_new();
3715 
3716         if (type == BCOND_CTR) {
3717             /*
3718              * All ISAs up to v3 describe this form of bcctr as invalid but
3719              * some processors, i.e. 64-bit server processors compliant with
3720              * arch 2.x, do implement a "test and decrement" logic instead,
3721              * as described in their respective UMs. This logic involves CTR
3722              * to act as both the branch target and a counter, which makes
3723              * it basically useless and thus never used in real code.
3724              *
3725              * This form was hence chosen to trigger extra micro-architectural
3726              * side-effect on real HW needed for the Spectre v2 workaround.
3727              * It is up to guests that implement such a workaround, i.e. Linux, to
3728              * use this form in a way it just triggers the side-effect without
3729              * doing anything else harmful.
3730              */
3731             if (unlikely(!is_book3s_arch2x(ctx))) {
3732                 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3733                 return;
3734             }
3735 
3736             if (NARROW_MODE(ctx)) {
3737                 tcg_gen_ext32u_tl(temp, cpu_ctr);
3738             } else {
3739                 tcg_gen_mov_tl(temp, cpu_ctr);
3740             }
3741             if (bo & 0x2) {
3742                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3743             } else {
3744                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3745             }
3746             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3747         } else {
3748             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3749             if (NARROW_MODE(ctx)) {
3750                 tcg_gen_ext32u_tl(temp, cpu_ctr);
3751             } else {
3752                 tcg_gen_mov_tl(temp, cpu_ctr);
3753             }
3754             if (bo & 0x2) {
3755                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3756             } else {
3757                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3758             }
3759         }
3760         bhrb_type |= BHRB_TYPE_COND;
3761     }
3762     if ((bo & 0x10) == 0) {
3763         /* Test CR */
3764         uint32_t bi = BI(ctx->opcode);
3765         uint32_t mask = 0x08 >> (bi & 0x03);
3766         TCGv_i32 temp = tcg_temp_new_i32();
3767 
3768         if (bo & 0x8) {
3769             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3770             tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3771         } else {
3772             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3773             tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3774         }
3775         bhrb_type |= BHRB_TYPE_COND;
3776     }
3777 
3778     gen_update_branch_history(ctx, ctx->cia, target, bhrb_type);
3779 
3780     if (type == BCOND_IM) {
3781         target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3782         if (likely(AA(ctx->opcode) == 0)) {
3783             gen_goto_tb(ctx, 0, ctx->cia + li);
3784         } else {
3785             gen_goto_tb(ctx, 0, li);
3786         }
3787     } else {
3788         if (NARROW_MODE(ctx)) {
3789             tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3790         } else {
3791             tcg_gen_andi_tl(cpu_nip, target, ~3);
3792         }
3793         gen_lookup_and_goto_ptr(ctx);
3794     }
3795     if ((bo & 0x14) != 0x14) {
3796         /* fallthrough case */
3797         gen_set_label(l1);
3798         gen_goto_tb(ctx, 1, ctx->base.pc_next);
3799     }
3800     ctx->base.is_jmp = DISAS_NORETURN;
3801 }
3802 
3803 static void gen_bc(DisasContext *ctx)
3804 {
3805     gen_bcond(ctx, BCOND_IM);
3806 }
3807 
3808 static void gen_bcctr(DisasContext *ctx)
3809 {
3810     gen_bcond(ctx, BCOND_CTR);
3811 }
3812 
3813 static void gen_bclr(DisasContext *ctx)
3814 {
3815     gen_bcond(ctx, BCOND_LR);
3816 }
3817 
3818 static void gen_bctar(DisasContext *ctx)
3819 {
3820     gen_bcond(ctx, BCOND_TAR);
3821 }
3822 
3823 /***                      Condition register logical                       ***/
3824 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
3825 static void glue(gen_, name)(DisasContext *ctx)                               \
3826 {                                                                             \
3827     uint8_t bitmask;                                                          \
3828     int sh;                                                                   \
3829     TCGv_i32 t0, t1;                                                          \
3830     sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
3831     t0 = tcg_temp_new_i32();                                                  \
3832     if (sh > 0)                                                               \
3833         tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
3834     else if (sh < 0)                                                          \
3835         tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
3836     else                                                                      \
3837         tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
3838     t1 = tcg_temp_new_i32();                                                  \
3839     sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
3840     if (sh > 0)                                                               \
3841         tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
3842     else if (sh < 0)                                                          \
3843         tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
3844     else                                                                      \
3845         tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
3846     tcg_op(t0, t0, t1);                                                       \
3847     bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
3848     tcg_gen_andi_i32(t0, t0, bitmask);                                        \
3849     tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
3850     tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
3851 }
3852 
3853 /* crand */
3854 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3855 /* crandc */
3856 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3857 /* creqv */
3858 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3859 /* crnand */
3860 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3861 /* crnor */
3862 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3863 /* cror */
3864 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3865 /* crorc */
3866 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3867 /* crxor */
3868 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
3869 
3870 /* mcrf */
3871 static void gen_mcrf(DisasContext *ctx)
3872 {
3873     tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3874 }
3875 
3876 /***                           System linkage                              ***/
3877 
3878 /* rfi (supervisor only) */
3879 static void gen_rfi(DisasContext *ctx)
3880 {
3881 #if defined(CONFIG_USER_ONLY)
3882     GEN_PRIV(ctx);
3883 #else
3884     /*
3885      * This instruction doesn't exist anymore on 64-bit server
3886      * processors compliant with arch 2.x
3887      */
3888     if (is_book3s_arch2x(ctx)) {
3889         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3890         return;
3891     }
3892     /* Restore CPU state */
3893     CHK_SV(ctx);
3894     translator_io_start(&ctx->base);
3895     gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_NORECORD);
3896     gen_helper_rfi(tcg_env);
3897     ctx->base.is_jmp = DISAS_EXIT;
3898 #endif
3899 }
3900 
3901 #if defined(TARGET_PPC64)
3902 static void gen_rfid(DisasContext *ctx)
3903 {
3904 #if defined(CONFIG_USER_ONLY)
3905     GEN_PRIV(ctx);
3906 #else
3907     /* Restore CPU state */
3908     CHK_SV(ctx);
3909     translator_io_start(&ctx->base);
3910     gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_NORECORD);
3911     gen_helper_rfid(tcg_env);
3912     ctx->base.is_jmp = DISAS_EXIT;
3913 #endif
3914 }
3915 
3916 #if !defined(CONFIG_USER_ONLY)
3917 static void gen_rfscv(DisasContext *ctx)
3918 {
3919 #if defined(CONFIG_USER_ONLY)
3920     GEN_PRIV(ctx);
3921 #else
3922     /* Restore CPU state */
3923     CHK_SV(ctx);
3924     translator_io_start(&ctx->base);
3925     gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_NORECORD);
3926     gen_helper_rfscv(tcg_env);
3927     ctx->base.is_jmp = DISAS_EXIT;
3928 #endif
3929 }
3930 #endif
3931 
3932 static void gen_hrfid(DisasContext *ctx)
3933 {
3934 #if defined(CONFIG_USER_ONLY)
3935     GEN_PRIV(ctx);
3936 #else
3937     /* Restore CPU state */
3938     CHK_HV(ctx);
3939     translator_io_start(&ctx->base);
3940     gen_helper_hrfid(tcg_env);
3941     ctx->base.is_jmp = DISAS_EXIT;
3942 #endif
3943 }
3944 #endif
3945 
3946 /* sc */
3947 #if defined(CONFIG_USER_ONLY)
3948 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3949 #else
3950 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3951 #endif
3952 static void gen_sc(DisasContext *ctx)
3953 {
3954     uint32_t lev;
3955 
3956     /*
3957      * LEV is a 7-bit field, but the top 6 bits are treated as a reserved
3958      * field (i.e., ignored). ISA v3.1 changes that to 5 bits, but that is
3959      * for Ultravisor which TCG does not support, so just ignore the top 6.
3960      */
3961     lev = (ctx->opcode >> 5) & 0x1;
3962     gen_exception_err(ctx, POWERPC_SYSCALL, lev);
3963 }
3964 
3965 #if defined(TARGET_PPC64)
3966 #if !defined(CONFIG_USER_ONLY)
3967 static void gen_scv(DisasContext *ctx)
3968 {
3969     uint32_t lev = (ctx->opcode >> 5) & 0x7F;
3970 
3971     /* Set the PC back to the faulting instruction. */
3972     gen_update_nip(ctx, ctx->cia);
3973     gen_helper_scv(tcg_env, tcg_constant_i32(lev));
3974 
3975     ctx->base.is_jmp = DISAS_NORETURN;
3976 }
3977 #endif
3978 #endif
3979 
3980 /***                                Trap                                   ***/
3981 
3982 /* Check for unconditional traps (always or never) */
3983 static bool check_unconditional_trap(DisasContext *ctx, int to)
3984 {
3985     /* Trap never */
3986     if (to == 0) {
3987         return true;
3988     }
3989     /* Trap always */
3990     if (to == 31) {
3991         gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
3992         return true;
3993     }
3994     return false;
3995 }
3996 
3997 /***                          Processor control                            ***/
3998 
3999 /* mcrxr */
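/*
 * Copy XER[SO, OV, CA] into bits 3:1 of CR field crfD (bit 0 is cleared),
 * then clear SO, OV and CA in XER.
 */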
4000 static void gen_mcrxr(DisasContext *ctx)
4001 {
4002     TCGv_i32 t0 = tcg_temp_new_i32();
4003     TCGv_i32 t1 = tcg_temp_new_i32();
4004     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4005 
4006     tcg_gen_trunc_tl_i32(t0, cpu_so);
4007     tcg_gen_trunc_tl_i32(t1, cpu_ov);
4008     tcg_gen_trunc_tl_i32(dst, cpu_ca);
4009     tcg_gen_shli_i32(t0, t0, 3);
4010     tcg_gen_shli_i32(t1, t1, 2);
4011     tcg_gen_shli_i32(dst, dst, 1);
4012     tcg_gen_or_i32(dst, dst, t0);
4013     tcg_gen_or_i32(dst, dst, t1);
4014 
4015     tcg_gen_movi_tl(cpu_so, 0);
4016     tcg_gen_movi_tl(cpu_ov, 0);
4017     tcg_gen_movi_tl(cpu_ca, 0);
4018 }
4019 
4020 #ifdef TARGET_PPC64
4021 /* mcrxrx */
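/*
 * Copy XER[OV, OV32, CA, CA32] into CR field crfD, OV in the most
 * significant bit of the field; unlike mcrxr, XER is left unchanged.
 */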
4022 static void gen_mcrxrx(DisasContext *ctx)
4023 {
4024     TCGv t0 = tcg_temp_new();
4025     TCGv t1 = tcg_temp_new();
4026     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4027 
4028     /* copy OV and OV32 */
4029     tcg_gen_shli_tl(t0, cpu_ov, 1);
4030     tcg_gen_or_tl(t0, t0, cpu_ov32);
4031     tcg_gen_shli_tl(t0, t0, 2);
4032     /* copy CA and CA32 */
4033     tcg_gen_shli_tl(t1, cpu_ca, 1);
4034     tcg_gen_or_tl(t1, t1, cpu_ca32);
4035     tcg_gen_or_tl(t0, t0, t1);
4036     tcg_gen_trunc_tl_i32(dst, t0);
4037 }
4038 #endif
4039 
4040 /* mfcr mfocrf */
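/*
 * With the 0x00100000 opcode bit set this is mfocrf: the single CR field
 * selected by a one-hot CRM mask is placed in its nibble of rD (the other
 * bits end up zero here). Otherwise it is mfcr, which gathers all eight
 * CR fields into the low 32 bits of rD.
 */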
4041 static void gen_mfcr(DisasContext *ctx)
4042 {
4043     uint32_t crm, crn;
4044 
4045     if (likely(ctx->opcode & 0x00100000)) {
4046         crm = CRM(ctx->opcode);
4047         if (likely(crm && ((crm & (crm - 1)) == 0))) {
4048             crn = ctz32(crm);
4049             tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
4050             tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
4051                             cpu_gpr[rD(ctx->opcode)], crn * 4);
4052         }
4053     } else {
4054         TCGv_i32 t0 = tcg_temp_new_i32();
4055         tcg_gen_mov_i32(t0, cpu_crf[0]);
4056         tcg_gen_shli_i32(t0, t0, 4);
4057         tcg_gen_or_i32(t0, t0, cpu_crf[1]);
4058         tcg_gen_shli_i32(t0, t0, 4);
4059         tcg_gen_or_i32(t0, t0, cpu_crf[2]);
4060         tcg_gen_shli_i32(t0, t0, 4);
4061         tcg_gen_or_i32(t0, t0, cpu_crf[3]);
4062         tcg_gen_shli_i32(t0, t0, 4);
4063         tcg_gen_or_i32(t0, t0, cpu_crf[4]);
4064         tcg_gen_shli_i32(t0, t0, 4);
4065         tcg_gen_or_i32(t0, t0, cpu_crf[5]);
4066         tcg_gen_shli_i32(t0, t0, 4);
4067         tcg_gen_or_i32(t0, t0, cpu_crf[6]);
4068         tcg_gen_shli_i32(t0, t0, 4);
4069         tcg_gen_or_i32(t0, t0, cpu_crf[7]);
4070         tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4071     }
4072 }
4073 
4074 /* mfmsr */
4075 static void gen_mfmsr(DisasContext *ctx)
4076 {
4077     CHK_SV(ctx);
4078     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
4079 }
4080 
4081 /* mfspr */
4082 static inline void gen_op_mfspr(DisasContext *ctx)
4083 {
4084     void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
4085     uint32_t sprn = SPR(ctx->opcode);
4086 
4087 #if defined(CONFIG_USER_ONLY)
4088     read_cb = ctx->spr_cb[sprn].uea_read;
4089 #else
4090     if (ctx->pr) {
4091         read_cb = ctx->spr_cb[sprn].uea_read;
4092     } else if (ctx->hv) {
4093         read_cb = ctx->spr_cb[sprn].hea_read;
4094     } else {
4095         read_cb = ctx->spr_cb[sprn].oea_read;
4096     }
4097 #endif
4098     if (likely(read_cb != NULL)) {
4099         if (likely(read_cb != SPR_NOACCESS)) {
4100             (*read_cb)(ctx, rD(ctx->opcode), sprn);
4101         } else {
4102             /* Privilege exception */
4103             /*
4104              * This is a hack to avoid warnings when running Linux:
4105              * this OS breaks the PowerPC virtualisation model,
4106              * allowing userland applications to read the PVR
4107              */
4108             if (sprn != SPR_PVR) {
4109                 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
4110                               "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4111                               ctx->cia);
4112             }
4113             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4114         }
4115     } else {
4116         /* ISA 2.07 defines these as no-ops */
4117         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4118             (sprn >= 808 && sprn <= 811)) {
4119             /* This is a nop */
4120             return;
4121         }
4122         /* Not defined */
4123         qemu_log_mask(LOG_GUEST_ERROR,
4124                       "Trying to read invalid spr %d (0x%03x) at "
4125                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4126 
4127         /*
4128          * The behaviour depends on MSR:PR and SPR# bit 0x10: it can
4129          * generate a priv exception, an hv priv exception or a no-op
4130          */
4131         if (sprn & 0x10) {
4132             if (ctx->pr) {
4133                 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4134             }
4135         } else {
4136             if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
4137                 gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4138             }
4139         }
4140     }
4141 }
4142 
4143 static void gen_mfspr(DisasContext *ctx)
4144 {
4145     gen_op_mfspr(ctx);
4146 }
4147 
4148 /* mftb */
4149 static void gen_mftb(DisasContext *ctx)
4150 {
4151     gen_op_mfspr(ctx);
4152 }
4153 
4154 /* mtcrf mtocrf */
4155 static void gen_mtcrf(DisasContext *ctx)
4156 {
4157     uint32_t crm, crn;
4158 
4159     crm = CRM(ctx->opcode);
4160     if (likely((ctx->opcode & 0x00100000))) {
4161         if (crm && ((crm & (crm - 1)) == 0)) {
4162             TCGv_i32 temp = tcg_temp_new_i32();
4163             crn = ctz32(crm);
4164             tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4165             tcg_gen_shri_i32(temp, temp, crn * 4);
4166             tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
4167         }
4168     } else {
4169         TCGv_i32 temp = tcg_temp_new_i32();
4170         tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4171         for (crn = 0 ; crn < 8 ; crn++) {
4172             if (crm & (1 << crn)) {
4173                     tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
4174                     tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
4175             }
4176         }
4177     }
4178 }
4179 
4180 /* mtmsr */
4181 #if defined(TARGET_PPC64)
4182 static void gen_mtmsrd(DisasContext *ctx)
4183 {
4184     if (unlikely(!is_book3s_arch2x(ctx))) {
4185         gen_invalid(ctx);
4186         return;
4187     }
4188 
4189     CHK_SV(ctx);
4190 
4191 #if !defined(CONFIG_USER_ONLY)
4192     TCGv t0, t1;
4193     target_ulong mask;
4194 
4195     t0 = tcg_temp_new();
4196     t1 = tcg_temp_new();
4197 
4198     translator_io_start(&ctx->base);
4199 
4200     if (ctx->opcode & 0x00010000) {
4201         /* L=1 form only updates EE and RI */
4202         mask = (1ULL << MSR_RI) | (1ULL << MSR_EE);
4203     } else {
4204         /* mtmsrd does not alter HV, S, ME, or LE */
4205         mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) |
4206                  (1ULL << MSR_HV));
4207         /*
4208          * XXX: we need to update nip before the store because, if we
4209          *      enter power saving mode, we will exit the loop directly
4210          *      from ppc_store_msr
4211          */
4212         gen_update_nip(ctx, ctx->base.pc_next);
4213     }
4214 
4215     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
4216     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
4217     tcg_gen_or_tl(t0, t0, t1);
4218 
4219     gen_helper_store_msr(tcg_env, t0);
4220 
4221     /* Must stop the translation as machine state (may have) changed */
4222     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4223 #endif /* !defined(CONFIG_USER_ONLY) */
4224 }
4225 #endif /* defined(TARGET_PPC64) */
4226 
4227 static void gen_mtmsr(DisasContext *ctx)
4228 {
4229     CHK_SV(ctx);
4230 
4231 #if !defined(CONFIG_USER_ONLY)
4232     TCGv t0, t1;
4233     target_ulong mask = 0xFFFFFFFF;
4234 
4235     t0 = tcg_temp_new();
4236     t1 = tcg_temp_new();
4237 
4238     translator_io_start(&ctx->base);
4239     if (ctx->opcode & 0x00010000) {
4240         /* L=1 form only updates EE and RI */
4241         mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE);
4242     } else {
4243         /* mtmsr does not alter S, ME, or LE */
4244         mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S));
4245 
4246         /*
4247          * XXX: we need to update nip before the store because, if we
4248          *      enter power saving mode, we will exit the loop directly
4249          *      from ppc_store_msr
4250          */
4251         gen_update_nip(ctx, ctx->base.pc_next);
4252     }
4253 
4254     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
4255     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
4256     tcg_gen_or_tl(t0, t0, t1);
4257 
4258     gen_helper_store_msr(tcg_env, t0);
4259 
4260     /* Must stop the translation as machine state (may have) changed */
4261     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4262 #endif
4263 }
4264 
4265 /* mtspr */
4266 static void gen_mtspr(DisasContext *ctx)
4267 {
4268     void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
4269     uint32_t sprn = SPR(ctx->opcode);
4270 
4271 #if defined(CONFIG_USER_ONLY)
4272     write_cb = ctx->spr_cb[sprn].uea_write;
4273 #else
4274     if (ctx->pr) {
4275         write_cb = ctx->spr_cb[sprn].uea_write;
4276     } else if (ctx->hv) {
4277         write_cb = ctx->spr_cb[sprn].hea_write;
4278     } else {
4279         write_cb = ctx->spr_cb[sprn].oea_write;
4280     }
4281 #endif
4282     if (likely(write_cb != NULL)) {
4283         if (likely(write_cb != SPR_NOACCESS)) {
4284             (*write_cb)(ctx, sprn, rS(ctx->opcode));
4285         } else {
4286             /* Privilege exception */
4287             qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
4288                           "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4289                           ctx->cia);
4290             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4291         }
4292     } else {
4293         /* ISA 2.07 defines these as no-ops */
4294         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4295             (sprn >= 808 && sprn <= 811)) {
4296             /* This is a nop */
4297             return;
4298         }
4299 
4300         /* Not defined */
4301         qemu_log_mask(LOG_GUEST_ERROR,
4302                       "Trying to write invalid spr %d (0x%03x) at "
4303                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4304 
4306         /*
4307          * The behaviour depends on MSR:PR and SPR# bit 0x10: it can
4308          * generate a priv exception, an hv priv exception or a no-op
4309          */
4310         if (sprn & 0x10) {
4311             if (ctx->pr) {
4312                 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4313             }
4314         } else {
4315             if (ctx->pr || sprn == 0) {
4316                 gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4317             }
4318         }
4319     }
4320 }
4321 
4322 #if defined(TARGET_PPC64)
4323 /* setb */
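/*
 * Set rD from CR field crfS: -1 if the LT bit is set, 1 if the GT bit is
 * set (and LT is clear), 0 otherwise.
 */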
4324 static void gen_setb(DisasContext *ctx)
4325 {
4326     TCGv_i32 t0 = tcg_temp_new_i32();
4327     TCGv_i32 t8 = tcg_constant_i32(8);
4328     TCGv_i32 tm1 = tcg_constant_i32(-1);
4329     int crf = crfS(ctx->opcode);
4330 
4331     tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
4332     tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
4333     tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4334 }
4335 #endif
4336 
4337 /***                         Cache management                              ***/
4338 
4339 /* dcbf */
4340 static void gen_dcbf(DisasContext *ctx)
4341 {
4342     /* XXX: specification says this is treated as a load by the MMU */
4343     TCGv t0;
4344     gen_set_access_type(ctx, ACCESS_CACHE);
4345     t0 = tcg_temp_new();
4346     gen_addr_reg_index(ctx, t0);
4347     gen_qemu_ld8u(ctx, t0, t0);
4348 }
4349 
4350 /* dcbfep (external PID dcbf) */
4351 static void gen_dcbfep(DisasContext *ctx)
4352 {
4353     /* XXX: specification says this is treated as a load by the MMU */
4354     TCGv t0;
4355     CHK_SV(ctx);
4356     gen_set_access_type(ctx, ACCESS_CACHE);
4357     t0 = tcg_temp_new();
4358     gen_addr_reg_index(ctx, t0);
4359     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
4360 }
4361 
4362 /* dcbi (Supervisor only) */
4363 static void gen_dcbi(DisasContext *ctx)
4364 {
4365 #if defined(CONFIG_USER_ONLY)
4366     GEN_PRIV(ctx);
4367 #else
4368     TCGv EA, val;
4369 
4370     CHK_SV(ctx);
4371     EA = tcg_temp_new();
4372     gen_set_access_type(ctx, ACCESS_CACHE);
4373     gen_addr_reg_index(ctx, EA);
4374     val = tcg_temp_new();
4375     /* XXX: specification says this should be treated as a store by the MMU */
4376     gen_qemu_ld8u(ctx, val, EA);
4377     gen_qemu_st8(ctx, val, EA);
4378 #endif /* defined(CONFIG_USER_ONLY) */
4379 }
4380 
4381 /* dcbst */
4382 static void gen_dcbst(DisasContext *ctx)
4383 {
4384     /* XXX: specification says this is treated as a load by the MMU */
4385     TCGv t0;
4386     gen_set_access_type(ctx, ACCESS_CACHE);
4387     t0 = tcg_temp_new();
4388     gen_addr_reg_index(ctx, t0);
4389     gen_qemu_ld8u(ctx, t0, t0);
4390 }
4391 
4392 /* dcbstep (dcbstep External PID version) */
4393 static void gen_dcbstep(DisasContext *ctx)
4394 {
4395     /* XXX: specification says this is treated as a load by the MMU */
4396     TCGv t0;
4397     gen_set_access_type(ctx, ACCESS_CACHE);
4398     t0 = tcg_temp_new();
4399     gen_addr_reg_index(ctx, t0);
4400     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
4401 }
4402 
4403 /* dcbt */
4404 static void gen_dcbt(DisasContext *ctx)
4405 {
4406     /*
4407      * interpreted as no-op
4408      * XXX: specification says this is treated as a load by the MMU but
4409      *      does not generate any exception
4410      */
4411 }
4412 
4413 /* dcbtep */
4414 static void gen_dcbtep(DisasContext *ctx)
4415 {
4416     /*
4417      * interpreted as no-op
4418      * XXX: specification says this is treated as a load by the MMU but
4419      *      does not generate any exception
4420      */
4421 }
4422 
4423 /* dcbtst */
4424 static void gen_dcbtst(DisasContext *ctx)
4425 {
4426     /*
4427      * interpreted as no-op
4428      * XXX: specification says this is treated as a load by the MMU but
4429      *      does not generate any exception
4430      */
4431 }
4432 
4433 /* dcbtstep */
4434 static void gen_dcbtstep(DisasContext *ctx)
4435 {
4436     /*
4437      * interpreted as no-op
4438      * XXX: specification says this is treated as a load by the MMU but
4439      *      does not generate any exception
4440      */
4441 }
4442 
4443 /* dcbtls */
4444 static void gen_dcbtls(DisasContext *ctx)
4445 {
4446     /* Always fails locking the cache */
4447     TCGv t0 = tcg_temp_new();
4448     gen_load_spr(t0, SPR_Exxx_L1CSR0);
4449     tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
4450     gen_store_spr(SPR_Exxx_L1CSR0, t0);
4451 }
4452 
4453 /* dcblc */
4454 static void gen_dcblc(DisasContext *ctx)
4455 {
4456     /*
4457      * interpreted as no-op
4458      */
4459 }
4460 
4461 /* dcbz */
4462 static void gen_dcbz(DisasContext *ctx)
4463 {
4464     TCGv tcgv_addr = tcg_temp_new();
4465 
4466     gen_set_access_type(ctx, ACCESS_CACHE);
4467     gen_addr_reg_index(ctx, tcgv_addr);
4468 
4469 #ifdef TARGET_PPC64
4470     if (ctx->excp_model == POWERPC_EXCP_970 && !(ctx->opcode & 0x00200000)) {
4471         gen_helper_dcbzl(tcg_env, tcgv_addr);
4472         return;
4473     }
4474 #endif
4475 
4476     gen_helper_dcbz(tcg_env, tcgv_addr, tcg_constant_i32(ctx->mem_idx));
4477 }
4478 
4479 /* dcbzep */
4480 static void gen_dcbzep(DisasContext *ctx)
4481 {
4482     TCGv tcgv_addr = tcg_temp_new();
4483 
4484     gen_set_access_type(ctx, ACCESS_CACHE);
4485     gen_addr_reg_index(ctx, tcgv_addr);
4486     gen_helper_dcbz(tcg_env, tcgv_addr, tcg_constant_i32(PPC_TLB_EPID_STORE));
4487 }
4488 
4489 /* dst / dstt */
4490 static void gen_dst(DisasContext *ctx)
4491 {
4492     if (rA(ctx->opcode) == 0) {
4493         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4494     } else {
4495         /* interpreted as no-op */
4496     }
4497 }
4498 
4499 /* dstst / dststt */
4500 static void gen_dstst(DisasContext *ctx)
4501 {
4502     if (rA(ctx->opcode) == 0) {
4503         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4504     } else {
4505         /* interpreted as no-op */
4506     }
4508 }
4509 
4510 /* dss / dssall */
4511 static void gen_dss(DisasContext *ctx)
4512 {
4513     /* interpreted as no-op */
4514 }
4515 
4516 /* icbi */
4517 static void gen_icbi(DisasContext *ctx)
4518 {
4519     TCGv t0;
4520     gen_set_access_type(ctx, ACCESS_CACHE);
4521     t0 = tcg_temp_new();
4522     gen_addr_reg_index(ctx, t0);
4523     gen_helper_icbi(tcg_env, t0);
4524 }
4525 
4526 /* icbiep */
4527 static void gen_icbiep(DisasContext *ctx)
4528 {
4529     TCGv t0;
4530     gen_set_access_type(ctx, ACCESS_CACHE);
4531     t0 = tcg_temp_new();
4532     gen_addr_reg_index(ctx, t0);
4533     gen_helper_icbiep(tcg_env, t0);
4534 }
4535 
4536 /* Optional: */
4537 /* dcba */
4538 static void gen_dcba(DisasContext *ctx)
4539 {
4540     /*
4541      * interpreted as no-op
4542      * XXX: specification says this is treated as a store by the MMU
4543      *      but does not generate any exception
4544      */
4545 }
4546 
4547 /***                    Segment register manipulation                      ***/
4548 /* Supervisor only: */
4549 
4550 /* mfsr */
4551 static void gen_mfsr(DisasContext *ctx)
4552 {
4553 #if defined(CONFIG_USER_ONLY)
4554     GEN_PRIV(ctx);
4555 #else
4556     TCGv t0;
4557 
4558     CHK_SV(ctx);
4559     t0 = tcg_constant_tl(SR(ctx->opcode));
4560     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
4561 #endif /* defined(CONFIG_USER_ONLY) */
4562 }
4563 
4564 /* mfsrin */
4565 static void gen_mfsrin(DisasContext *ctx)
4566 {
4567 #if defined(CONFIG_USER_ONLY)
4568     GEN_PRIV(ctx);
4569 #else
4570     TCGv t0;
4571 
4572     CHK_SV(ctx);
4573     t0 = tcg_temp_new();
4574     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
4575     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
4576 #endif /* defined(CONFIG_USER_ONLY) */
4577 }
4578 
4579 /* mtsr */
4580 static void gen_mtsr(DisasContext *ctx)
4581 {
4582 #if defined(CONFIG_USER_ONLY)
4583     GEN_PRIV(ctx);
4584 #else
4585     TCGv t0;
4586 
4587     CHK_SV(ctx);
4588     t0 = tcg_constant_tl(SR(ctx->opcode));
4589     gen_helper_store_sr(tcg_env, t0, cpu_gpr[rS(ctx->opcode)]);
4590 #endif /* defined(CONFIG_USER_ONLY) */
4591 }
4592 
4593 /* mtsrin */
4594 static void gen_mtsrin(DisasContext *ctx)
4595 {
4596 #if defined(CONFIG_USER_ONLY)
4597     GEN_PRIV(ctx);
4598 #else
4599     TCGv t0;
4600     CHK_SV(ctx);
4601 
4602     t0 = tcg_temp_new();
4603     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
4604     gen_helper_store_sr(tcg_env, t0, cpu_gpr[rD(ctx->opcode)]);
4605 #endif /* defined(CONFIG_USER_ONLY) */
4606 }
4607 
4608 #if defined(TARGET_PPC64)
4609 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
4610 
4611 /* mfsr */
4612 static void gen_mfsr_64b(DisasContext *ctx)
4613 {
4614 #if defined(CONFIG_USER_ONLY)
4615     GEN_PRIV(ctx);
4616 #else
4617     TCGv t0;
4618 
4619     CHK_SV(ctx);
4620     t0 = tcg_constant_tl(SR(ctx->opcode));
4621     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
4622 #endif /* defined(CONFIG_USER_ONLY) */
4623 }
4624 
4625 /* mfsrin */
4626 static void gen_mfsrin_64b(DisasContext *ctx)
4627 {
4628 #if defined(CONFIG_USER_ONLY)
4629     GEN_PRIV(ctx);
4630 #else
4631     TCGv t0;
4632 
4633     CHK_SV(ctx);
4634     t0 = tcg_temp_new();
4635     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
4636     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
4637 #endif /* defined(CONFIG_USER_ONLY) */
4638 }
4639 
4640 /* mtsr */
4641 static void gen_mtsr_64b(DisasContext *ctx)
4642 {
4643 #if defined(CONFIG_USER_ONLY)
4644     GEN_PRIV(ctx);
4645 #else
4646     TCGv t0;
4647 
4648     CHK_SV(ctx);
4649     t0 = tcg_constant_tl(SR(ctx->opcode));
4650     gen_helper_store_sr(tcg_env, t0, cpu_gpr[rS(ctx->opcode)]);
4651 #endif /* defined(CONFIG_USER_ONLY) */
4652 }
4653 
4654 /* mtsrin */
4655 static void gen_mtsrin_64b(DisasContext *ctx)
4656 {
4657 #if defined(CONFIG_USER_ONLY)
4658     GEN_PRIV(ctx);
4659 #else
4660     TCGv t0;
4661 
4662     CHK_SV(ctx);
4663     t0 = tcg_temp_new();
4664     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
4665     gen_helper_store_sr(tcg_env, t0, cpu_gpr[rS(ctx->opcode)]);
4666 #endif /* defined(CONFIG_USER_ONLY) */
4667 }
4668 
4669 #endif /* defined(TARGET_PPC64) */
4670 
4671 /***                      Lookaside buffer management                      ***/
4672 /* Optional & supervisor only: */
4673 
4674 /* tlbia */
4675 static void gen_tlbia(DisasContext *ctx)
4676 {
4677 #if defined(CONFIG_USER_ONLY)
4678     GEN_PRIV(ctx);
4679 #else
4680     CHK_HV(ctx);
4681 
4682     gen_helper_tlbia(tcg_env);
4683 #endif  /* defined(CONFIG_USER_ONLY) */
4684 }
4685 
4686 /* tlbsync */
4687 static void gen_tlbsync(DisasContext *ctx)
4688 {
4689 #if defined(CONFIG_USER_ONLY)
4690     GEN_PRIV(ctx);
4691 #else
4692 
4693     if (ctx->gtse) {
4694         CHK_SV(ctx); /* If gtse is set then tlbsync is supervisor privileged */
4695     } else {
4696         CHK_HV(ctx); /* Else hypervisor privileged */
4697     }
4698 
4699     /* BookS does both ptesync and tlbsync, so make tlbsync a nop for server */
4700     if (ctx->insns_flags & PPC_BOOKE) {
4701         gen_check_tlb_flush(ctx, true);
4702     }
4703 #endif /* defined(CONFIG_USER_ONLY) */
4704 }
4705 
4706 /***                              External control                         ***/
4707 /* Optional: */
4708 
4709 /* eciwx */
4710 static void gen_eciwx(DisasContext *ctx)
4711 {
4712     TCGv t0;
4713     /* Should check EAR[E] ! */
4714     gen_set_access_type(ctx, ACCESS_EXT);
4715     t0 = tcg_temp_new();
4716     gen_addr_reg_index(ctx, t0);
4717     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
4718                        DEF_MEMOP(MO_UL | MO_ALIGN));
4719 }
4720 
4721 /* ecowx */
4722 static void gen_ecowx(DisasContext *ctx)
4723 {
4724     TCGv t0;
4725     /* Should check EAR[E] ! */
4726     gen_set_access_type(ctx, ACCESS_EXT);
4727     t0 = tcg_temp_new();
4728     gen_addr_reg_index(ctx, t0);
4729     tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
4730                        DEF_MEMOP(MO_UL | MO_ALIGN));
4731 }
4732 
4733 /* 602 - 603 - G2 TLB management */
4734 
4735 /* tlbld */
4736 static void gen_tlbld_6xx(DisasContext *ctx)
4737 {
4738 #if defined(CONFIG_USER_ONLY)
4739     GEN_PRIV(ctx);
4740 #else
4741     CHK_SV(ctx);
4742     gen_helper_6xx_tlbd(tcg_env, cpu_gpr[rB(ctx->opcode)]);
4743 #endif /* defined(CONFIG_USER_ONLY) */
4744 }
4745 
4746 /* tlbli */
4747 static void gen_tlbli_6xx(DisasContext *ctx)
4748 {
4749 #if defined(CONFIG_USER_ONLY)
4750     GEN_PRIV(ctx);
4751 #else
4752     CHK_SV(ctx);
4753     gen_helper_6xx_tlbi(tcg_env, cpu_gpr[rB(ctx->opcode)]);
4754 #endif /* defined(CONFIG_USER_ONLY) */
4755 }
4756 
4757 /* BookE specific instructions */
4758 
4759 /* XXX: not implemented on 440 ? */
4760 static void gen_mfapidi(DisasContext *ctx)
4761 {
4762     /* XXX: TODO */
4763     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4764 }
4765 
4766 /* XXX: not implemented on 440 ? */
4767 static void gen_tlbiva(DisasContext *ctx)
4768 {
4769 #if defined(CONFIG_USER_ONLY)
4770     GEN_PRIV(ctx);
4771 #else
4772     TCGv t0;
4773 
4774     CHK_SV(ctx);
4775     t0 = tcg_temp_new();
4776     gen_addr_reg_index(ctx, t0);
4777     gen_helper_tlbiva(tcg_env, cpu_gpr[rB(ctx->opcode)]);
4778 #endif /* defined(CONFIG_USER_ONLY) */
4779 }
4780 
4781 /* All 405 MAC instructions are translated here */
4782 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
4783                                         int ra, int rb, int rt, int Rc)
4784 {
4785     TCGv t0, t1;
4786 
4787     t0 = tcg_temp_new();
4788     t1 = tcg_temp_new();
4789 
4790     switch (opc3 & 0x0D) {
4791     case 0x05:
4792         /* macchw    - macchw.    - macchwo   - macchwo.   */
4793         /* macchws   - macchws.   - macchwso  - macchwso.  */
4794         /* nmacchw   - nmacchw.   - nmacchwo  - nmacchwo.  */
4795         /* nmacchws  - nmacchws.  - nmacchwso - nmacchwso. */
4796         /* mulchw - mulchw. */
4797         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
4798         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
4799         tcg_gen_ext16s_tl(t1, t1);
4800         break;
4801     case 0x04:
4802         /* macchwu   - macchwu.   - macchwuo  - macchwuo.  */
4803         /* macchwsu  - macchwsu.  - macchwsuo - macchwsuo. */
4804         /* mulchwu - mulchwu. */
4805         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
4806         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
4807         tcg_gen_ext16u_tl(t1, t1);
4808         break;
4809     case 0x01:
4810         /* machhw    - machhw.    - machhwo   - machhwo.   */
4811         /* machhws   - machhws.   - machhwso  - machhwso.  */
4812         /* nmachhw   - nmachhw.   - nmachhwo  - nmachhwo.  */
4813         /* nmachhws  - nmachhws.  - nmachhwso - nmachhwso. */
4814         /* mulhhw - mulhhw. */
4815         tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
4816         tcg_gen_ext16s_tl(t0, t0);
4817         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
4818         tcg_gen_ext16s_tl(t1, t1);
4819         break;
4820     case 0x00:
4821         /* machhwu   - machhwu.   - machhwuo  - machhwuo.  */
4822         /* machhwsu  - machhwsu.  - machhwsuo - machhwsuo. */
4823         /* mulhhwu - mulhhwu. */
4824         tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
4825         tcg_gen_ext16u_tl(t0, t0);
4826         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
4827         tcg_gen_ext16u_tl(t1, t1);
4828         break;
4829     case 0x0D:
4830         /* maclhw    - maclhw.    - maclhwo   - maclhwo.   */
4831         /* maclhws   - maclhws.   - maclhwso  - maclhwso.  */
4832         /* nmaclhw   - nmaclhw.   - nmaclhwo  - nmaclhwo.  */
4833         /* nmaclhws  - nmaclhws.  - nmaclhwso - nmaclhwso. */
4834         /* mullhw - mullhw. */
4835         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
4836         tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
4837         break;
4838     case 0x0C:
4839         /* maclhwu   - maclhwu.   - maclhwuo  - maclhwuo.  */
4840         /* maclhwsu  - maclhwsu.  - maclhwsuo - maclhwsuo. */
4841         /* mullhwu - mullhwu. */
4842         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
4843         tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
4844         break;
4845     }
4846     if (opc2 & 0x04) {
4847         /* (n)multiply-and-accumulate (0x0C / 0x0E) */
4848         tcg_gen_mul_tl(t1, t0, t1);
4849         if (opc2 & 0x02) {
4850             /* nmultiply-and-accumulate (0x0E) */
4851             tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
4852         } else {
4853             /* multiply-and-accumulate (0x0C) */
4854             tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
4855         }
4856 
4857         if (opc3 & 0x12) {
4858             /* Check overflow and/or saturate */
4859             TCGLabel *l1 = gen_new_label();
4860 
4861             if (opc3 & 0x10) {
4862                 /* Start with XER OV disabled, the most likely case */
4863                 tcg_gen_movi_tl(cpu_ov, 0);
4864             }
4865             if (opc3 & 0x01) {
4866                 /* Signed */
4867                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
4868                 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
4869                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
4870                 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
4871                 if (opc3 & 0x02) {
4872                     /* Saturate */
4873                     tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
4874                     tcg_gen_xori_tl(t0, t0, 0x7fffffff);
4875                 }
4876             } else {
4877                 /* Unsigned */
4878                 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
4879                 if (opc3 & 0x02) {
4880                     /* Saturate */
4881                     tcg_gen_movi_tl(t0, UINT32_MAX);
4882                 }
4883             }
4884             if (opc3 & 0x10) {
4885                 /* Overflow detected: set XER OV and SO */
4886                 tcg_gen_movi_tl(cpu_ov, 1);
4887                 tcg_gen_movi_tl(cpu_so, 1);
4888             }
4889             gen_set_label(l1);
4890             tcg_gen_mov_tl(cpu_gpr[rt], t0);
4891         }
4892     } else {
4893         tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
4894     }
4895     if (unlikely(Rc != 0)) {
4896         /* Update Rc0 */
4897         gen_set_Rc0(ctx, cpu_gpr[rt]);
4898     }
4899 }
4900 
4901 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
4902 static void glue(gen_, name)(DisasContext *ctx)                               \
4903 {                                                                             \
4904     gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
4905                          rD(ctx->opcode), Rc(ctx->opcode));                   \
4906 }
4907 
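/*
 * Each GEN_MAC_HANDLER() invocation below produces one opcode handler.
 * For example, GEN_MAC_HANDLER(macchw, 0x0C, 0x05) expands to:
 *
 *     static void gen_macchw(DisasContext *ctx)
 *     {
 *         gen_405_mulladd_insn(ctx, 0x0C, 0x05, rA(ctx->opcode),
 *                              rB(ctx->opcode), rD(ctx->opcode),
 *                              Rc(ctx->opcode));
 *     }
 */
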
4908 /* macchw    - macchw.    */
4909 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
4910 /* macchwo   - macchwo.   */
4911 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
4912 /* macchws   - macchws.   */
4913 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
4914 /* macchwso  - macchwso.  */
4915 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
4916 /* macchwsu  - macchwsu.  */
4917 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
4918 /* macchwsuo - macchwsuo. */
4919 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
4920 /* macchwu   - macchwu.   */
4921 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
4922 /* macchwuo  - macchwuo.  */
4923 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
4924 /* machhw    - machhw.    */
4925 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
4926 /* machhwo   - machhwo.   */
4927 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
4928 /* machhws   - machhws.   */
4929 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
4930 /* machhwso  - machhwso.  */
4931 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
4932 /* machhwsu  - machhwsu.  */
4933 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
4934 /* machhwsuo - machhwsuo. */
4935 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
4936 /* machhwu   - machhwu.   */
4937 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
4938 /* machhwuo  - machhwuo.  */
4939 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
4940 /* maclhw    - maclhw.    */
4941 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
4942 /* maclhwo   - maclhwo.   */
4943 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
4944 /* maclhws   - maclhws.   */
4945 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
4946 /* maclhwso  - maclhwso.  */
4947 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
4948 /* maclhwu   - maclhwu.   */
4949 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
4950 /* maclhwuo  - maclhwuo.  */
4951 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
4952 /* maclhwsu  - maclhwsu.  */
4953 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
4954 /* maclhwsuo - maclhwsuo. */
4955 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
4956 /* nmacchw   - nmacchw.   */
4957 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
4958 /* nmacchwo  - nmacchwo.  */
4959 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
4960 /* nmacchws  - nmacchws.  */
4961 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
4962 /* nmacchwso - nmacchwso. */
4963 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
4964 /* nmachhw   - nmachhw.   */
4965 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
4966 /* nmachhwo  - nmachhwo.  */
4967 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
4968 /* nmachhws  - nmachhws.  */
4969 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
4970 /* nmachhwso - nmachhwso. */
4971 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
4972 /* nmaclhw   - nmaclhw.   */
4973 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
4974 /* nmaclhwo  - nmaclhwo.  */
4975 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
4976 /* nmaclhws  - nmaclhws.  */
4977 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
4978 /* nmaclhwso - nmaclhwso. */
4979 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
4980 
4981 /* mulchw  - mulchw.  */
4982 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
4983 /* mulchwu - mulchwu. */
4984 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
4985 /* mulhhw  - mulhhw.  */
4986 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
4987 /* mulhhwu - mulhhwu. */
4988 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
4989 /* mullhw  - mullhw.  */
4990 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
4991 /* mullhwu - mullhwu. */
4992 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
4993 
4994 /* mfdcr */
4995 static void gen_mfdcr(DisasContext *ctx)
4996 {
4997 #if defined(CONFIG_USER_ONLY)
4998     GEN_PRIV(ctx);
4999 #else
5000     TCGv dcrn;
5001 
5002     CHK_SV(ctx);
5003     dcrn = tcg_constant_tl(SPR(ctx->opcode));
5004     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], tcg_env, dcrn);
5005 #endif /* defined(CONFIG_USER_ONLY) */
5006 }
5007 
5008 /* mtdcr */
5009 static void gen_mtdcr(DisasContext *ctx)
5010 {
5011 #if defined(CONFIG_USER_ONLY)
5012     GEN_PRIV(ctx);
5013 #else
5014     TCGv dcrn;
5015 
5016     CHK_SV(ctx);
5017     dcrn = tcg_constant_tl(SPR(ctx->opcode));
5018     gen_helper_store_dcr(tcg_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
5019 #endif /* defined(CONFIG_USER_ONLY) */
5020 }
5021 
5022 /* mfdcrx */
5023 /* XXX: not implemented on 440? */
5024 static void gen_mfdcrx(DisasContext *ctx)
5025 {
5026 #if defined(CONFIG_USER_ONLY)
5027     GEN_PRIV(ctx);
5028 #else
5029     CHK_SV(ctx);
5030     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], tcg_env,
5031                         cpu_gpr[rA(ctx->opcode)]);
5032     /* Note: if the Rc bit is set, the resulting CR0 (Rc0) state is undefined */
5033 #endif /* defined(CONFIG_USER_ONLY) */
5034 }
5035 
5036 /* mtdcrx */
5037 /* XXX: not implemented on 440? */
5038 static void gen_mtdcrx(DisasContext *ctx)
5039 {
5040 #if defined(CONFIG_USER_ONLY)
5041     GEN_PRIV(ctx);
5042 #else
5043     CHK_SV(ctx);
5044     gen_helper_store_dcr(tcg_env, cpu_gpr[rA(ctx->opcode)],
5045                          cpu_gpr[rS(ctx->opcode)]);
5046     /* Note: if the Rc bit is set, the resulting CR0 (Rc0) state is undefined */
5047 #endif /* defined(CONFIG_USER_ONLY) */
5048 }
5049 
5050 /* dccci */
5051 static void gen_dccci(DisasContext *ctx)
5052 {
5053     CHK_SV(ctx);
5054     /* interpreted as no-op */
5055 }
5056 
5057 /* dcread */
5058 static void gen_dcread(DisasContext *ctx)
5059 {
5060 #if defined(CONFIG_USER_ONLY)
5061     GEN_PRIV(ctx);
5062 #else
5063     TCGv EA, val;
5064 
5065     CHK_SV(ctx);
5066     gen_set_access_type(ctx, ACCESS_CACHE);
5067     EA = tcg_temp_new();
5068     gen_addr_reg_index(ctx, EA);
5069     val = tcg_temp_new();
5070     gen_qemu_ld32u(ctx, val, EA);
5071     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5072 #endif /* defined(CONFIG_USER_ONLY) */
5073 }
5074 
5075 /* icbt */
5076 static void gen_icbt_40x(DisasContext *ctx)
5077 {
5078     /*
5079      * interpreted as no-op
5080      * XXX: the specification says this is treated as a load by the MMU
5081      *      but it does not generate any exception
5082      */
5083 }
5084 
5085 /* iccci */
5086 static void gen_iccci(DisasContext *ctx)
5087 {
5088     CHK_SV(ctx);
5089     /* interpreted as no-op */
5090 }
5091 
5092 /* icread */
5093 static void gen_icread(DisasContext *ctx)
5094 {
5095     CHK_SV(ctx);
5096     /* interpreted as no-op */
5097 }
5098 
5099 /* rfci (supervisor only) */
5100 static void gen_rfci_40x(DisasContext *ctx)
5101 {
5102 #if defined(CONFIG_USER_ONLY)
5103     GEN_PRIV(ctx);
5104 #else
5105     CHK_SV(ctx);
5106     /* Restore CPU state */
5107     gen_helper_40x_rfci(tcg_env);
5108     ctx->base.is_jmp = DISAS_EXIT;
5109 #endif /* defined(CONFIG_USER_ONLY) */
5110 }
5111 
5112 static void gen_rfci(DisasContext *ctx)
5113 {
5114 #if defined(CONFIG_USER_ONLY)
5115     GEN_PRIV(ctx);
5116 #else
5117     CHK_SV(ctx);
5118     /* Restore CPU state */
5119     gen_helper_rfci(tcg_env);
5120     ctx->base.is_jmp = DISAS_EXIT;
5121 #endif /* defined(CONFIG_USER_ONLY) */
5122 }
5123 
5124 /* BookE specific */
5125 
5126 /* XXX: not implemented on 440? */
5127 static void gen_rfdi(DisasContext *ctx)
5128 {
5129 #if defined(CONFIG_USER_ONLY)
5130     GEN_PRIV(ctx);
5131 #else
5132     CHK_SV(ctx);
5133     /* Restore CPU state */
5134     gen_helper_rfdi(tcg_env);
5135     ctx->base.is_jmp = DISAS_EXIT;
5136 #endif /* defined(CONFIG_USER_ONLY) */
5137 }
5138 
5139 /* XXX: not implemented on 440? */
5140 static void gen_rfmci(DisasContext *ctx)
5141 {
5142 #if defined(CONFIG_USER_ONLY)
5143     GEN_PRIV(ctx);
5144 #else
5145     CHK_SV(ctx);
5146     /* Restore CPU state */
5147     gen_helper_rfmci(tcg_env);
5148     ctx->base.is_jmp = DISAS_EXIT;
5149 #endif /* defined(CONFIG_USER_ONLY) */
5150 }
5151 
5152 /* TLB management - PowerPC 405 implementation */
5153 
5154 /* tlbre */
5155 static void gen_tlbre_40x(DisasContext *ctx)
5156 {
5157 #if defined(CONFIG_USER_ONLY)
5158     GEN_PRIV(ctx);
5159 #else
5160     CHK_SV(ctx);
5161     switch (rB(ctx->opcode)) {
5162     case 0:
5163         gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], tcg_env,
5164                                 cpu_gpr[rA(ctx->opcode)]);
5165         break;
5166     case 1:
5167         gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], tcg_env,
5168                                 cpu_gpr[rA(ctx->opcode)]);
5169         break;
5170     default:
5171         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5172         break;
5173     }
5174 #endif /* defined(CONFIG_USER_ONLY) */
5175 }
5176 
5177 /* tlbsx - tlbsx. */
5178 static void gen_tlbsx_40x(DisasContext *ctx)
5179 {
5180 #if defined(CONFIG_USER_ONLY)
5181     GEN_PRIV(ctx);
5182 #else
5183     TCGv t0;
5184 
5185     CHK_SV(ctx);
5186     t0 = tcg_temp_new();
5187     gen_addr_reg_index(ctx, t0);
5188     gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
5189     if (Rc(ctx->opcode)) {
5190         TCGLabel *l1 = gen_new_label();
5191         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5192         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5193         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5194         gen_set_label(l1);
5195     }
5196 #endif /* defined(CONFIG_USER_ONLY) */
5197 }
5198 
5199 /* tlbwe */
5200 static void gen_tlbwe_40x(DisasContext *ctx)
5201 {
5202 #if defined(CONFIG_USER_ONLY)
5203     GEN_PRIV(ctx);
5204 #else
5205     CHK_SV(ctx);
5206 
5207     switch (rB(ctx->opcode)) {
5208     case 0:
5209         gen_helper_4xx_tlbwe_hi(tcg_env, cpu_gpr[rA(ctx->opcode)],
5210                                 cpu_gpr[rS(ctx->opcode)]);
5211         break;
5212     case 1:
5213         gen_helper_4xx_tlbwe_lo(tcg_env, cpu_gpr[rA(ctx->opcode)],
5214                                 cpu_gpr[rS(ctx->opcode)]);
5215         break;
5216     default:
5217         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5218         break;
5219     }
5220 #endif /* defined(CONFIG_USER_ONLY) */
5221 }
5222 
5223 /* TLB management - PowerPC 440 implementation */
5224 
5225 /* tlbre */
5226 static void gen_tlbre_440(DisasContext *ctx)
5227 {
5228 #if defined(CONFIG_USER_ONLY)
5229     GEN_PRIV(ctx);
5230 #else
5231     CHK_SV(ctx);
5232 
5233     switch (rB(ctx->opcode)) {
5234     case 0:
5235     case 1:
5236     case 2:
5237         {
5238             TCGv_i32 t0 = tcg_constant_i32(rB(ctx->opcode));
5239             gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], tcg_env,
5240                                  t0, cpu_gpr[rA(ctx->opcode)]);
5241         }
5242         break;
5243     default:
5244         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5245         break;
5246     }
5247 #endif /* defined(CONFIG_USER_ONLY) */
5248 }
5249 
5250 /* tlbsx - tlbsx. */
5251 static void gen_tlbsx_440(DisasContext *ctx)
5252 {
5253 #if defined(CONFIG_USER_ONLY)
5254     GEN_PRIV(ctx);
5255 #else
5256     TCGv t0;
5257 
5258     CHK_SV(ctx);
5259     t0 = tcg_temp_new();
5260     gen_addr_reg_index(ctx, t0);
5261     gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
5262     if (Rc(ctx->opcode)) {
5263         TCGLabel *l1 = gen_new_label();
5264         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5265         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5266         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5267         gen_set_label(l1);
5268     }
5269 #endif /* defined(CONFIG_USER_ONLY) */
5270 }
5271 
5272 /* tlbwe */
5273 static void gen_tlbwe_440(DisasContext *ctx)
5274 {
5275 #if defined(CONFIG_USER_ONLY)
5276     GEN_PRIV(ctx);
5277 #else
5278     CHK_SV(ctx);
5279     switch (rB(ctx->opcode)) {
5280     case 0:
5281     case 1:
5282     case 2:
5283         {
5284             TCGv_i32 t0 = tcg_constant_i32(rB(ctx->opcode));
5285             gen_helper_440_tlbwe(tcg_env, t0, cpu_gpr[rA(ctx->opcode)],
5286                                  cpu_gpr[rS(ctx->opcode)]);
5287         }
5288         break;
5289     default:
5290         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5291         break;
5292     }
5293 #endif /* defined(CONFIG_USER_ONLY) */
5294 }
5295 
5296 /* TLB management - PowerPC BookE 2.06 implementation */
5297 
5298 /* tlbre */
5299 static void gen_tlbre_booke206(DisasContext *ctx)
5300 {
5301 #if defined(CONFIG_USER_ONLY)
5302     GEN_PRIV(ctx);
5303 #else
5304     CHK_SV(ctx);
5305     gen_helper_booke206_tlbre(tcg_env);
5306 #endif /* defined(CONFIG_USER_ONLY) */
5307 }
5308 
5309 /* tlbsx - tlbsx. */
5310 static void gen_tlbsx_booke206(DisasContext *ctx)
5311 {
5312 #if defined(CONFIG_USER_ONLY)
5313     GEN_PRIV(ctx);
5314 #else
5315     TCGv t0;
5316 
5317     CHK_SV(ctx);
5318     if (rA(ctx->opcode)) {
5319         t0 = tcg_temp_new();
5320         tcg_gen_add_tl(t0, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
5321     } else {
5322         t0 = cpu_gpr[rB(ctx->opcode)];
5323     }
5324     gen_helper_booke206_tlbsx(tcg_env, t0);
5325 #endif /* defined(CONFIG_USER_ONLY) */
5326 }
5327 
5328 /* tlbwe */
5329 static void gen_tlbwe_booke206(DisasContext *ctx)
5330 {
5331 #if defined(CONFIG_USER_ONLY)
5332     GEN_PRIV(ctx);
5333 #else
5334     CHK_SV(ctx);
5335     gen_helper_booke206_tlbwe(tcg_env);
5336 #endif /* defined(CONFIG_USER_ONLY) */
5337 }
5338 
5339 static void gen_tlbivax_booke206(DisasContext *ctx)
5340 {
5341 #if defined(CONFIG_USER_ONLY)
5342     GEN_PRIV(ctx);
5343 #else
5344     TCGv t0;
5345 
5346     CHK_SV(ctx);
5347     t0 = tcg_temp_new();
5348     gen_addr_reg_index(ctx, t0);
5349     gen_helper_booke206_tlbivax(tcg_env, t0);
5350 #endif /* defined(CONFIG_USER_ONLY) */
5351 }
5352 
5353 static void gen_tlbilx_booke206(DisasContext *ctx)
5354 {
5355 #if defined(CONFIG_USER_ONLY)
5356     GEN_PRIV(ctx);
5357 #else
5358     TCGv t0;
5359 
5360     CHK_SV(ctx);
5361     t0 = tcg_temp_new();
5362     gen_addr_reg_index(ctx, t0);
5363 
5364     switch ((ctx->opcode >> 21) & 0x3) {
5365     case 0:
5366         gen_helper_booke206_tlbilx0(tcg_env, t0);
5367         break;
5368     case 1:
5369         gen_helper_booke206_tlbilx1(tcg_env, t0);
5370         break;
5371     case 3:
5372         gen_helper_booke206_tlbilx3(tcg_env, t0);
5373         break;
5374     default:
5375         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5376         break;
5377     }
5378 #endif /* defined(CONFIG_USER_ONLY) */
5379 }
5380 
5381 /* wrtee */
5382 static void gen_wrtee(DisasContext *ctx)
5383 {
5384 #if defined(CONFIG_USER_ONLY)
5385     GEN_PRIV(ctx);
5386 #else
5387     TCGv t0;
5388 
5389     CHK_SV(ctx);
5390     t0 = tcg_temp_new();
5391     tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
5392     tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
5393     tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
5394     gen_ppc_maybe_interrupt(ctx);
5395     /*
5396      * Stop translation to have a chance to raise an exception if we
5397      * just set msr_ee to 1
5398      */
5399     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5400 #endif /* defined(CONFIG_USER_ONLY) */
5401 }
5402 
5403 /* wrteei */
5404 static void gen_wrteei(DisasContext *ctx)
5405 {
5406 #if defined(CONFIG_USER_ONLY)
5407     GEN_PRIV(ctx);
5408 #else
5409     CHK_SV(ctx);
5410     if (ctx->opcode & 0x00008000) {
5411         tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
5412         gen_ppc_maybe_interrupt(ctx);
5413         /* Stop translation to have a chance to raise an exception */
5414         ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5415     } else {
5416         tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
5417     }
5418 #endif /* defined(CONFIG_USER_ONLY) */
5419 }
5420 
5421 /* PowerPC 440 specific instructions */
5422 
5423 /* dlmzb */
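/*
 * The byte scan is implemented entirely in the helper; the Rc bit is
 * passed down so the helper can also produce the CR0 result for the
 * record form.
 */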
5424 static void gen_dlmzb(DisasContext *ctx)
5425 {
5426     TCGv_i32 t0 = tcg_constant_i32(Rc(ctx->opcode));
5427     gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], tcg_env,
5428                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
5429 }
5430 
5431 /* icbt */
5432 static void gen_icbt_440(DisasContext *ctx)
5433 {
5434     /*
5435      * interpreted as no-op
5436      * XXX: the specification says this is treated as a load by the MMU
5437      *      but it does not generate any exception
5438      */
5439 }
5440 
5441 static void gen_tbegin(DisasContext *ctx)
5442 {
5443     if (unlikely(!ctx->tm_enabled)) {
5444         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
5445         return;
5446     }
5447     gen_helper_tbegin(tcg_env);
5448 }
5449 
5450 #define GEN_TM_NOOP(name)                                      \
5451 static inline void gen_##name(DisasContext *ctx)               \
5452 {                                                              \
5453     if (unlikely(!ctx->tm_enabled)) {                          \
5454         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
5455         return;                                                \
5456     }                                                          \
5457     /*                                                         \
5458      * Because tbegin always fails in QEMU, these user         \
5459      * space instructions all have a simple implementation:    \
5460      *                                                         \
5461      *     CR[0] = 0b0 || MSR[TS] || 0b0                       \
5462      *           = 0b0 || 0b00    || 0b0                       \
5463      */                                                        \
5464     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
5465 }
5466 
5467 GEN_TM_NOOP(tend);
5468 GEN_TM_NOOP(tabort);
5469 GEN_TM_NOOP(tabortwc);
5470 GEN_TM_NOOP(tabortwci);
5471 GEN_TM_NOOP(tabortdc);
5472 GEN_TM_NOOP(tabortdci);
5473 GEN_TM_NOOP(tsr);
5474 
5475 static inline void gen_cp_abort(DisasContext *ctx)
5476 {
5477     /* Do Nothing */
5478 }
5479 
5480 #define GEN_CP_PASTE_NOOP(name)                           \
5481 static inline void gen_##name(DisasContext *ctx)          \
5482 {                                                         \
5483     /*                                                    \
5484      * Generate invalid exception until we have an        \
5485      * implementation of the copy paste facility          \
5486      */                                                   \
5487     gen_invalid(ctx);                                     \
5488 }
5489 
5490 GEN_CP_PASTE_NOOP(copy)
5491 GEN_CP_PASTE_NOOP(paste)
5492 
5493 static void gen_tcheck(DisasContext *ctx)
5494 {
5495     if (unlikely(!ctx->tm_enabled)) {
5496         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
5497         return;
5498     }
5499     /*
5500      * Because tbegin always fails, the tcheck implementation is
5501      * simple:
5502      *
5503      * CR[CRF] = TDOOMED || MSR[TS] || 0b0
5504      *         = 0b1 || 0b00 || 0b0
5505      */
5506     tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
5507 }
5508 
5509 #if defined(CONFIG_USER_ONLY)
5510 #define GEN_TM_PRIV_NOOP(name)                                 \
5511 static inline void gen_##name(DisasContext *ctx)               \
5512 {                                                              \
5513     gen_priv_opc(ctx);                                         \
5514 }
5515 
5516 #else
5517 
5518 #define GEN_TM_PRIV_NOOP(name)                                 \
5519 static inline void gen_##name(DisasContext *ctx)               \
5520 {                                                              \
5521     CHK_SV(ctx);                                               \
5522     if (unlikely(!ctx->tm_enabled)) {                          \
5523         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
5524         return;                                                \
5525     }                                                          \
5526     /*                                                         \
5527      * Because tbegin always fails, the implementation is      \
5528      * simple:                                                 \
5529      *                                                         \
5530      *   CR[0] = 0b0 || MSR[TS] || 0b0                         \
5531      *         = 0b0 || 0b00 || 0b0                            \
5532      */                                                        \
5533     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
5534 }
5535 
5536 #endif
5537 
5538 GEN_TM_PRIV_NOOP(treclaim);
5539 GEN_TM_PRIV_NOOP(trechkpt);
5540 
5541 static inline void get_fpr(TCGv_i64 dst, int regno)
5542 {
5543     tcg_gen_ld_i64(dst, tcg_env, fpr_offset(regno));
5544 }
5545 
5546 static inline void set_fpr(int regno, TCGv_i64 src)
5547 {
5548     tcg_gen_st_i64(src, tcg_env, fpr_offset(regno));
5549     /*
5550      * Before PowerISA v3.1 the result of doubleword 1 of the VSR
5551      * corresponding to the target FPR was undefined. However,
5552      * most (if not all) real hardware set the result to 0.
5553      * Starting at ISA v3.1, the result for doubleword 1 is now defined
5554      * to be 0.
5555      */
5556     tcg_gen_st_i64(tcg_constant_i64(0), tcg_env, vsr64_offset(regno, false));
5557 }
5558 
5559 /*
5560  * Helpers for decodetree used by !function for decoding arguments.
5561  */
5562 static int times_2(DisasContext *ctx, int x)
5563 {
5564     return x * 2;
5565 }
5566 
5567 static int times_4(DisasContext *ctx, int x)
5568 {
5569     return x * 4;
5570 }
5571 
5572 static int times_16(DisasContext *ctx, int x)
5573 {
5574     return x * 16;
5575 }
5576 
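/*
 * Worked example (illustrative only): dw_compose_ea() deposits the 6-bit
 * field x at bit position 3 of the constant, so x = 0 yields
 * 0xfffffffffffffe00 and x = 0x3f yields 0xfffffffffffffff8; the low
 * three bits always stay clear.
 */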
5577 static int64_t dw_compose_ea(DisasContext *ctx, int x)
5578 {
5579     return deposit64(0xfffffffffffffe00, 3, 6, x);
5580 }
5581 
5582 /*
5583  * Helpers for trans_* functions to check for specific insns flags.
5584  * Use token pasting to ensure that we use the proper flag with the
5585  * proper variable.
5586  */
5587 #define REQUIRE_INSNS_FLAGS(CTX, NAME) \
5588     do {                                                \
5589         if (((CTX)->insns_flags & PPC_##NAME) == 0) {   \
5590             return false;                               \
5591         }                                               \
5592     } while (0)
5593 
5594 #define REQUIRE_INSNS_FLAGS2(CTX, NAME) \
5595     do {                                                \
5596         if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \
5597             return false;                               \
5598         }                                               \
5599     } while (0)
5600 
5601 /* Then special-case the check for 64-bit so that we elide code for ppc32. */
5602 #if TARGET_LONG_BITS == 32
5603 # define REQUIRE_64BIT(CTX)  return false
5604 #else
5605 # define REQUIRE_64BIT(CTX)  REQUIRE_INSNS_FLAGS(CTX, 64B)
5606 #endif
5607 
5608 #define REQUIRE_VECTOR(CTX)                             \
5609     do {                                                \
5610         if (unlikely(!(CTX)->altivec_enabled)) {        \
5611             gen_exception((CTX), POWERPC_EXCP_VPU);     \
5612             return true;                                \
5613         }                                               \
5614     } while (0)
5615 
5616 #define REQUIRE_VSX(CTX)                                \
5617     do {                                                \
5618         if (unlikely(!(CTX)->vsx_enabled)) {            \
5619             gen_exception((CTX), POWERPC_EXCP_VSXU);    \
5620             return true;                                \
5621         }                                               \
5622     } while (0)
5623 
5624 #define REQUIRE_FPU(ctx)                                \
5625     do {                                                \
5626         if (unlikely(!(ctx)->fpu_enabled)) {            \
5627             gen_exception((ctx), POWERPC_EXCP_FPU);     \
5628             return true;                                \
5629         }                                               \
5630     } while (0)
5631 
5632 #if !defined(CONFIG_USER_ONLY)
5633 #define REQUIRE_SV(CTX)             \
5634     do {                            \
5635         if (unlikely((CTX)->pr)) {  \
5636             gen_priv_opc(CTX);      \
5637             return true;            \
5638         }                           \
5639     } while (0)
5640 
5641 #define REQUIRE_HV(CTX)                             \
5642     do {                                            \
5643         if (unlikely((CTX)->pr || !(CTX)->hv)) {    \
5644             gen_priv_opc(CTX);                      \
5645             return true;                            \
5646         }                                           \
5647     } while (0)
5648 #else
5649 #define REQUIRE_SV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
5650 #define REQUIRE_HV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
5651 #endif
5652 
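/*
 * Typical use inside a decodetree trans_* handler (a sketch only; "XFOO"
 * is a made-up mnemonic, not a real instruction):
 *
 *     static bool trans_XFOO(DisasContext *ctx, arg_XFOO *a)
 *     {
 *         REQUIRE_INSNS_FLAGS2(ctx, ISA300);
 *         REQUIRE_VSX(ctx);
 *         ... emit TCG ops ...
 *         return true;
 *     }
 *
 * Returning false tells the decoder the insn was not handled, while the
 * REQUIRE_* macros either return false silently (insns flags checks) or
 * raise the corresponding facility exception themselves.
 */
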
5653 /*
5654  * Helpers for implementing sets of trans_* functions.
5655  * Defer the implementation of NAME to FUNC, with optional extra arguments.
5656  */
5657 #define TRANS(NAME, FUNC, ...) \
5658     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5659     { return FUNC(ctx, a, __VA_ARGS__); }
5660 #define TRANS_FLAGS(FLAGS, NAME, FUNC, ...) \
5661     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5662     {                                                          \
5663         REQUIRE_INSNS_FLAGS(ctx, FLAGS);                       \
5664         return FUNC(ctx, a, __VA_ARGS__);                      \
5665     }
5666 #define TRANS_FLAGS2(FLAGS2, NAME, FUNC, ...) \
5667     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5668     {                                                          \
5669         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
5670         return FUNC(ctx, a, __VA_ARGS__);                      \
5671     }
5672 
5673 #define TRANS64(NAME, FUNC, ...) \
5674     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5675     { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); }
5676 #define TRANS64_FLAGS2(FLAGS2, NAME, FUNC, ...) \
5677     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5678     {                                                          \
5679         REQUIRE_64BIT(ctx);                                    \
5680         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
5681         return FUNC(ctx, a, __VA_ARGS__);                      \
5682     }
5683 
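/*
 * For illustration (hypothetical names), TRANS_FLAGS(INTEGER, XFOO, do_xfoo, 0)
 * expands to:
 *
 *     static bool trans_XFOO(DisasContext *ctx, arg_XFOO *a)
 *     {
 *         REQUIRE_INSNS_FLAGS(ctx, INTEGER);
 *         return do_xfoo(ctx, a, 0);
 *     }
 */
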
5684 /* TODO: More TRANS* helpers for extra insn_flags checks. */
5685 
5686 
5687 #include "decode-insn32.c.inc"
5688 #include "decode-insn64.c.inc"
5689 #include "power8-pmu-regs.c.inc"
5690 
5691 /*
5692  * Incorporate CIA into the constant when R=1.
5693  * Validate that when R=1, RA=0.
5694  */
5695 static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a)
5696 {
5697     d->rt = a->rt;
5698     d->ra = a->ra;
5699     d->si = a->si;
5700     if (a->r) {
5701         if (unlikely(a->ra != 0)) {
5702             gen_invalid(ctx);
5703             return false;
5704         }
5705         d->si += ctx->cia;
5706     }
5707     return true;
5708 }
5709 
5710 #include "translate/fixedpoint-impl.c.inc"
5711 
5712 #include "translate/fp-impl.c.inc"
5713 
5714 #include "translate/vmx-impl.c.inc"
5715 
5716 #include "translate/vsx-impl.c.inc"
5717 
5718 #include "translate/dfp-impl.c.inc"
5719 
5720 #include "translate/spe-impl.c.inc"
5721 
5722 #include "translate/branch-impl.c.inc"
5723 
5724 #include "translate/processor-ctrl-impl.c.inc"
5725 
5726 #include "translate/storage-ctrl-impl.c.inc"
5727 
5728 #include "translate/misc-impl.c.inc"
5729 
5730 #include "translate/bhrb-impl.c.inc"
5731 
5732 /* Handles lfdp */
5733 static void gen_dform39(DisasContext *ctx)
5734 {
5735     if ((ctx->opcode & 0x3) == 0) {
5736         if (ctx->insns_flags2 & PPC2_ISA205) {
5737             return gen_lfdp(ctx);
5738         }
5739     }
5740     return gen_invalid(ctx);
5741 }
5742 
5743 /* Handles stfdp */
5744 static void gen_dform3D(DisasContext *ctx)
5745 {
5746     if ((ctx->opcode & 3) == 0) { /* DS-FORM */
5747         /* stfdp */
5748         if (ctx->insns_flags2 & PPC2_ISA205) {
5749             return gen_stfdp(ctx);
5750         }
5751     }
5752     return gen_invalid(ctx);
5753 }
5754 
5755 #if defined(TARGET_PPC64)
5756 /* brd */
5757 static void gen_brd(DisasContext *ctx)
5758 {
5759     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5760 }
5761 
5762 /* brw */
5763 static void gen_brw(DisasContext *ctx)
5764 {
5765     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5766     tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
5768 }
5769 
5770 /* brh */
5771 static void gen_brh(DisasContext *ctx)
5772 {
5773     TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull);
5774     TCGv_i64 t1 = tcg_temp_new_i64();
5775     TCGv_i64 t2 = tcg_temp_new_i64();
5776 
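    /*
     * Swap the two bytes within each halfword: t2 collects the high bytes
     * shifted down, t1 the low bytes shifted up, and the final OR merges
     * them into the result.
     */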
5777     tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
5778     tcg_gen_and_i64(t2, t1, mask);
5779     tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask);
5780     tcg_gen_shli_i64(t1, t1, 8);
5781     tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
5782 }
5783 #endif
5784 
5785 static opcode_t opcodes[] = {
5786 #if defined(TARGET_PPC64)
5787 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
5788 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
5789 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
5790 #endif
5791 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
5792 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
5793 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
5794 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
5795 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5796 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5797 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5798 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
5799 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
5800 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
5801 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
5802 #if defined(TARGET_PPC64)
5803 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
5804 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
5805 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
5806 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
5807 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
5808 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
5809                PPC_NONE, PPC2_ISA300),
5810 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
5811                PPC_NONE, PPC2_ISA300),
5812 #endif
5813 /* handles lfdp, lxsd, lxssp */
5814 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
5815 /* handles stfdp, stxsd, stxssp */
5816 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
5817 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5818 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5819 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
5820 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
5821 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
5822 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
5823 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
5824 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
5825 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
5826 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
5827 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
5828 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
5829 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
5830 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
5831 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
5832 #if defined(TARGET_PPC64)
5833 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
5834 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
5835 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
5836 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
5837 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
5838 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
5839 #endif
5840 /* ISA v3.0 changed the extended opcode from 62 to 30 */
5841 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x039FF801, PPC_WAIT),
5842 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039CF801, PPC_NONE, PPC2_ISA300),
5843 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
5844 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
5845 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
5846 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
5847 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
5848 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
5849 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
5850 #if defined(TARGET_PPC64)
5851 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
5852 #if !defined(CONFIG_USER_ONLY)
5853 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
5854 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
5855 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
5856 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
5857 #endif
5858 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
5859 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
5860 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
5861 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
5862 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
5863 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
5864 #endif
5865 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
5866 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
5867 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
5868 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
5869 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
5870 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
5871 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
5872 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
5873 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
5874 #if defined(TARGET_PPC64)
5875 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
5876 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
5877 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
5878 #endif
5879 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
5880 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
5881 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
5882 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
5883 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
5884 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
5885 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
5886 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
5887 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
5888 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
5889 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
5890 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
5891 GEN_HANDLER_E(dcblc, 0x1F, 0x06, 0x0c, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
5892 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
5893 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
5894 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
5895 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
5896 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
5897 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
5898 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
5899 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
5900 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
5901 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
5902 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
5903 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
5904 #if defined(TARGET_PPC64)
5905 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
5906 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
5907              PPC_SEGMENT_64B),
5908 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
5909 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
5910              PPC_SEGMENT_64B),
5911 #endif
5912 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
5913 /*
5914  * XXX Those instructions will need to be handled differently for
5915  * different ISA versions
5916  */
5917 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
5918 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
5919 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
5920 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
5921 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
5922 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
5923 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
5924 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
5925 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
5926 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
5927 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
5928 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
5929 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
5930 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
5931 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
5932 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
5933 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
5934 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
5935 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
5936 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
5937 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
5938 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
5939 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
5940 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
5941 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
5942 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
5943 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
5944                PPC_NONE, PPC2_BOOKE206),
5945 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
5946                PPC_NONE, PPC2_BOOKE206),
5947 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
5948                PPC_NONE, PPC2_BOOKE206),
5949 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
5950                PPC_NONE, PPC2_BOOKE206),
5951 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
5952                PPC_NONE, PPC2_BOOKE206),
5953 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
5954 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
5955 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
5956 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
5957                PPC_BOOKE, PPC2_BOOKE206),
5958 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
5959              PPC_440_SPEC),
5960 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
5961 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
5962 
5963 #if defined(TARGET_PPC64)
5964 #undef GEN_PPC64_R2
5965 #undef GEN_PPC64_R4
5966 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
5967 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
5968 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
5969              PPC_64B)
5970 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
5971 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
5972 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
5973              PPC_64B),                                                        \
5974 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
5975              PPC_64B),                                                        \
5976 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
5977              PPC_64B)
5978 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
5979 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
5980 GEN_PPC64_R4(rldic, 0x1E, 0x04),
5981 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
5982 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
5983 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
5984 #endif
5985 
5986 #undef GEN_LDX_E
5987 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
5988 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
5989 
5990 #if defined(TARGET_PPC64)
5991 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
5992 
5993 /* HV/P7 and later only */
5994 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
5995 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
5996 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
5997 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
5998 #endif
5999 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
6000 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
6001 
6002 /* External PID based load */
6003 #undef GEN_LDEPX
6004 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
6005 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
6006               0x00000001, PPC_NONE, PPC2_BOOKE206),
6007 
6008 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
6009 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
6010 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
6011 #if defined(TARGET_PPC64)
6012 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
6013 #endif
6014 
6015 #undef GEN_STX_E
6016 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
6017 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
6018 
6019 #if defined(TARGET_PPC64)
6020 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
6021 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
6022 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
6023 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
6024 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
6025 #endif
6026 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
6027 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
6028 
6029 #undef GEN_STEPX
6030 #define GEN_STEPX(name, ldop, opc2, opc3)                                     \
6031 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
6032               0x00000001, PPC_NONE, PPC2_BOOKE206),
6033 
6034 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
6035 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
6036 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
6037 #if defined(TARGET_PPC64)
6038 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1D, 0x04)
6039 #endif
6040 
6041 #undef GEN_CRLOGIC
6042 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
6043 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
6044 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
6045 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
6046 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
6047 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
6048 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
6049 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
6050 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
6051 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
6052 
6053 #undef GEN_MAC_HANDLER
6054 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
6055 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
6056 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
6057 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
6058 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
6059 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
6060 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
6061 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
6062 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
6063 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
6064 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
6065 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
6066 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
6067 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
6068 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
6069 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
6070 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
6071 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
6072 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
6073 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
6074 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
6075 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
6076 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
6077 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
6078 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
6079 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
6080 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
6081 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
6082 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
6083 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
6084 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
6085 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
6086 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
6087 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
6088 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
6089 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
6090 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
6091 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
6092 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
6093 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
6094 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
6095 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
6096 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
6097 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
6098 
6099 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
6100                PPC_NONE, PPC2_TM),
6101 GEN_HANDLER2_E(tend,   "tend",   0x1F, 0x0E, 0x15, 0x01FFF800, \
6102                PPC_NONE, PPC2_TM),
6103 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
6104                PPC_NONE, PPC2_TM),
6105 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
6106                PPC_NONE, PPC2_TM),
6107 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
6108                PPC_NONE, PPC2_TM),
6109 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
6110                PPC_NONE, PPC2_TM),
6111 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
6112                PPC_NONE, PPC2_TM),
6113 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
6114                PPC_NONE, PPC2_TM),
6115 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
6116                PPC_NONE, PPC2_TM),
6117 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
6118                PPC_NONE, PPC2_TM),
6119 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
6120                PPC_NONE, PPC2_TM),
6121 
6122 #include "translate/fp-ops.c.inc"
6123 
6124 #include "translate/vmx-ops.c.inc"
6125 
6126 #include "translate/vsx-ops.c.inc"
6127 
6128 #include "translate/spe-ops.c.inc"
6129 };
6130 
6131 /*****************************************************************************/
6132 /* Opcode types */
6133 enum {
6134     PPC_DIRECT   = 0, /* Opcode routine        */
6135     PPC_INDIRECT = 1, /* Indirect opcode table */
6136 };
6137 
6138 #define PPC_OPCODE_MASK 0x3
6139 
6140 static inline int is_indirect_opcode(void *handler)
6141 {
6142     return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
6143 }
6144 
6145 static inline opc_handler_t **ind_table(void *handler)
6146 {
6147     return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
6148 }
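
/*
 * Table entries are tagged pointers: a direct entry points straight at an
 * opc_handler_t, while an indirect entry is a pointer to a sub-table with
 * PPC_INDIRECT OR-ed into its low bits (see create_new_table() below).
 * is_indirect_opcode() tests the tag and ind_table() strips it, which
 * relies on the handler structures being at least 4-byte aligned.
 */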
6149 
6150 /* Instruction table creation */
6151 /* Opcodes tables creation */
6152 static void fill_new_table(opc_handler_t **table, int len)
6153 {
6154     int i;
6155 
6156     for (i = 0; i < len; i++) {
6157         table[i] = &invalid_handler;
6158     }
6159 }
6160 
6161 static int create_new_table(opc_handler_t **table, unsigned char idx)
6162 {
6163     opc_handler_t **tmp;
6164 
6165     tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
6166     fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
6167     table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
6168 
6169     return 0;
6170 }
6171 
6172 static int insert_in_table(opc_handler_t **table, unsigned char idx,
6173                             opc_handler_t *handler)
6174 {
6175     if (table[idx] != &invalid_handler) {
6176         return -1;
6177     }
6178     table[idx] = handler;
6179 
6180     return 0;
6181 }
6182 
6183 static int register_direct_insn(opc_handler_t **ppc_opcodes,
6184                                 unsigned char idx, opc_handler_t *handler)
6185 {
6186     if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
6187         printf("*** ERROR: opcode %02x already assigned in main "
6188                "opcode table\n", idx);
6189         return -1;
6190     }
6191 
6192     return 0;
6193 }
6194 
6195 static int register_ind_in_table(opc_handler_t **table,
6196                                  unsigned char idx1, unsigned char idx2,
6197                                  opc_handler_t *handler)
6198 {
6199     if (table[idx1] == &invalid_handler) {
6200         if (create_new_table(table, idx1) < 0) {
6201             printf("*** ERROR: unable to create indirect table "
6202                    "idx=%02x\n", idx1);
6203             return -1;
6204         }
6205     } else {
6206         if (!is_indirect_opcode(table[idx1])) {
6207             printf("*** ERROR: idx %02x already assigned to a direct "
6208                    "opcode\n", idx1);
6209             return -1;
6210         }
6211     }
6212     if (handler != NULL &&
6213         insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
6214         printf("*** ERROR: opcode %02x already assigned in "
6215                "opcode table %02x\n", idx2, idx1);
6216         return -1;
6217     }
6218 
6219     return 0;
6220 }
6221 
6222 static int register_ind_insn(opc_handler_t **ppc_opcodes,
6223                              unsigned char idx1, unsigned char idx2,
6224                              opc_handler_t *handler)
6225 {
6226     return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
6227 }
6228 
6229 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
6230                                 unsigned char idx1, unsigned char idx2,
6231                                 unsigned char idx3, opc_handler_t *handler)
6232 {
6233     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
6234         printf("*** ERROR: unable to join indirect table idx "
6235                "[%02x-%02x]\n", idx1, idx2);
6236         return -1;
6237     }
6238     if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
6239                               handler) < 0) {
6240         printf("*** ERROR: unable to insert opcode "
6241                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
6242         return -1;
6243     }
6244 
6245     return 0;
6246 }
6247 
6248 static int register_trplind_insn(opc_handler_t **ppc_opcodes,
6249                                  unsigned char idx1, unsigned char idx2,
6250                                  unsigned char idx3, unsigned char idx4,
6251                                  opc_handler_t *handler)
6252 {
6253     opc_handler_t **table;
6254 
6255     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
6256         printf("*** ERROR: unable to join indirect table idx "
6257                "[%02x-%02x]\n", idx1, idx2);
6258         return -1;
6259     }
6260     table = ind_table(ppc_opcodes[idx1]);
6261     if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
6262         printf("*** ERROR: unable to join 2nd-level indirect table idx "
6263                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
6264         return -1;
6265     }
6266     table = ind_table(table[idx2]);
6267     if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
6268         printf("*** ERROR: unable to insert opcode "
6269                "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
6270         return -1;
6271     }
6272     return 0;
6273 }
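
/*
 * Register one entry of the opcodes[] table.  An opc2/opc3/opc4 value of
 * 0xFF means "no sub-opcode at this level", so the insn is inserted into
 * the correspondingly shallower table.
 */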
6274 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
6275 {
6276     if (insn->opc2 != 0xFF) {
6277         if (insn->opc3 != 0xFF) {
6278             if (insn->opc4 != 0xFF) {
6279                 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
6280                                           insn->opc3, insn->opc4,
6281                                           &insn->handler) < 0) {
6282                     return -1;
6283                 }
6284             } else {
6285                 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
6286                                          insn->opc3, &insn->handler) < 0) {
6287                     return -1;
6288                 }
6289             }
6290         } else {
6291             if (register_ind_insn(ppc_opcodes, insn->opc1,
6292                                   insn->opc2, &insn->handler) < 0) {
6293                 return -1;
6294             }
6295         }
6296     } else {
6297         if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
6298             return -1;
6299         }
6300     }
6301 
6302     return 0;
6303 }
6304 
6305 static int test_opcode_table(opc_handler_t **table, int len)
6306 {
6307     int i, count, tmp;
6308 
6309     for (i = 0, count = 0; i < len; i++) {
6310         /* Consistency fixup */
6311         if (table[i] == NULL) {
6312             table[i] = &invalid_handler;
6313         }
6314         if (table[i] != &invalid_handler) {
6315             if (is_indirect_opcode(table[i])) {
6316                 tmp = test_opcode_table(ind_table(table[i]),
6317                     PPC_CPU_INDIRECT_OPCODES_LEN);
6318                 if (tmp == 0) {
6319                     g_free(table[i]);
6320                     table[i] = &invalid_handler;
6321                 } else {
6322                     count++;
6323                 }
6324             } else {
6325                 count++;
6326             }
6327         }
6328     }
6329 
6330     return count;
6331 }
6332 
6333 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
6334 {
6335     if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
6336         printf("*** WARNING: no opcode defined!\n");
6337     }
6338 }
6339 
6340 /*****************************************************************************/
6341 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
6342 {
6343     PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
6344     opcode_t *opc;
6345 
6346     fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
6347     for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
6348         if (((opc->handler.type & pcc->insns_flags) != 0) ||
6349             ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
6350             if (register_insn(cpu->opcodes, opc) < 0) {
6351                 error_setg(errp, "ERROR initializing PowerPC instruction "
6352                            "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
6353                            opc->opc3);
6354                 return;
6355             }
6356         }
6357     }
6358     fix_opcode_tables(cpu->opcodes);
6359     fflush(stdout);
6360     fflush(stderr);
6361 }
6362 
void destroy_ppc_opcodes(PowerPCCPU *cpu)
{
    opc_handler_t **table, **table_2;
    int i, j, k;

    for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
        if (cpu->opcodes[i] == &invalid_handler) {
            continue;
        }
        if (is_indirect_opcode(cpu->opcodes[i])) {
            table = ind_table(cpu->opcodes[i]);
            for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
                if (table[j] == &invalid_handler) {
                    continue;
                }
                if (is_indirect_opcode(table[j])) {
                    table_2 = ind_table(table[j]);
                    for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
                        if (table_2[k] != &invalid_handler &&
                            is_indirect_opcode(table_2[k])) {
                            g_free((opc_handler_t *)((uintptr_t)table_2[k] &
                                                     ~PPC_INDIRECT));
                        }
                    }
                    g_free((opc_handler_t *)((uintptr_t)table[j] &
                                             ~PPC_INDIRECT));
                }
            }
            g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
                ~PPC_INDIRECT));
        }
    }
}

int ppc_fixup_cpu(PowerPCCPU *cpu)
{
    CPUPPCState *env = &cpu->env;

    /*
     * TCG doesn't (yet) emulate some groups of instructions that are
     * implemented on some otherwise supported CPUs (e.g. VSX and
     * decimal floating point instructions on POWER7).  We remove
     * unsupported instruction groups from the cpu state's instruction
     * masks and hope the guest can cope.  For at least the pseries
     * machine, the unavailability of these instructions can be
     * advertised to the guest via the device tree.
     */
    if ((env->insns_flags & ~PPC_TCG_INSNS)
        || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
        warn_report("Disabling some instructions which are not "
                    "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
                    env->insns_flags & ~PPC_TCG_INSNS,
                    env->insns_flags2 & ~PPC_TCG_INSNS2);
    }
    env->insns_flags &= PPC_TCG_INSNS;
    env->insns_flags2 &= PPC_TCG_INSNS2;
    return 0;
}

static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
{
    opc_handler_t **table, *handler;
    uint32_t inval;

    LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
              insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
              ctx->le_mode ? "little" : "big");

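    /*
     * Walk the opcode tables: opc1 indexes the top-level table, and every
     * PPC_INDIRECT entry points at a sub-table indexed by the next
     * sub-opcode field (opc2, then opc3, then opc4).
     */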
    table = cpu->opcodes;
    handler = table[opc1(insn)];
    if (is_indirect_opcode(handler)) {
        table = ind_table(handler);
        handler = table[opc2(insn)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc3(insn)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc4(insn)];
            }
        }
    }

    /* Is the opcode *really* valid? */
    if (unlikely(handler->handler == &gen_invalid)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx "\n",
                      opc1(insn), opc2(insn), opc3(insn), opc4(insn),
                      insn, ctx->cia);
        return false;
    }

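    /*
     * Reject encodings that have bits set inside the handler's
     * invalid-bits mask.  Handlers provide an alternate mask (inval2)
     * that is used for the SPE variants when Rc is set.
     */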
    if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
                 && Rc(insn))) {
        inval = handler->inval2;
    } else {
        inval = handler->inval1;
    }

    if (unlikely((insn & inval) != 0)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx "\n", insn & inval,
                      opc1(insn), opc2(insn), opc3(insn), opc4(insn),
                      insn, ctx->cia);
        return false;
    }

    handler->handler(ctx);
    return true;
}

static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cpu_env(cs);
    uint32_t hflags = ctx->base.tb->flags;

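    /*
     * The translation-relevant MSR bits and a few SPR-derived bits are
     * pre-packed into tb->flags (the HFLAGS_* fields), so the disassembly
     * context can be filled in without inspecting env->msr directly.
     */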
    ctx->spr_cb = env->spr_cb;
    ctx->pr = (hflags >> HFLAGS_PR) & 1;
    ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
    ctx->dr = (hflags >> HFLAGS_DR) & 1;
    ctx->hv = (hflags >> HFLAGS_HV) & 1;
    ctx->insns_flags = env->insns_flags;
    ctx->insns_flags2 = env->insns_flags2;
    ctx->access_type = -1;
    ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
    ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
    ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
    ctx->flags = env->flags;
#if defined(TARGET_PPC64)
    ctx->excp_model = env->excp_model;
    ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
    ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
    ctx->has_bhrb = !!(env->flags & POWERPC_FLAG_BHRB);
#endif
    ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
        || env->mmu_model & POWERPC_MMU_64;

    ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
    ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
    ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
    ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
    ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
    ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
    ctx->hr = (hflags >> HFLAGS_HR) & 1;
    ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
    ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
    ctx->mmcr0_pmcjce = (hflags >> HFLAGS_PMCJCE) & 1;
    ctx->pmc_other = (hflags >> HFLAGS_PMC_OTHER) & 1;
    ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;
    ctx->bhrb_enable = (hflags >> HFLAGS_BHRB_ENABLE) & 1;

    ctx->singlestep_enabled = 0;
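    /*
     * Trace flags: with single-step tracing every instruction must get
     * its own TB so a debug exception can be raised after it completes;
     * branch tracing is handled where branches are translated.
     */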
    if ((hflags >> HFLAGS_SE) & 1) {
        ctx->singlestep_enabled |= CPU_SINGLE_STEP;
        ctx->base.max_insns = 1;
    }
    if ((hflags >> HFLAGS_BE) & 1) {
        ctx->singlestep_enabled |= CPU_BRANCH_STEP;
    }
}

static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}

static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    tcg_gen_insn_start(dcbase->pc_next);
}

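/*
 * Prefixed instructions use primary opcode 1 and only exist from Power
 * ISA v3.1 on; on older CPUs opcode 1 simply falls through to the normal
 * decoders, where it is not a defined opcode.
 */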
static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    return opc1(insn) == 1;
}

static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    PowerPCCPU *cpu = POWERPC_CPU(cs);
    CPUPPCState *env = cpu_env(cs);
    target_ulong pc;
    uint32_t insn;
    bool ok;

    LOG_DISAS("----------------\n");
    LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
              ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);

    ctx->cia = pc = ctx->base.pc_next;
    insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
    ctx->base.pc_next = pc += 4;

    if (!is_prefix_insn(ctx, insn)) {
        ctx->opcode = insn;
        ok = (decode_insn32(ctx, insn) ||
              decode_legacy(cpu, ctx, insn));
    } else if ((pc & 63) == 0) {
        /*
         * Power ISA v3.1, section 1.9 "Exceptions": an attempt to
         * execute a prefixed instruction that crosses a 64-byte
         * address boundary raises a system alignment error.
         */
        gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
        ok = true;
    } else {
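        /*
         * Prefixed instruction: load the suffix word as well.
         * decode_insn64() expects the prefix in the upper 32 bits of the
         * combined 64-bit value and the suffix in the lower 32 bits.
         */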
        uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
                                             need_byteswap(ctx));
        ctx->base.pc_next = pc += 4;
        ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
    }
    if (!ok) {
        gen_invalid(ctx);
    }

    /* End the TB when crossing a page boundary. */
    if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
}

static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    DisasJumpType is_jmp = ctx->base.is_jmp;
    target_ulong nip = ctx->base.pc_next;

    if (is_jmp == DISAS_NORETURN) {
        /* We have already exited the TB. */
        return;
    }

    /* Honor single stepping. */
    if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)) {
        bool rfi_type = false;

        switch (is_jmp) {
        case DISAS_TOO_MANY:
        case DISAS_EXIT_UPDATE:
        case DISAS_CHAIN_UPDATE:
            gen_update_nip(ctx, nip);
            break;
        case DISAS_EXIT:
        case DISAS_CHAIN:
            /*
             * This is a heuristic, to put it kindly. The rfi class of
             * instructions is among the few, outside of branches, that
             * change NIP without taking an interrupt; single-step trace
             * interrupts do not fire on completion of these instructions.
             */
            rfi_type = true;
            break;
        default:
            g_assert_not_reached();
        }

        gen_debug_exception(ctx, rfi_type);
        return;
    }

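    /*
     * The *_UPDATE variants mean NIP has not been written back yet and
     * needs gen_update_nip() before leaving the TB; DISAS_CHAIN* may
     * chain or look up the next TB directly, while DISAS_EXIT* must
     * return to the main loop.
     */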
    switch (is_jmp) {
    case DISAS_TOO_MANY:
        if (use_goto_tb(ctx, nip)) {
            pmu_count_insns(ctx);
            tcg_gen_goto_tb(0);
            gen_update_nip(ctx, nip);
            tcg_gen_exit_tb(ctx->base.tb, 0);
            break;
        }
        /* fall through */
    case DISAS_CHAIN_UPDATE:
        gen_update_nip(ctx, nip);
        /* fall through */
    case DISAS_CHAIN:
        /*
         * tcg_gen_lookup_and_goto_ptr will exit the TB if
         * CF_NO_GOTO_PTR is set. Count insns now.
         */
        if (tb_cflags(ctx->base.tb) & CF_NO_GOTO_PTR) {
            pmu_count_insns(ctx);
        }

        tcg_gen_lookup_and_goto_ptr();
        break;

    case DISAS_EXIT_UPDATE:
        gen_update_nip(ctx, nip);
        /* fall through */
    case DISAS_EXIT:
        pmu_count_insns(ctx);
        tcg_gen_exit_tb(NULL, 0);
        break;

    default:
        g_assert_not_reached();
    }
}

static const TranslatorOps ppc_tr_ops = {
    .init_disas_context = ppc_tr_init_disas_context,
    .tb_start           = ppc_tr_tb_start,
    .insn_start         = ppc_tr_insn_start,
    .translate_insn     = ppc_tr_translate_insn,
    .tb_stop            = ppc_tr_tb_stop,
};

void ppc_translate_code(CPUState *cs, TranslationBlock *tb,
                        int *max_insns, vaddr pc, void *host_pc)
{
    DisasContext ctx;

    translator_loop(cs, tb, max_insns, pc, host_pc, &ppc_tr_ops, &ctx.base);
}