target/ppc: Remove special case for POWERPC_SYSCALL
[qemu.git] / target / ppc / translate.c
1 /*
2 * PowerPC emulation for qemu: main translation routines.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 * Copyright (C) 2011 Freescale Semiconductor, Inc.
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2.1 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "internal.h"
24 #include "disas/disas.h"
25 #include "exec/exec-all.h"
26 #include "tcg/tcg-op.h"
27 #include "tcg/tcg-op-gvec.h"
28 #include "qemu/host-utils.h"
29 #include "qemu/main-loop.h"
30 #include "exec/cpu_ldst.h"
32 #include "exec/helper-proto.h"
33 #include "exec/helper-gen.h"
35 #include "trace-tcg.h"
36 #include "exec/translator.h"
37 #include "exec/log.h"
38 #include "qemu/atomic128.h"
39 #include "spr_tcg.h"
41 #include "qemu/qemu-print.h"
42 #include "qapi/error.h"
44 #define CPU_SINGLE_STEP 0x1
45 #define CPU_BRANCH_STEP 0x2
46 #define GDBSTUB_SINGLE_STEP 0x4
48 /* Include definitions for instruction classes and implementation flags */
49 /* #define PPC_DEBUG_DISAS */
50 /* #define DO_PPC_STATISTICS */
52 #ifdef PPC_DEBUG_DISAS
53 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
54 #else
55 # define LOG_DISAS(...) do { } while (0)
56 #endif
57 /*****************************************************************************/
58 /* Code translation helpers */
60 /* global register indexes */
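/*
 * Buffer sizing: "r0".."r9" need 3 bytes each (including the NUL),
 * "r10".."r31" need 4; the SPE high halves "rNH" take one byte more,
 * and each CR field name "crfN" takes 5.
 */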
61 static char cpu_reg_names[10 * 3 + 22 * 4 /* GPR */
62 + 10 * 4 + 22 * 5 /* SPE GPRh */
63 + 8 * 5 /* CRF */];
64 static TCGv cpu_gpr[32];
65 static TCGv cpu_gprh[32];
66 static TCGv_i32 cpu_crf[8];
67 static TCGv cpu_nip;
68 static TCGv cpu_msr;
69 static TCGv cpu_ctr;
70 static TCGv cpu_lr;
71 #if defined(TARGET_PPC64)
72 static TCGv cpu_cfar;
73 #endif
74 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
75 static TCGv cpu_reserve;
76 static TCGv cpu_reserve_val;
77 static TCGv cpu_fpscr;
78 static TCGv_i32 cpu_access_type;
80 #include "exec/gen-icount.h"
82 void ppc_translate_init(void)
84 int i;
85 char *p;
86 size_t cpu_reg_names_size;
88 p = cpu_reg_names;
89 cpu_reg_names_size = sizeof(cpu_reg_names);
91 for (i = 0; i < 8; i++) {
92 snprintf(p, cpu_reg_names_size, "crf%d", i);
93 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
94 offsetof(CPUPPCState, crf[i]), p);
95 p += 5;
96 cpu_reg_names_size -= 5;
99 for (i = 0; i < 32; i++) {
100 snprintf(p, cpu_reg_names_size, "r%d", i);
101 cpu_gpr[i] = tcg_global_mem_new(cpu_env,
102 offsetof(CPUPPCState, gpr[i]), p);
103 p += (i < 10) ? 3 : 4;
104 cpu_reg_names_size -= (i < 10) ? 3 : 4;
105 snprintf(p, cpu_reg_names_size, "r%dH", i);
106 cpu_gprh[i] = tcg_global_mem_new(cpu_env,
107 offsetof(CPUPPCState, gprh[i]), p);
108 p += (i < 10) ? 4 : 5;
109 cpu_reg_names_size -= (i < 10) ? 4 : 5;
112 cpu_nip = tcg_global_mem_new(cpu_env,
113 offsetof(CPUPPCState, nip), "nip");
115 cpu_msr = tcg_global_mem_new(cpu_env,
116 offsetof(CPUPPCState, msr), "msr");
118 cpu_ctr = tcg_global_mem_new(cpu_env,
119 offsetof(CPUPPCState, ctr), "ctr");
121 cpu_lr = tcg_global_mem_new(cpu_env,
122 offsetof(CPUPPCState, lr), "lr");
124 #if defined(TARGET_PPC64)
125 cpu_cfar = tcg_global_mem_new(cpu_env,
126 offsetof(CPUPPCState, cfar), "cfar");
127 #endif
129 cpu_xer = tcg_global_mem_new(cpu_env,
130 offsetof(CPUPPCState, xer), "xer");
131 cpu_so = tcg_global_mem_new(cpu_env,
132 offsetof(CPUPPCState, so), "SO");
133 cpu_ov = tcg_global_mem_new(cpu_env,
134 offsetof(CPUPPCState, ov), "OV");
135 cpu_ca = tcg_global_mem_new(cpu_env,
136 offsetof(CPUPPCState, ca), "CA");
137 cpu_ov32 = tcg_global_mem_new(cpu_env,
138 offsetof(CPUPPCState, ov32), "OV32");
139 cpu_ca32 = tcg_global_mem_new(cpu_env,
140 offsetof(CPUPPCState, ca32), "CA32");
142 cpu_reserve = tcg_global_mem_new(cpu_env,
143 offsetof(CPUPPCState, reserve_addr),
144 "reserve_addr");
145 cpu_reserve_val = tcg_global_mem_new(cpu_env,
146 offsetof(CPUPPCState, reserve_val),
147 "reserve_val");
149 cpu_fpscr = tcg_global_mem_new(cpu_env,
150 offsetof(CPUPPCState, fpscr), "fpscr");
152 cpu_access_type = tcg_global_mem_new_i32(cpu_env,
153 offsetof(CPUPPCState, access_type),
154 "access_type");
157 /* internal defines */
158 struct DisasContext {
159 DisasContextBase base;
160 target_ulong cia; /* current instruction address */
161 uint32_t opcode;
162 uint32_t exception;
163 /* Routine used to access memory */
164 bool pr, hv, dr, le_mode;
165 bool lazy_tlb_flush;
166 bool need_access_type;
167 int mem_idx;
168 int access_type;
169 /* Translation flags */
170 MemOp default_tcg_memop_mask;
171 #if defined(TARGET_PPC64)
172 bool sf_mode;
173 bool has_cfar;
174 #endif
175 bool fpu_enabled;
176 bool altivec_enabled;
177 bool vsx_enabled;
178 bool spe_enabled;
179 bool tm_enabled;
180 bool gtse;
181 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
182 int singlestep_enabled;
183 uint32_t flags;
184 uint64_t insns_flags;
185 uint64_t insns_flags2;
188 /* Return true iff byteswap is needed in a scalar memop */
189 static inline bool need_byteswap(const DisasContext *ctx)
191 #if defined(TARGET_WORDS_BIGENDIAN)
192 return ctx->le_mode;
193 #else
194 return !ctx->le_mode;
195 #endif
198 /* True when active word size < size of target_long. */
199 #ifdef TARGET_PPC64
200 # define NARROW_MODE(C) (!(C)->sf_mode)
201 #else
202 # define NARROW_MODE(C) 0
203 #endif
205 struct opc_handler_t {
206 /* invalid bits for instruction 1 (Rc(opcode) == 0) */
207 uint32_t inval1;
208 /* invalid bits for instruction 2 (Rc(opcode) == 1) */
209 uint32_t inval2;
210 /* instruction type */
211 uint64_t type;
212 /* extended instruction type */
213 uint64_t type2;
214 /* handler */
215 void (*handler)(DisasContext *ctx);
216 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
217 const char *oname;
218 #endif
219 #if defined(DO_PPC_STATISTICS)
220 uint64_t count;
221 #endif
224 /* SPR load/store helpers */
225 static inline void gen_load_spr(TCGv t, int reg)
227 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
230 static inline void gen_store_spr(int reg, TCGv t)
232 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
235 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
237 if (ctx->need_access_type && ctx->access_type != access_type) {
238 tcg_gen_movi_i32(cpu_access_type, access_type);
239 ctx->access_type = access_type;
243 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
245 if (NARROW_MODE(ctx)) {
246 nip = (uint32_t)nip;
248 tcg_gen_movi_tl(cpu_nip, nip);
251 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
253 TCGv_i32 t0, t1;
256 * These are all synchronous exceptions; we set the PC back to the
257 * faulting instruction.
259 if (ctx->exception == POWERPC_EXCP_NONE) {
260 gen_update_nip(ctx, ctx->cia);
262 t0 = tcg_const_i32(excp);
263 t1 = tcg_const_i32(error);
264 gen_helper_raise_exception_err(cpu_env, t0, t1);
265 tcg_temp_free_i32(t0);
266 tcg_temp_free_i32(t1);
267 ctx->exception = excp;
268 ctx->base.is_jmp = DISAS_NORETURN;
271 static void gen_exception(DisasContext *ctx, uint32_t excp)
273 TCGv_i32 t0;
276 * These are all synchronous exceptions; we set the PC back to the
277 * faulting instruction.
279 if (ctx->exception == POWERPC_EXCP_NONE) {
280 gen_update_nip(ctx, ctx->cia);
282 t0 = tcg_const_i32(excp);
283 gen_helper_raise_exception(cpu_env, t0);
284 tcg_temp_free_i32(t0);
285 ctx->exception = excp;
286 ctx->base.is_jmp = DISAS_NORETURN;
289 static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
290 target_ulong nip)
292 TCGv_i32 t0;
294 gen_update_nip(ctx, nip);
295 t0 = tcg_const_i32(excp);
296 gen_helper_raise_exception(cpu_env, t0);
297 tcg_temp_free_i32(t0);
298 ctx->exception = excp;
299 ctx->base.is_jmp = DISAS_NORETURN;
303 * Tells the caller which exception to generate and prepares the SPR
304 * registers for that exception.
306 * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
307 * POWERPC_EXCP_DEBUG (on BookE).
309 static uint32_t gen_prep_dbgex(DisasContext *ctx)
311 if (ctx->flags & POWERPC_FLAG_DE) {
312 target_ulong dbsr = 0;
313 if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
314 dbsr = DBCR0_ICMP;
315 } else {
316 /* Must have been a branch */
317 dbsr = DBCR0_BRT;
319 TCGv t0 = tcg_temp_new();
320 gen_load_spr(t0, SPR_BOOKE_DBSR);
321 tcg_gen_ori_tl(t0, t0, dbsr);
322 gen_store_spr(SPR_BOOKE_DBSR, t0);
323 tcg_temp_free(t0);
324 return POWERPC_EXCP_DEBUG;
325 } else {
326 return POWERPC_EXCP_TRACE;
330 static void gen_debug_exception(DisasContext *ctx)
332 TCGv_i32 t0;
335 * These are all synchronous exceptions; we set the PC back to the
336 * faulting instruction.
338 if ((ctx->exception != POWERPC_EXCP_BRANCH) &&
339 (ctx->exception != POWERPC_EXCP_SYNC)) {
340 gen_update_nip(ctx, ctx->base.pc_next);
342 t0 = tcg_const_i32(EXCP_DEBUG);
343 gen_helper_raise_exception(cpu_env, t0);
344 tcg_temp_free_i32(t0);
345 ctx->base.is_jmp = DISAS_NORETURN;
348 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
350 /* Will be converted to program check if needed */
351 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
354 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
356 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
359 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
361 /* Will be converted to program check if needed */
362 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
365 /* Stop translation */
366 static inline void gen_stop_exception(DisasContext *ctx)
368 gen_update_nip(ctx, ctx->base.pc_next);
369 ctx->exception = POWERPC_EXCP_STOP;
372 #ifndef CONFIG_USER_ONLY
373 /* No need to update nip here, as execution flow will change */
374 static inline void gen_sync_exception(DisasContext *ctx)
376 ctx->exception = POWERPC_EXCP_SYNC;
378 #endif
380 /*****************************************************************************/
381 /* SPR READ/WRITE CALLBACKS */
383 void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
385 #if 0
386 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
387 printf("ERROR: try to access SPR %d !\n", sprn);
388 #endif
391 /* #define PPC_DUMP_SPR_ACCESSES */
394 * Generic callbacks:
395 * do nothing but store/retrieve the SPR value
397 static void spr_load_dump_spr(int sprn)
399 #ifdef PPC_DUMP_SPR_ACCESSES
400 TCGv_i32 t0 = tcg_const_i32(sprn);
401 gen_helper_load_dump_spr(cpu_env, t0);
402 tcg_temp_free_i32(t0);
403 #endif
406 void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
408 gen_load_spr(cpu_gpr[gprn], sprn);
409 spr_load_dump_spr(sprn);
412 static void spr_store_dump_spr(int sprn)
414 #ifdef PPC_DUMP_SPR_ACCESSES
415 TCGv_i32 t0 = tcg_const_i32(sprn);
416 gen_helper_store_dump_spr(cpu_env, t0);
417 tcg_temp_free_i32(t0);
418 #endif
421 void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
423 gen_store_spr(sprn, cpu_gpr[gprn]);
424 spr_store_dump_spr(sprn);
427 #if !defined(CONFIG_USER_ONLY)
428 void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
430 #ifdef TARGET_PPC64
431 TCGv t0 = tcg_temp_new();
432 tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
433 gen_store_spr(sprn, t0);
434 tcg_temp_free(t0);
435 spr_store_dump_spr(sprn);
436 #else
437 spr_write_generic(ctx, sprn, gprn);
438 #endif
441 void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
443 TCGv t0 = tcg_temp_new();
444 TCGv t1 = tcg_temp_new();
445 gen_load_spr(t0, sprn);
446 tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
447 tcg_gen_and_tl(t0, t0, t1);
448 gen_store_spr(sprn, t0);
449 tcg_temp_free(t0);
450 tcg_temp_free(t1);
453 void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
457 #endif
459 /* SPR common to all PowerPC */
460 /* XER */
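/*
 * XER is not stored as a single value: SO, OV, CA and the ISA v3.00
 * OV32/CA32 bits live in separate TCG globals and are merged into or
 * extracted from cpu_xer by the callbacks below.
 */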
461 void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
463 TCGv dst = cpu_gpr[gprn];
464 TCGv t0 = tcg_temp_new();
465 TCGv t1 = tcg_temp_new();
466 TCGv t2 = tcg_temp_new();
467 tcg_gen_mov_tl(dst, cpu_xer);
468 tcg_gen_shli_tl(t0, cpu_so, XER_SO);
469 tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
470 tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
471 tcg_gen_or_tl(t0, t0, t1);
472 tcg_gen_or_tl(dst, dst, t2);
473 tcg_gen_or_tl(dst, dst, t0);
474 if (is_isa300(ctx)) {
475 tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
476 tcg_gen_or_tl(dst, dst, t0);
477 tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
478 tcg_gen_or_tl(dst, dst, t0);
480 tcg_temp_free(t0);
481 tcg_temp_free(t1);
482 tcg_temp_free(t2);
485 void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
487 TCGv src = cpu_gpr[gprn];
488 /* Write all flags; the isa300-only bits are checked when reading back */
489 tcg_gen_andi_tl(cpu_xer, src,
490 ~((1u << XER_SO) |
491 (1u << XER_OV) | (1u << XER_OV32) |
492 (1u << XER_CA) | (1u << XER_CA32)));
493 tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
494 tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
495 tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
496 tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
497 tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
500 /* LR */
501 void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
503 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
506 void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
508 tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
511 /* CFAR */
512 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
513 void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
515 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
518 void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
520 tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
522 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
524 /* CTR */
525 void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
527 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
530 void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
532 tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
535 /* User read access to SPR */
536 /* USPRx */
537 /* UMMCRx */
538 /* UPMCx */
539 /* USIA */
540 /* UDECR */
541 void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
543 gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
546 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
547 void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
549 gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
551 #endif
553 /* SPR common to all non-embedded PowerPC */
554 /* DECR */
555 #if !defined(CONFIG_USER_ONLY)
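/*
 * The decrementer is time-dependent, so when icount is in use
 * (CF_USE_ICOUNT) the helper call is preceded by gen_io_start() and
 * translation is stopped afterwards.
 */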
556 void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
558 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
559 gen_io_start();
561 gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
562 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
563 gen_stop_exception(ctx);
567 void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
569 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
570 gen_io_start();
572 gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
573 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
574 gen_stop_exception(ctx);
577 #endif
579 /* SPR common to all non-embedded PowerPC, except 601 */
580 /* Time base */
581 void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
583 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
584 gen_io_start();
586 gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
587 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
588 gen_io_end();
589 gen_stop_exception(ctx);
593 void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
595 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
596 gen_io_start();
598 gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
599 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
600 gen_io_end();
601 gen_stop_exception(ctx);
605 void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
607 gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
610 void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
612 gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
615 #if !defined(CONFIG_USER_ONLY)
616 void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
618 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
619 gen_io_start();
621 gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
622 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
623 gen_io_end();
624 gen_stop_exception(ctx);
628 void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
630 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
631 gen_io_start();
633 gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
634 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
635 gen_io_end();
636 gen_stop_exception(ctx);
640 void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
642 gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
645 void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
647 gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
650 #if defined(TARGET_PPC64)
651 void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
653 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
654 gen_io_start();
656 gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
657 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
658 gen_stop_exception(ctx);
662 void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
664 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
665 gen_io_start();
667 gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
668 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
669 gen_stop_exception(ctx);
673 /* HDECR */
674 void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
676 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
677 gen_io_start();
679 gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
680 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
681 gen_io_end();
682 gen_stop_exception(ctx);
686 void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
688 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
689 gen_io_start();
691 gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
692 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
693 gen_io_end();
694 gen_stop_exception(ctx);
698 void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
700 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
701 gen_io_start();
703 gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
704 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
705 gen_stop_exception(ctx);
709 void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
711 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
712 gen_io_start();
714 gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
715 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
716 gen_stop_exception(ctx);
720 void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
722 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
723 gen_io_start();
725 gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
726 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
727 gen_stop_exception(ctx);
731 #endif
732 #endif
734 #if !defined(CONFIG_USER_ONLY)
735 /* IBAT0U...IBAT7U */
736 /* IBAT0L...IBAT7L */
737 void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
739 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
740 offsetof(CPUPPCState,
741 IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
744 void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
746 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
747 offsetof(CPUPPCState,
748 IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
751 void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
753 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
754 gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
755 tcg_temp_free_i32(t0);
758 void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
760 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4);
761 gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
762 tcg_temp_free_i32(t0);
765 void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
767 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2);
768 gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
769 tcg_temp_free_i32(t0);
772 void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
774 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4);
775 gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
776 tcg_temp_free_i32(t0);
779 /* DBAT0U...DBAT7U */
780 /* DBAT0L...DBAT7L */
781 void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
783 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
784 offsetof(CPUPPCState,
785 DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
788 void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
790 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
791 offsetof(CPUPPCState,
792 DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
795 void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
797 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2);
798 gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
799 tcg_temp_free_i32(t0);
802 void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
804 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4);
805 gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
806 tcg_temp_free_i32(t0);
809 void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
811 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2);
812 gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
813 tcg_temp_free_i32(t0);
816 void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
818 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4);
819 gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
820 tcg_temp_free_i32(t0);
823 /* SDR1 */
824 void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
826 gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
829 #if defined(TARGET_PPC64)
830 /* 64-bit PowerPC specific SPRs */
831 /* PIDR */
832 void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
834 gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
837 void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
839 gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
842 void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
844 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
847 void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
849 TCGv t0 = tcg_temp_new();
850 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
851 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
852 tcg_temp_free(t0);
854 void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
856 gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
859 void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
861 gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
864 /* DPDES */
865 void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
867 gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
870 void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
872 gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
874 #endif
875 #endif
877 /* PowerPC 601 specific registers */
878 /* RTC */
879 void spr_read_601_rtcl(DisasContext *ctx, int gprn, int sprn)
881 gen_helper_load_601_rtcl(cpu_gpr[gprn], cpu_env);
884 void spr_read_601_rtcu(DisasContext *ctx, int gprn, int sprn)
886 gen_helper_load_601_rtcu(cpu_gpr[gprn], cpu_env);
889 #if !defined(CONFIG_USER_ONLY)
890 void spr_write_601_rtcu(DisasContext *ctx, int sprn, int gprn)
892 gen_helper_store_601_rtcu(cpu_env, cpu_gpr[gprn]);
895 void spr_write_601_rtcl(DisasContext *ctx, int sprn, int gprn)
897 gen_helper_store_601_rtcl(cpu_env, cpu_gpr[gprn]);
900 void spr_write_hid0_601(DisasContext *ctx, int sprn, int gprn)
902 gen_helper_store_hid0_601(cpu_env, cpu_gpr[gprn]);
903 /* Must stop the translation as endianness may have changed */
904 gen_stop_exception(ctx);
906 #endif
908 /* Unified bats */
909 #if !defined(CONFIG_USER_ONLY)
910 void spr_read_601_ubat(DisasContext *ctx, int gprn, int sprn)
912 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
913 offsetof(CPUPPCState,
914 IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
917 void spr_write_601_ubatu(DisasContext *ctx, int sprn, int gprn)
919 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
920 gen_helper_store_601_batl(cpu_env, t0, cpu_gpr[gprn]);
921 tcg_temp_free_i32(t0);
924 void spr_write_601_ubatl(DisasContext *ctx, int sprn, int gprn)
926 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
927 gen_helper_store_601_batu(cpu_env, t0, cpu_gpr[gprn]);
928 tcg_temp_free_i32(t0);
930 #endif
932 /* PowerPC 40x specific registers */
933 #if !defined(CONFIG_USER_ONLY)
934 void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
936 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
937 gen_io_start();
939 gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
940 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
941 gen_stop_exception(ctx);
945 void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
947 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
948 gen_io_start();
950 gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
951 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
952 gen_stop_exception(ctx);
956 void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
958 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
959 gen_io_start();
961 gen_store_spr(sprn, cpu_gpr[gprn]);
962 gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
963 /* We must stop translation as we may have rebooted */
964 gen_stop_exception(ctx);
965 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
966 gen_stop_exception(ctx);
970 void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
972 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
973 gen_io_start();
975 gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
976 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
977 gen_stop_exception(ctx);
981 void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
983 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
984 gen_io_start();
986 gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
987 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
988 gen_stop_exception(ctx);
992 void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
994 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
995 gen_io_start();
997 gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
998 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
999 gen_stop_exception(ctx);
1002 #endif
1004 /* PowerPC 403 specific registers */
1005 /* PBL1 / PBU1 / PBL2 / PBU2 */
1006 #if !defined(CONFIG_USER_ONLY)
1007 void spr_read_403_pbr(DisasContext *ctx, int gprn, int sprn)
1009 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
1010 offsetof(CPUPPCState, pb[sprn - SPR_403_PBL1]));
1013 void spr_write_403_pbr(DisasContext *ctx, int sprn, int gprn)
1015 TCGv_i32 t0 = tcg_const_i32(sprn - SPR_403_PBL1);
1016 gen_helper_store_403_pbr(cpu_env, t0, cpu_gpr[gprn]);
1017 tcg_temp_free_i32(t0);
1020 void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
1022 TCGv t0 = tcg_temp_new();
1023 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
1024 gen_store_spr(SPR_PIR, t0);
1025 tcg_temp_free(t0);
1027 #endif
1029 /* SPE specific registers */
1030 void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
1032 TCGv_i32 t0 = tcg_temp_new_i32();
1033 tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
1034 tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
1035 tcg_temp_free_i32(t0);
1038 void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
1040 TCGv_i32 t0 = tcg_temp_new_i32();
1041 tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
1042 tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
1043 tcg_temp_free_i32(t0);
1046 #if !defined(CONFIG_USER_ONLY)
1047 /* Callback used to write the exception vector base */
1048 void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
1050 TCGv t0 = tcg_temp_new();
1051 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
1052 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
1053 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
1054 gen_store_spr(sprn, t0);
1055 tcg_temp_free(t0);
1058 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
1060 int sprn_offs;
1062 if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
1063 sprn_offs = sprn - SPR_BOOKE_IVOR0;
1064 } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
1065 sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
1066 } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
1067 sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
1068 } else {
1069 printf("Trying to write an unknown exception vector %d %03x\n",
1070 sprn, sprn);
1071 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
1072 return;
1075 TCGv t0 = tcg_temp_new();
1076 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
1077 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
1078 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
1079 gen_store_spr(sprn, t0);
1080 tcg_temp_free(t0);
1082 #endif
1084 #ifdef TARGET_PPC64
1085 #ifndef CONFIG_USER_ONLY
1086 void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
1088 TCGv t0 = tcg_temp_new();
1089 TCGv t1 = tcg_temp_new();
1090 TCGv t2 = tcg_temp_new();
1093 * Note, the HV=1 PR=0 case is handled earlier by simply using
1094 * spr_write_generic for HV mode in the SPR table
1097 /* Build insertion mask into t1 based on context */
1098 if (ctx->pr) {
1099 gen_load_spr(t1, SPR_UAMOR);
1100 } else {
1101 gen_load_spr(t1, SPR_AMOR);
1104 /* Mask new bits into t2 */
1105 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1107 /* Load AMR and clear new bits in t0 */
1108 gen_load_spr(t0, SPR_AMR);
1109 tcg_gen_andc_tl(t0, t0, t1);
1111 /* OR in the new bits and write it out */
1112 tcg_gen_or_tl(t0, t0, t2);
1113 gen_store_spr(SPR_AMR, t0);
1114 spr_store_dump_spr(SPR_AMR);
1116 tcg_temp_free(t0);
1117 tcg_temp_free(t1);
1118 tcg_temp_free(t2);
1121 void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
1123 TCGv t0 = tcg_temp_new();
1124 TCGv t1 = tcg_temp_new();
1125 TCGv t2 = tcg_temp_new();
1128 * Note, the HV=1 case is handled earlier by simply using
1129 * spr_write_generic for HV mode in the SPR table
1132 /* Build insertion mask into t1 based on context */
1133 gen_load_spr(t1, SPR_AMOR);
1135 /* Mask new bits into t2 */
1136 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1138 /* Load AMR and clear new bits in t0 */
1139 gen_load_spr(t0, SPR_UAMOR);
1140 tcg_gen_andc_tl(t0, t0, t1);
1142 /* OR in the new bits and write it out */
1143 tcg_gen_or_tl(t0, t0, t2);
1144 gen_store_spr(SPR_UAMOR, t0);
1145 spr_store_dump_spr(SPR_UAMOR);
1147 tcg_temp_free(t0);
1148 tcg_temp_free(t1);
1149 tcg_temp_free(t2);
1152 void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
1154 TCGv t0 = tcg_temp_new();
1155 TCGv t1 = tcg_temp_new();
1156 TCGv t2 = tcg_temp_new();
1159 * Note, the HV=1 case is handled earlier by simply using
1160 * spr_write_generic for HV mode in the SPR table
1163 /* Build insertion mask into t1 based on context */
1164 gen_load_spr(t1, SPR_AMOR);
1166 /* Mask new bits into t2 */
1167 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1169 /* Load AMR and clear new bits in t0 */
1170 gen_load_spr(t0, SPR_IAMR);
1171 tcg_gen_andc_tl(t0, t0, t1);
1173 /* OR in the new bits and write it out */
1174 tcg_gen_or_tl(t0, t0, t2);
1175 gen_store_spr(SPR_IAMR, t0);
1176 spr_store_dump_spr(SPR_IAMR);
1178 tcg_temp_free(t0);
1179 tcg_temp_free(t1);
1180 tcg_temp_free(t2);
1182 #endif
1183 #endif
1185 #ifndef CONFIG_USER_ONLY
1186 void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
1188 gen_helper_fixup_thrm(cpu_env);
1189 gen_load_spr(cpu_gpr[gprn], sprn);
1190 spr_load_dump_spr(sprn);
1192 #endif /* !CONFIG_USER_ONLY */
1194 #if !defined(CONFIG_USER_ONLY)
1195 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
1197 TCGv t0 = tcg_temp_new();
1199 tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
1200 gen_store_spr(sprn, t0);
1201 tcg_temp_free(t0);
1204 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
1206 TCGv t0 = tcg_temp_new();
1208 tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
1209 gen_store_spr(sprn, t0);
1210 tcg_temp_free(t0);
1213 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
1215 TCGv t0 = tcg_temp_new();
1217 tcg_gen_andi_tl(t0, cpu_gpr[gprn],
1218 ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
1219 gen_store_spr(sprn, t0);
1220 tcg_temp_free(t0);
1223 void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
1225 gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
1228 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
1230 TCGv_i32 t0 = tcg_const_i32(sprn);
1231 gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
1232 tcg_temp_free_i32(t0);
1234 void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
1236 gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
1238 void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
1240 gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
1243 #endif
1245 #if !defined(CONFIG_USER_ONLY)
1246 void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
1248 TCGv val = tcg_temp_new();
1249 tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
1250 gen_store_spr(SPR_BOOKE_MAS3, val);
1251 tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
1252 gen_store_spr(SPR_BOOKE_MAS7, val);
1253 tcg_temp_free(val);
1256 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
1258 TCGv mas7 = tcg_temp_new();
1259 TCGv mas3 = tcg_temp_new();
1260 gen_load_spr(mas7, SPR_BOOKE_MAS7);
1261 tcg_gen_shli_tl(mas7, mas7, 32);
1262 gen_load_spr(mas3, SPR_BOOKE_MAS3);
1263 tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
1264 tcg_temp_free(mas3);
1265 tcg_temp_free(mas7);
1268 #endif
1270 #ifdef TARGET_PPC64
1271 static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
1272 int bit, int sprn, int cause)
1274 TCGv_i32 t1 = tcg_const_i32(bit);
1275 TCGv_i32 t2 = tcg_const_i32(sprn);
1276 TCGv_i32 t3 = tcg_const_i32(cause);
1278 gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);
1280 tcg_temp_free_i32(t3);
1281 tcg_temp_free_i32(t2);
1282 tcg_temp_free_i32(t1);
1285 static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
1286 int bit, int sprn, int cause)
1288 TCGv_i32 t1 = tcg_const_i32(bit);
1289 TCGv_i32 t2 = tcg_const_i32(sprn);
1290 TCGv_i32 t3 = tcg_const_i32(cause);
1292 gen_helper_msr_facility_check(cpu_env, t1, t2, t3);
1294 tcg_temp_free_i32(t3);
1295 tcg_temp_free_i32(t2);
1296 tcg_temp_free_i32(t1);
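/*
 * Some 64-bit SPRs (the TM and EBB checkpoint registers below) expose
 * their upper 32 bits under a separate SPR number; these helpers access
 * the high half of the SPR numbered sprn - 1.
 */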
1299 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
1301 TCGv spr_up = tcg_temp_new();
1302 TCGv spr = tcg_temp_new();
1304 gen_load_spr(spr, sprn - 1);
1305 tcg_gen_shri_tl(spr_up, spr, 32);
1306 tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
1308 tcg_temp_free(spr);
1309 tcg_temp_free(spr_up);
1312 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
1314 TCGv spr = tcg_temp_new();
1316 gen_load_spr(spr, sprn - 1);
1317 tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
1318 gen_store_spr(sprn - 1, spr);
1320 tcg_temp_free(spr);
1323 #if !defined(CONFIG_USER_ONLY)
1324 void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
1326 TCGv hmer = tcg_temp_new();
1328 gen_load_spr(hmer, sprn);
1329 tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
1330 gen_store_spr(sprn, hmer);
1331 spr_store_dump_spr(sprn);
1332 tcg_temp_free(hmer);
1335 void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
1337 gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
1339 #endif /* !defined(CONFIG_USER_ONLY) */
1341 void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
1343 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1344 spr_read_generic(ctx, gprn, sprn);
1347 void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
1349 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1350 spr_write_generic(ctx, sprn, gprn);
1353 void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
1355 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1356 spr_read_generic(ctx, gprn, sprn);
1359 void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
1361 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1362 spr_write_generic(ctx, sprn, gprn);
1365 void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
1367 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1368 spr_read_prev_upper32(ctx, gprn, sprn);
1371 void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
1373 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1374 spr_write_prev_upper32(ctx, sprn, gprn);
1377 void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
1379 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1380 spr_read_generic(ctx, gprn, sprn);
1383 void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
1385 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1386 spr_write_generic(ctx, sprn, gprn);
1389 void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
1391 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1392 spr_read_prev_upper32(ctx, gprn, sprn);
1395 void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
1397 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1398 spr_write_prev_upper32(ctx, sprn, gprn);
1400 #endif
1402 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
1403 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
1405 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \
1406 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
1408 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
1409 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
1411 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \
1412 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
1414 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \
1415 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)
1417 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
1418 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
1420 typedef struct opcode_t {
1421 unsigned char opc1, opc2, opc3, opc4;
1422 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
1423 unsigned char pad[4];
1424 #endif
1425 opc_handler_t handler;
1426 const char *oname;
1427 } opcode_t;
1429 /* Helpers for priv. check */
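/*
 * CHK_SV refuses problem state (PR=1), CHK_HV additionally requires
 * hypervisor state, and CHK_HVRM further requires data relocation to be
 * off (real mode).  With CONFIG_USER_ONLY all three raise the privileged
 * instruction exception unconditionally.
 */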
1430 #define GEN_PRIV \
1431 do { \
1432 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \
1433 } while (0)
1435 #if defined(CONFIG_USER_ONLY)
1436 #define CHK_HV GEN_PRIV
1437 #define CHK_SV GEN_PRIV
1438 #define CHK_HVRM GEN_PRIV
1439 #else
1440 #define CHK_HV \
1441 do { \
1442 if (unlikely(ctx->pr || !ctx->hv)) { \
1443 GEN_PRIV; \
1445 } while (0)
1446 #define CHK_SV \
1447 do { \
1448 if (unlikely(ctx->pr)) { \
1449 GEN_PRIV; \
1451 } while (0)
1452 #define CHK_HVRM \
1453 do { \
1454 if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
1455 GEN_PRIV; \
1457 } while (0)
1458 #endif
1460 #define CHK_NONE
1462 /*****************************************************************************/
1463 /* PowerPC instructions table */
1465 #if defined(DO_PPC_STATISTICS)
1466 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
1468 .opc1 = op1, \
1469 .opc2 = op2, \
1470 .opc3 = op3, \
1471 .opc4 = 0xff, \
1472 .handler = { \
1473 .inval1 = invl, \
1474 .type = _typ, \
1475 .type2 = _typ2, \
1476 .handler = &gen_##name, \
1477 .oname = stringify(name), \
1478 }, \
1479 .oname = stringify(name), \
1481 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
1483 .opc1 = op1, \
1484 .opc2 = op2, \
1485 .opc3 = op3, \
1486 .opc4 = 0xff, \
1487 .handler = { \
1488 .inval1 = invl1, \
1489 .inval2 = invl2, \
1490 .type = _typ, \
1491 .type2 = _typ2, \
1492 .handler = &gen_##name, \
1493 .oname = stringify(name), \
1494 }, \
1495 .oname = stringify(name), \
1497 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
1499 .opc1 = op1, \
1500 .opc2 = op2, \
1501 .opc3 = op3, \
1502 .opc4 = 0xff, \
1503 .handler = { \
1504 .inval1 = invl, \
1505 .type = _typ, \
1506 .type2 = _typ2, \
1507 .handler = &gen_##name, \
1508 .oname = onam, \
1509 }, \
1510 .oname = onam, \
1512 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \
1514 .opc1 = op1, \
1515 .opc2 = op2, \
1516 .opc3 = op3, \
1517 .opc4 = op4, \
1518 .handler = { \
1519 .inval1 = invl, \
1520 .type = _typ, \
1521 .type2 = _typ2, \
1522 .handler = &gen_##name, \
1523 .oname = stringify(name), \
1524 }, \
1525 .oname = stringify(name), \
1527 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \
1529 .opc1 = op1, \
1530 .opc2 = op2, \
1531 .opc3 = op3, \
1532 .opc4 = op4, \
1533 .handler = { \
1534 .inval1 = invl, \
1535 .type = _typ, \
1536 .type2 = _typ2, \
1537 .handler = &gen_##name, \
1538 .oname = onam, \
1539 }, \
1540 .oname = onam, \
1542 #else
1543 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
1545 .opc1 = op1, \
1546 .opc2 = op2, \
1547 .opc3 = op3, \
1548 .opc4 = 0xff, \
1549 .handler = { \
1550 .inval1 = invl, \
1551 .type = _typ, \
1552 .type2 = _typ2, \
1553 .handler = &gen_##name, \
1554 }, \
1555 .oname = stringify(name), \
1557 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
1559 .opc1 = op1, \
1560 .opc2 = op2, \
1561 .opc3 = op3, \
1562 .opc4 = 0xff, \
1563 .handler = { \
1564 .inval1 = invl1, \
1565 .inval2 = invl2, \
1566 .type = _typ, \
1567 .type2 = _typ2, \
1568 .handler = &gen_##name, \
1569 }, \
1570 .oname = stringify(name), \
1572 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
1574 .opc1 = op1, \
1575 .opc2 = op2, \
1576 .opc3 = op3, \
1577 .opc4 = 0xff, \
1578 .handler = { \
1579 .inval1 = invl, \
1580 .type = _typ, \
1581 .type2 = _typ2, \
1582 .handler = &gen_##name, \
1583 }, \
1584 .oname = onam, \
1586 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \
1588 .opc1 = op1, \
1589 .opc2 = op2, \
1590 .opc3 = op3, \
1591 .opc4 = op4, \
1592 .handler = { \
1593 .inval1 = invl, \
1594 .type = _typ, \
1595 .type2 = _typ2, \
1596 .handler = &gen_##name, \
1597 }, \
1598 .oname = stringify(name), \
1600 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \
1602 .opc1 = op1, \
1603 .opc2 = op2, \
1604 .opc3 = op3, \
1605 .opc4 = op4, \
1606 .handler = { \
1607 .inval1 = invl, \
1608 .type = _typ, \
1609 .type2 = _typ2, \
1610 .handler = &gen_##name, \
1611 }, \
1612 .oname = onam, \
1614 #endif
1616 /* Invalid instruction */
1617 static void gen_invalid(DisasContext *ctx)
1619 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
1622 static opc_handler_t invalid_handler = {
1623 .inval1 = 0xFFFFFFFF,
1624 .inval2 = 0xFFFFFFFF,
1625 .type = PPC_NONE,
1626 .type2 = PPC_NONE,
1627 .handler = gen_invalid,
1630 /*** Integer comparison ***/
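/*
 * gen_op_cmp() builds the CR field without branches: it starts from
 * CRF_EQ, lets two movcond ops overwrite that with CRF_LT or CRF_GT,
 * and finally ORs in the current SO bit.
 */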
1632 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
1634 TCGv t0 = tcg_temp_new();
1635 TCGv t1 = tcg_temp_new();
1636 TCGv_i32 t = tcg_temp_new_i32();
1638 tcg_gen_movi_tl(t0, CRF_EQ);
1639 tcg_gen_movi_tl(t1, CRF_LT);
1640 tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
1641 t0, arg0, arg1, t1, t0);
1642 tcg_gen_movi_tl(t1, CRF_GT);
1643 tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
1644 t0, arg0, arg1, t1, t0);
1646 tcg_gen_trunc_tl_i32(t, t0);
1647 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
1648 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);
1650 tcg_temp_free(t0);
1651 tcg_temp_free(t1);
1652 tcg_temp_free_i32(t);
1655 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
1657 TCGv t0 = tcg_const_tl(arg1);
1658 gen_op_cmp(arg0, t0, s, crf);
1659 tcg_temp_free(t0);
1662 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
1664 TCGv t0, t1;
1665 t0 = tcg_temp_new();
1666 t1 = tcg_temp_new();
1667 if (s) {
1668 tcg_gen_ext32s_tl(t0, arg0);
1669 tcg_gen_ext32s_tl(t1, arg1);
1670 } else {
1671 tcg_gen_ext32u_tl(t0, arg0);
1672 tcg_gen_ext32u_tl(t1, arg1);
1674 gen_op_cmp(t0, t1, s, crf);
1675 tcg_temp_free(t1);
1676 tcg_temp_free(t0);
1679 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
1681 TCGv t0 = tcg_const_tl(arg1);
1682 gen_op_cmp32(arg0, t0, s, crf);
1683 tcg_temp_free(t0);
1686 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
1688 if (NARROW_MODE(ctx)) {
1689 gen_op_cmpi32(reg, 0, 1, 0);
1690 } else {
1691 gen_op_cmpi(reg, 0, 1, 0);
1695 /* cmp */
1696 static void gen_cmp(DisasContext *ctx)
1698 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
1699 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
1700 1, crfD(ctx->opcode));
1701 } else {
1702 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
1703 1, crfD(ctx->opcode));
1707 /* cmpi */
1708 static void gen_cmpi(DisasContext *ctx)
1710 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
1711 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
1712 1, crfD(ctx->opcode));
1713 } else {
1714 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
1715 1, crfD(ctx->opcode));
1719 /* cmpl */
1720 static void gen_cmpl(DisasContext *ctx)
1722 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
1723 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
1724 0, crfD(ctx->opcode));
1725 } else {
1726 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
1727 0, crfD(ctx->opcode));
1731 /* cmpli */
1732 static void gen_cmpli(DisasContext *ctx)
1734 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
1735 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
1736 0, crfD(ctx->opcode));
1737 } else {
1738 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
1739 0, crfD(ctx->opcode));
1743 /* cmprb - range comparison: isupper, isalpha, islower */
1744 static void gen_cmprb(DisasContext *ctx)
1746 TCGv_i32 src1 = tcg_temp_new_i32();
1747 TCGv_i32 src2 = tcg_temp_new_i32();
1748 TCGv_i32 src2lo = tcg_temp_new_i32();
1749 TCGv_i32 src2hi = tcg_temp_new_i32();
1750 TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];
1752 tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
1753 tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);
1755 tcg_gen_andi_i32(src1, src1, 0xFF);
1756 tcg_gen_ext8u_i32(src2lo, src2);
1757 tcg_gen_shri_i32(src2, src2, 8);
1758 tcg_gen_ext8u_i32(src2hi, src2);
1760 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1761 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1762 tcg_gen_and_i32(crf, src2lo, src2hi);
1764 if (ctx->opcode & 0x00200000) {
1765 tcg_gen_shri_i32(src2, src2, 8);
1766 tcg_gen_ext8u_i32(src2lo, src2);
1767 tcg_gen_shri_i32(src2, src2, 8);
1768 tcg_gen_ext8u_i32(src2hi, src2);
1769 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1770 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1771 tcg_gen_and_i32(src2lo, src2lo, src2hi);
1772 tcg_gen_or_i32(crf, crf, src2lo);
1774 tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
1775 tcg_temp_free_i32(src1);
1776 tcg_temp_free_i32(src2);
1777 tcg_temp_free_i32(src2lo);
1778 tcg_temp_free_i32(src2hi);
1781 #if defined(TARGET_PPC64)
1782 /* cmpeqb */
1783 static void gen_cmpeqb(DisasContext *ctx)
1785 gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1786 cpu_gpr[rB(ctx->opcode)]);
1788 #endif
1790 /* isel (PowerPC 2.03 specification) */
1791 static void gen_isel(DisasContext *ctx)
1793 uint32_t bi = rC(ctx->opcode);
1794 uint32_t mask = 0x08 >> (bi & 0x03);
1795 TCGv t0 = tcg_temp_new();
1796 TCGv zr;
1798 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
1799 tcg_gen_andi_tl(t0, t0, mask);
1801 zr = tcg_const_tl(0);
1802 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
1803 rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
1804 cpu_gpr[rB(ctx->opcode)]);
1805 tcg_temp_free(zr);
1806 tcg_temp_free(t0);
1809 /* cmpb: PowerPC 2.05 specification */
1810 static void gen_cmpb(DisasContext *ctx)
1812 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
1813 cpu_gpr[rB(ctx->opcode)]);
1816 /*** Integer arithmetic ***/
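/*
 * gen_op_arith_compute_ov() derives OV from the signs: arg0 is the
 * result, and overflow is ((arg0 ^ arg2) & ~(arg1 ^ arg2)) for addition
 * or ((arg0 ^ arg2) & (arg1 ^ arg2)) for subtraction, reduced to its
 * sign bit (bit 31 in narrow mode, bit 63 otherwise).
 */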
1818 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
1819 TCGv arg1, TCGv arg2, int sub)
1821 TCGv t0 = tcg_temp_new();
1823 tcg_gen_xor_tl(cpu_ov, arg0, arg2);
1824 tcg_gen_xor_tl(t0, arg1, arg2);
1825 if (sub) {
1826 tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
1827 } else {
1828 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
1830 tcg_temp_free(t0);
1831 if (NARROW_MODE(ctx)) {
1832 tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
1833 if (is_isa300(ctx)) {
1834 tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1836 } else {
1837 if (is_isa300(ctx)) {
1838 tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
1840 tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
1842 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
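/*
 * gen_op_arith_compute_ca32() computes CA32, the carry out of the low
 * 32 bits: bit 32 of (arg0 ^ arg1 ^ res) for addition, with the operand
 * XOR replaced by eqv (complemented XOR) for subtraction.
 */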
1845 static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
1846 TCGv res, TCGv arg0, TCGv arg1,
1847 TCGv ca32, int sub)
1849 TCGv t0;
1851 if (!is_isa300(ctx)) {
1852 return;
1855 t0 = tcg_temp_new();
1856 if (sub) {
1857 tcg_gen_eqv_tl(t0, arg0, arg1);
1858 } else {
1859 tcg_gen_xor_tl(t0, arg0, arg1);
1861 tcg_gen_xor_tl(t0, t0, res);
1862 tcg_gen_extract_tl(ca32, t0, 32, 1);
1863 tcg_temp_free(t0);
1866 /* Common add function */
1867 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
1868 TCGv arg2, TCGv ca, TCGv ca32,
1869 bool add_ca, bool compute_ca,
1870 bool compute_ov, bool compute_rc0)
1872 TCGv t0 = ret;
1874 if (compute_ca || compute_ov) {
1875 t0 = tcg_temp_new();
1878 if (compute_ca) {
1879 if (NARROW_MODE(ctx)) {
1881 * Caution: a non-obvious corner case of the spec is that
1882 * we must produce the *entire* 64-bit addition, but
1883 * produce the carry into bit 32.
1885 TCGv t1 = tcg_temp_new();
1886 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */
1887 tcg_gen_add_tl(t0, arg1, arg2);
1888 if (add_ca) {
1889 tcg_gen_add_tl(t0, t0, ca);
1891 tcg_gen_xor_tl(ca, t0, t1); /* bits changed w/ carry */
1892 tcg_temp_free(t1);
1893 tcg_gen_extract_tl(ca, ca, 32, 1);
1894 if (is_isa300(ctx)) {
1895 tcg_gen_mov_tl(ca32, ca);
1897 } else {
1898 TCGv zero = tcg_const_tl(0);
1899 if (add_ca) {
1900 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
1901 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
1902 } else {
1903 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
1905 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
1906 tcg_temp_free(zero);
1908 } else {
1909 tcg_gen_add_tl(t0, arg1, arg2);
1910 if (add_ca) {
1911 tcg_gen_add_tl(t0, t0, ca);
1915 if (compute_ov) {
1916 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1918 if (unlikely(compute_rc0)) {
1919 gen_set_Rc0(ctx, t0);
1922 if (t0 != ret) {
1923 tcg_gen_mov_tl(ret, t0);
1924 tcg_temp_free(t0);
1927 /* Add functions with two operands */
1928 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov) \
1929 static void glue(gen_, name)(DisasContext *ctx) \
1931 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
1932 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1933 ca, glue(ca, 32), \
1934 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1936 /* Add functions with one operand and one immediate */
1937 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca, \
1938 add_ca, compute_ca, compute_ov) \
1939 static void glue(gen_, name)(DisasContext *ctx) \
1941 TCGv t0 = tcg_const_tl(const_val); \
1942 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
1943 cpu_gpr[rA(ctx->opcode)], t0, \
1944 ca, glue(ca, 32), \
1945 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1946 tcg_temp_free(t0); \
1949 /* add add. addo addo. */
1950 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
1951 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
1952 /* addc addc. addco addco. */
1953 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
1954 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
1955 /* adde adde. addeo addeo. */
1956 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
1957 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
1958 /* addme addme. addmeo addmeo. */
1959 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
1960 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
1961 /* addex */
1962 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
1963 /* addze addze. addzeo addzeo. */
1964 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
1965 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
1966 /* addi */
1967 static void gen_addi(DisasContext *ctx)
1969 target_long simm = SIMM(ctx->opcode);
1971 if (rA(ctx->opcode) == 0) {
1972 /* li case */
1973 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
1974 } else {
1975 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
1976 cpu_gpr[rA(ctx->opcode)], simm);
1979 /* addic addic.*/
1980 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
1982 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
1983 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1984 c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
1985 tcg_temp_free(c);
1988 static void gen_addic(DisasContext *ctx)
1990 gen_op_addic(ctx, 0);
1993 static void gen_addic_(DisasContext *ctx)
1995 gen_op_addic(ctx, 1);
1998 /* addis */
1999 static void gen_addis(DisasContext *ctx)
2001 target_long simm = SIMM(ctx->opcode);
2003 if (rA(ctx->opcode) == 0) {
2004 /* lis case */
2005 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
2006 } else {
2007 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
2008 cpu_gpr[rA(ctx->opcode)], simm << 16);
2012 /* addpcis */
2013 static void gen_addpcis(DisasContext *ctx)
2015 target_long d = DX(ctx->opcode);
2017 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->base.pc_next + (d << 16));
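/*
 * The divide helpers guard against division by zero and, for signed
 * division, INT_MIN / -1: the condition is computed into t2, the
 * divisor is forced to a harmless value when it is set, and t2 also
 * provides the value written to OV when requested.
 */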
2020 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
2021 TCGv arg2, int sign, int compute_ov)
2023 TCGv_i32 t0 = tcg_temp_new_i32();
2024 TCGv_i32 t1 = tcg_temp_new_i32();
2025 TCGv_i32 t2 = tcg_temp_new_i32();
2026 TCGv_i32 t3 = tcg_temp_new_i32();
2028 tcg_gen_trunc_tl_i32(t0, arg1);
2029 tcg_gen_trunc_tl_i32(t1, arg2);
2030 if (sign) {
2031 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
2032 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
2033 tcg_gen_and_i32(t2, t2, t3);
2034 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
2035 tcg_gen_or_i32(t2, t2, t3);
2036 tcg_gen_movi_i32(t3, 0);
2037 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
2038 tcg_gen_div_i32(t3, t0, t1);
2039 tcg_gen_extu_i32_tl(ret, t3);
2040 } else {
2041 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
2042 tcg_gen_movi_i32(t3, 0);
2043 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
2044 tcg_gen_divu_i32(t3, t0, t1);
2045 tcg_gen_extu_i32_tl(ret, t3);
2047 if (compute_ov) {
2048 tcg_gen_extu_i32_tl(cpu_ov, t2);
2049 if (is_isa300(ctx)) {
2050 tcg_gen_extu_i32_tl(cpu_ov32, t2);
2052 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2054 tcg_temp_free_i32(t0);
2055 tcg_temp_free_i32(t1);
2056 tcg_temp_free_i32(t2);
2057 tcg_temp_free_i32(t3);
2059 if (unlikely(Rc(ctx->opcode) != 0)) {
2060 gen_set_Rc0(ctx, ret);
2063 /* Div functions */
2064 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
2065 static void glue(gen_, name)(DisasContext *ctx) \
2067 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
2068 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2069 sign, compute_ov); \
2071 /* divwu divwu. divwuo divwuo. */
2072 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
2073 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
2074 /* divw divw. divwo divwo. */
2075 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
2076 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
2078 /* div[wd]eu[o][.] */
2079 #define GEN_DIVE(name, hlpr, compute_ov) \
2080 static void gen_##name(DisasContext *ctx) \
2082 TCGv_i32 t0 = tcg_const_i32(compute_ov); \
2083 gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \
2084 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
2085 tcg_temp_free_i32(t0); \
2086 if (unlikely(Rc(ctx->opcode) != 0)) { \
2087 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
2091 GEN_DIVE(divweu, divweu, 0);
2092 GEN_DIVE(divweuo, divweu, 1);
2093 GEN_DIVE(divwe, divwe, 0);
2094 GEN_DIVE(divweo, divwe, 1);
2096 #if defined(TARGET_PPC64)
2097 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
2098 TCGv arg2, int sign, int compute_ov)
2100 TCGv_i64 t0 = tcg_temp_new_i64();
2101 TCGv_i64 t1 = tcg_temp_new_i64();
2102 TCGv_i64 t2 = tcg_temp_new_i64();
2103 TCGv_i64 t3 = tcg_temp_new_i64();
2105 tcg_gen_mov_i64(t0, arg1);
2106 tcg_gen_mov_i64(t1, arg2);
2107 if (sign) {
2108 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
2109 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
2110 tcg_gen_and_i64(t2, t2, t3);
2111 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
2112 tcg_gen_or_i64(t2, t2, t3);
2113 tcg_gen_movi_i64(t3, 0);
2114 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
2115 tcg_gen_div_i64(ret, t0, t1);
2116 } else {
2117 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
2118 tcg_gen_movi_i64(t3, 0);
2119 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
2120 tcg_gen_divu_i64(ret, t0, t1);
2122 if (compute_ov) {
2123 tcg_gen_mov_tl(cpu_ov, t2);
2124 if (is_isa300(ctx)) {
2125 tcg_gen_mov_tl(cpu_ov32, t2);
2127 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2129 tcg_temp_free_i64(t0);
2130 tcg_temp_free_i64(t1);
2131 tcg_temp_free_i64(t2);
2132 tcg_temp_free_i64(t3);
2134 if (unlikely(Rc(ctx->opcode) != 0)) {
2135 gen_set_Rc0(ctx, ret);
2139 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
2140 static void glue(gen_, name)(DisasContext *ctx) \
2142 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
2143 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2144 sign, compute_ov); \
2146 /* divdu divdu. divduo divduo. */
2147 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
2148 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
2149 /* divd divd. divdo divdo. */
2150 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
2151 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
2153 GEN_DIVE(divdeu, divdeu, 0);
2154 GEN_DIVE(divdeuo, divdeu, 1);
2155 GEN_DIVE(divde, divde, 0);
2156 GEN_DIVE(divdeo, divde, 1);
2157 #endif
2159 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
2160 TCGv arg2, int sign)
2162 TCGv_i32 t0 = tcg_temp_new_i32();
2163 TCGv_i32 t1 = tcg_temp_new_i32();
2165 tcg_gen_trunc_tl_i32(t0, arg1);
2166 tcg_gen_trunc_tl_i32(t1, arg2);
2167 if (sign) {
2168 TCGv_i32 t2 = tcg_temp_new_i32();
2169 TCGv_i32 t3 = tcg_temp_new_i32();
2170 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
2171 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
2172 tcg_gen_and_i32(t2, t2, t3);
2173 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
2174 tcg_gen_or_i32(t2, t2, t3);
2175 tcg_gen_movi_i32(t3, 0);
2176 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
2177 tcg_gen_rem_i32(t3, t0, t1);
2178 tcg_gen_ext_i32_tl(ret, t3);
2179 tcg_temp_free_i32(t2);
2180 tcg_temp_free_i32(t3);
2181 } else {
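/*
 * Replace a zero divisor by 1 so the remainder operation cannot
 * trap; the architected result for modulo by zero is undefined.
 */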
2182 TCGv_i32 t2 = tcg_const_i32(1);
2183 TCGv_i32 t3 = tcg_const_i32(0);
2184 tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
2185 tcg_gen_remu_i32(t3, t0, t1);
2186 tcg_gen_extu_i32_tl(ret, t3);
2187 tcg_temp_free_i32(t2);
2188 tcg_temp_free_i32(t3);
2190 tcg_temp_free_i32(t0);
2191 tcg_temp_free_i32(t1);
2194 #define GEN_INT_ARITH_MODW(name, opc3, sign) \
2195 static void glue(gen_, name)(DisasContext *ctx) \
2197 gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \
2198 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2199 sign); \
2202 GEN_INT_ARITH_MODW(moduw, 0x08, 0);
2203 GEN_INT_ARITH_MODW(modsw, 0x18, 1);
2205 #if defined(TARGET_PPC64)
2206 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
2207 TCGv arg2, int sign)
2209 TCGv_i64 t0 = tcg_temp_new_i64();
2210 TCGv_i64 t1 = tcg_temp_new_i64();
2212 tcg_gen_mov_i64(t0, arg1);
2213 tcg_gen_mov_i64(t1, arg2);
2214 if (sign) {
2215 TCGv_i64 t2 = tcg_temp_new_i64();
2216 TCGv_i64 t3 = tcg_temp_new_i64();
2217 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
2218 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
2219 tcg_gen_and_i64(t2, t2, t3);
2220 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
2221 tcg_gen_or_i64(t2, t2, t3);
2222 tcg_gen_movi_i64(t3, 0);
2223 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
2224 tcg_gen_rem_i64(ret, t0, t1);
2225 tcg_temp_free_i64(t2);
2226 tcg_temp_free_i64(t3);
2227 } else {
2228 TCGv_i64 t2 = tcg_const_i64(1);
2229 TCGv_i64 t3 = tcg_const_i64(0);
2230 tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
2231 tcg_gen_remu_i64(ret, t0, t1);
2232 tcg_temp_free_i64(t2);
2233 tcg_temp_free_i64(t3);
2235 tcg_temp_free_i64(t0);
2236 tcg_temp_free_i64(t1);
2239 #define GEN_INT_ARITH_MODD(name, opc3, sign) \
2240 static void glue(gen_, name)(DisasContext *ctx) \
2242 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \
2243 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2244 sign); \
2247 GEN_INT_ARITH_MODD(modud, 0x08, 0);
2248 GEN_INT_ARITH_MODD(modsd, 0x18, 1);
2249 #endif
2251 /* mulhw mulhw. */
2252 static void gen_mulhw(DisasContext *ctx)
2254 TCGv_i32 t0 = tcg_temp_new_i32();
2255 TCGv_i32 t1 = tcg_temp_new_i32();
2257 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2258 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2259 tcg_gen_muls2_i32(t0, t1, t0, t1);
2260 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
2261 tcg_temp_free_i32(t0);
2262 tcg_temp_free_i32(t1);
2263 if (unlikely(Rc(ctx->opcode) != 0)) {
2264 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2268 /* mulhwu mulhwu. */
2269 static void gen_mulhwu(DisasContext *ctx)
2271 TCGv_i32 t0 = tcg_temp_new_i32();
2272 TCGv_i32 t1 = tcg_temp_new_i32();
2274 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2275 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2276 tcg_gen_mulu2_i32(t0, t1, t0, t1);
2277 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
2278 tcg_temp_free_i32(t0);
2279 tcg_temp_free_i32(t1);
2280 if (unlikely(Rc(ctx->opcode) != 0)) {
2281 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2285 /* mullw mullw. */
2286 static void gen_mullw(DisasContext *ctx)
2288 #if defined(TARGET_PPC64)
2289 TCGv_i64 t0, t1;
2290 t0 = tcg_temp_new_i64();
2291 t1 = tcg_temp_new_i64();
2292 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
2293 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
2294 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
2295 tcg_temp_free(t0);
2296 tcg_temp_free(t1);
2297 #else
2298 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2299 cpu_gpr[rB(ctx->opcode)]);
2300 #endif
2301 if (unlikely(Rc(ctx->opcode) != 0)) {
2302 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2306 /* mullwo mullwo. */
2307 static void gen_mullwo(DisasContext *ctx)
2309 TCGv_i32 t0 = tcg_temp_new_i32();
2310 TCGv_i32 t1 = tcg_temp_new_i32();
2312 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2313 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2314 tcg_gen_muls2_i32(t0, t1, t0, t1);
2315 #if defined(TARGET_PPC64)
2316 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
2317 #else
2318 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
2319 #endif
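/* OV is set when the high half of the product is not the sign extension of the low half. */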
2321 tcg_gen_sari_i32(t0, t0, 31);
2322 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
2323 tcg_gen_extu_i32_tl(cpu_ov, t0);
2324 if (is_isa300(ctx)) {
2325 tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2327 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2329 tcg_temp_free_i32(t0);
2330 tcg_temp_free_i32(t1);
2331 if (unlikely(Rc(ctx->opcode) != 0)) {
2332 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2336 /* mulli */
2337 static void gen_mulli(DisasContext *ctx)
2339 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2340 SIMM(ctx->opcode));
2343 #if defined(TARGET_PPC64)
2344 /* mulhd mulhd. */
2345 static void gen_mulhd(DisasContext *ctx)
2347 TCGv lo = tcg_temp_new();
2348 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2349 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2350 tcg_temp_free(lo);
2351 if (unlikely(Rc(ctx->opcode) != 0)) {
2352 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2356 /* mulhdu mulhdu. */
2357 static void gen_mulhdu(DisasContext *ctx)
2359 TCGv lo = tcg_temp_new();
2360 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2361 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2362 tcg_temp_free(lo);
2363 if (unlikely(Rc(ctx->opcode) != 0)) {
2364 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2368 /* mulld mulld. */
2369 static void gen_mulld(DisasContext *ctx)
2371 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2372 cpu_gpr[rB(ctx->opcode)]);
2373 if (unlikely(Rc(ctx->opcode) != 0)) {
2374 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2378 /* mulldo mulldo. */
2379 static void gen_mulldo(DisasContext *ctx)
2381 TCGv_i64 t0 = tcg_temp_new_i64();
2382 TCGv_i64 t1 = tcg_temp_new_i64();
2384 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
2385 cpu_gpr[rB(ctx->opcode)]);
2386 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);
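/* As for mullwo: overflow if t1 is not the sign extension of t0. */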
2388 tcg_gen_sari_i64(t0, t0, 63);
2389 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
2390 if (is_isa300(ctx)) {
2391 tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2393 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2395 tcg_temp_free_i64(t0);
2396 tcg_temp_free_i64(t1);
2398 if (unlikely(Rc(ctx->opcode) != 0)) {
2399 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2402 #endif
2404 /* Common subf function */
2405 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
2406 TCGv arg2, bool add_ca, bool compute_ca,
2407 bool compute_ov, bool compute_rc0)
2409 TCGv t0 = ret;
2411 if (compute_ca || compute_ov) {
2412 t0 = tcg_temp_new();
2415 if (compute_ca) {
2416 /* dest = ~arg1 + arg2 [+ ca]. */
2417 if (NARROW_MODE(ctx)) {
2419 * Caution: a non-obvious corner case of the spec is that
2420 * we must produce the *entire* 64-bit addition, but
2421 * produce the carry into bit 32.
2423 TCGv inv1 = tcg_temp_new();
2424 TCGv t1 = tcg_temp_new();
2425 tcg_gen_not_tl(inv1, arg1);
2426 if (add_ca) {
2427 tcg_gen_add_tl(t0, arg2, cpu_ca);
2428 } else {
2429 tcg_gen_addi_tl(t0, arg2, 1);
2431 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */
2432 tcg_gen_add_tl(t0, t0, inv1);
2433 tcg_temp_free(inv1);
2434 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits that change with carry */
2435 tcg_temp_free(t1);
2436 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
2437 if (is_isa300(ctx)) {
2438 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2440 } else if (add_ca) {
2441 TCGv zero, inv1 = tcg_temp_new();
2442 tcg_gen_not_tl(inv1, arg1);
2443 zero = tcg_const_tl(0);
2444 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
2445 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
2446 gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
2447 tcg_temp_free(zero);
2448 tcg_temp_free(inv1);
2449 } else {
2450 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
2451 tcg_gen_sub_tl(t0, arg2, arg1);
2452 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
2454 } else if (add_ca) {
2456 * Since we're ignoring carry-out, we can simplify the
2457 * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
2459 tcg_gen_sub_tl(t0, arg2, arg1);
2460 tcg_gen_add_tl(t0, t0, cpu_ca);
2461 tcg_gen_subi_tl(t0, t0, 1);
2462 } else {
2463 tcg_gen_sub_tl(t0, arg2, arg1);
2466 if (compute_ov) {
2467 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
2469 if (unlikely(compute_rc0)) {
2470 gen_set_Rc0(ctx, t0);
2473 if (t0 != ret) {
2474 tcg_gen_mov_tl(ret, t0);
2475 tcg_temp_free(t0);
2478 /* Subtract functions with two register operands */
2479 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
2480 static void glue(gen_, name)(DisasContext *ctx) \
2482 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
2483 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2484 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
2486 /* Subtract functions with one register operand and one immediate */
2487 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
2488 add_ca, compute_ca, compute_ov) \
2489 static void glue(gen_, name)(DisasContext *ctx) \
2491 TCGv t0 = tcg_const_tl(const_val); \
2492 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
2493 cpu_gpr[rA(ctx->opcode)], t0, \
2494 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
2495 tcg_temp_free(t0); \
2497 /* subf subf. subfo subfo. */
2498 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
2499 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
2500 /* subfc subfc. subfco subfco. */
2501 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
2502 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
2503 /* subfe subfe. subfeo subfeo. */
2504 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
2505 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
2506 /* subfme subfme. subfmeo subfmeo. */
2507 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
2508 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
2509 /* subfze subfze. subfzeo subfzeo. */
2510 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
2511 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
2513 /* subfic */
2514 static void gen_subfic(DisasContext *ctx)
2516 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
2517 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2518 c, 0, 1, 0, 0);
2519 tcg_temp_free(c);
2522 /* neg neg. nego nego. */
2523 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
2525 TCGv zero = tcg_const_tl(0);
2526 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2527 zero, 0, 0, compute_ov, Rc(ctx->opcode));
2528 tcg_temp_free(zero);
2531 static void gen_neg(DisasContext *ctx)
2533 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2534 if (unlikely(Rc(ctx->opcode))) {
2535 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2539 static void gen_nego(DisasContext *ctx)
2541 gen_op_arith_neg(ctx, 1);
2544 /*** Integer logical ***/
2545 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
2546 static void glue(gen_, name)(DisasContext *ctx) \
2548 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
2549 cpu_gpr[rB(ctx->opcode)]); \
2550 if (unlikely(Rc(ctx->opcode) != 0)) \
2551 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
2554 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
2555 static void glue(gen_, name)(DisasContext *ctx) \
2557 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
2558 if (unlikely(Rc(ctx->opcode) != 0)) \
2559 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
2562 /* and & and. */
2563 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
2564 /* andc & andc. */
2565 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
2567 /* andi. */
2568 static void gen_andi_(DisasContext *ctx)
2570 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2571 UIMM(ctx->opcode));
2572 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2575 /* andis. */
2576 static void gen_andis_(DisasContext *ctx)
2578 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2579 UIMM(ctx->opcode) << 16);
2580 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2583 /* cntlzw */
2584 static void gen_cntlzw(DisasContext *ctx)
2586 TCGv_i32 t = tcg_temp_new_i32();
2588 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2589 tcg_gen_clzi_i32(t, t, 32);
2590 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2591 tcg_temp_free_i32(t);
2593 if (unlikely(Rc(ctx->opcode) != 0)) {
2594 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2598 /* cnttzw */
2599 static void gen_cnttzw(DisasContext *ctx)
2601 TCGv_i32 t = tcg_temp_new_i32();
2603 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2604 tcg_gen_ctzi_i32(t, t, 32);
2605 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2606 tcg_temp_free_i32(t);
2608 if (unlikely(Rc(ctx->opcode) != 0)) {
2609 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2613 /* eqv & eqv. */
2614 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
2615 /* extsb & extsb. */
2616 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
2617 /* extsh & extsh. */
2618 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
2619 /* nand & nand. */
2620 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
2621 /* nor & nor. */
2622 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
2624 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
2625 static void gen_pause(DisasContext *ctx)
2627 TCGv_i32 t0 = tcg_const_i32(0);
2628 tcg_gen_st_i32(t0, cpu_env,
2629 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
2630 tcg_temp_free_i32(t0);
2632 /* Stop translation; this gives other CPUs a chance to run */
2633 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
2635 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
2637 /* or & or. */
2638 static void gen_or(DisasContext *ctx)
2640 int rs, ra, rb;
2642 rs = rS(ctx->opcode);
2643 ra = rA(ctx->opcode);
2644 rb = rB(ctx->opcode);
2645 /* Optimisation for mr. ri case */
2646 if (rs != ra || rs != rb) {
2647 if (rs != rb) {
2648 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
2649 } else {
2650 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
2652 if (unlikely(Rc(ctx->opcode) != 0)) {
2653 gen_set_Rc0(ctx, cpu_gpr[ra]);
2655 } else if (unlikely(Rc(ctx->opcode) != 0)) {
2656 gen_set_Rc0(ctx, cpu_gpr[rs]);
2657 #if defined(TARGET_PPC64)
2658 } else if (rs != 0) { /* 0 is nop */
2659 int prio = 0;
2661 switch (rs) {
2662 case 1:
2663 /* Set process priority to low */
2664 prio = 2;
2665 break;
2666 case 6:
2667 /* Set process priority to medium-low */
2668 prio = 3;
2669 break;
2670 case 2:
2671 /* Set process priority to normal */
2672 prio = 4;
2673 break;
2674 #if !defined(CONFIG_USER_ONLY)
2675 case 31:
2676 if (!ctx->pr) {
2677 /* Set process priority to very low */
2678 prio = 1;
2680 break;
2681 case 5:
2682 if (!ctx->pr) {
2683 /* Set process priority to medium-high */
2684 prio = 5;
2686 break;
2687 case 3:
2688 if (!ctx->pr) {
2689 /* Set process priority to high */
2690 prio = 6;
2692 break;
2693 case 7:
2694 if (ctx->hv && !ctx->pr) {
2695 /* Set process priority to very high */
2696 prio = 7;
2698 break;
2699 #endif
2700 default:
2701 break;
2703 if (prio) {
2704 TCGv t0 = tcg_temp_new();
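/* The priority lives in PPR[PRI], bits 52:50 counting from the LSB; clear the field and insert the new value. */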
2705 gen_load_spr(t0, SPR_PPR);
2706 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
2707 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
2708 gen_store_spr(SPR_PPR, t0);
2709 tcg_temp_free(t0);
2711 #if !defined(CONFIG_USER_ONLY)
2713 * Pause out of TCG, otherwise spin loops with smt_low eat too
2714 * much CPU and the kernel hangs. This applies to all
2715 * encodings other than no-op, e.g., miso(rs=26), yield(27),
2716 * mdoio(29), mdoom(30), and all currently undefined.
2718 gen_pause(ctx);
2719 #endif
2720 #endif
2723 /* orc & orc. */
2724 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
2726 /* xor & xor. */
2727 static void gen_xor(DisasContext *ctx)
2729 /* Optimisation for "set to zero" case */
2730 if (rS(ctx->opcode) != rB(ctx->opcode)) {
2731 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2732 cpu_gpr[rB(ctx->opcode)]);
2733 } else {
2734 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2736 if (unlikely(Rc(ctx->opcode) != 0)) {
2737 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2741 /* ori */
2742 static void gen_ori(DisasContext *ctx)
2744 target_ulong uimm = UIMM(ctx->opcode);
2746 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2747 return;
2749 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2752 /* oris */
2753 static void gen_oris(DisasContext *ctx)
2755 target_ulong uimm = UIMM(ctx->opcode);
2757 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2758 /* NOP */
2759 return;
2761 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2762 uimm << 16);
2765 /* xori */
2766 static void gen_xori(DisasContext *ctx)
2768 target_ulong uimm = UIMM(ctx->opcode);
2770 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2771 /* NOP */
2772 return;
2774 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2777 /* xoris */
2778 static void gen_xoris(DisasContext *ctx)
2780 target_ulong uimm = UIMM(ctx->opcode);
2782 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2783 /* NOP */
2784 return;
2786 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2787 uimm << 16);
2790 /* popcntb : PowerPC 2.03 specification */
2791 static void gen_popcntb(DisasContext *ctx)
2793 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2796 static void gen_popcntw(DisasContext *ctx)
2798 #if defined(TARGET_PPC64)
2799 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2800 #else
2801 tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2802 #endif
2805 #if defined(TARGET_PPC64)
2806 /* popcntd: PowerPC 2.06 specification */
2807 static void gen_popcntd(DisasContext *ctx)
2809 tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2811 #endif
2813 /* prtyw: PowerPC 2.05 specification */
2814 static void gen_prtyw(DisasContext *ctx)
2816 TCGv ra = cpu_gpr[rA(ctx->opcode)];
2817 TCGv rs = cpu_gpr[rS(ctx->opcode)];
2818 TCGv t0 = tcg_temp_new();
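/*
 * For each word, compute the parity of bit 0 of its four bytes:
 * fold the bytes together with xors, then keep only bit 0 of each word.
 */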
2819 tcg_gen_shri_tl(t0, rs, 16);
2820 tcg_gen_xor_tl(ra, rs, t0);
2821 tcg_gen_shri_tl(t0, ra, 8);
2822 tcg_gen_xor_tl(ra, ra, t0);
2823 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
2824 tcg_temp_free(t0);
2827 #if defined(TARGET_PPC64)
2828 /* prtyd: PowerPC 2.05 specification */
2829 static void gen_prtyd(DisasContext *ctx)
2831 TCGv ra = cpu_gpr[rA(ctx->opcode)];
2832 TCGv rs = cpu_gpr[rS(ctx->opcode)];
2833 TCGv t0 = tcg_temp_new();
2834 tcg_gen_shri_tl(t0, rs, 32);
2835 tcg_gen_xor_tl(ra, rs, t0);
2836 tcg_gen_shri_tl(t0, ra, 16);
2837 tcg_gen_xor_tl(ra, ra, t0);
2838 tcg_gen_shri_tl(t0, ra, 8);
2839 tcg_gen_xor_tl(ra, ra, t0);
2840 tcg_gen_andi_tl(ra, ra, 1);
2841 tcg_temp_free(t0);
2843 #endif
2845 #if defined(TARGET_PPC64)
2846 /* bpermd */
2847 static void gen_bpermd(DisasContext *ctx)
2849 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
2850 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2852 #endif
2854 #if defined(TARGET_PPC64)
2855 /* extsw & extsw. */
2856 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
2858 /* cntlzd */
2859 static void gen_cntlzd(DisasContext *ctx)
2861 tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2862 if (unlikely(Rc(ctx->opcode) != 0)) {
2863 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2867 /* cnttzd */
2868 static void gen_cnttzd(DisasContext *ctx)
2870 tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2871 if (unlikely(Rc(ctx->opcode) != 0)) {
2872 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2876 /* darn */
2877 static void gen_darn(DisasContext *ctx)
2879 int l = L(ctx->opcode);
2881 if (l > 2) {
2882 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
2883 } else {
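/*
 * With icount, the random number helpers are treated like I/O:
 * bracket the call with gen_io_start() and end the TB afterwards.
 */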
2884 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
2885 gen_io_start();
2887 if (l == 0) {
2888 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
2889 } else {
2890 /* Return 64-bit random for both CRN and RRN */
2891 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
2893 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
2894 gen_stop_exception(ctx);
2898 #endif
2900 /*** Integer rotate ***/
2902 /* rlwimi & rlwimi. */
2903 static void gen_rlwimi(DisasContext *ctx)
2905 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2906 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2907 uint32_t sh = SH(ctx->opcode);
2908 uint32_t mb = MB(ctx->opcode);
2909 uint32_t me = ME(ctx->opcode);
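/*
 * If the rotation lines the inserted field up with the low-order
 * bits of rS and the mask is contiguous, a single deposit of the
 * low me - mb + 1 bits of rS at position sh does the job.
 */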
2911 if (sh == (31 - me) && mb <= me) {
2912 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2913 } else {
2914 target_ulong mask;
2915 bool mask_in_32b = true;
2916 TCGv t1;
2918 #if defined(TARGET_PPC64)
2919 mb += 32;
2920 me += 32;
2921 #endif
2922 mask = MASK(mb, me);
2924 #if defined(TARGET_PPC64)
2925 if (mask > 0xffffffffu) {
2926 mask_in_32b = false;
2928 #endif
2929 t1 = tcg_temp_new();
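/*
 * If the mask fits in the low 32 bits, a 32-bit rotate is enough.
 * Otherwise (a wrap-around mask on 64-bit targets) replicate the
 * low word of rS into the high word and rotate the 64-bit pair.
 */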
2930 if (mask_in_32b) {
2931 TCGv_i32 t0 = tcg_temp_new_i32();
2932 tcg_gen_trunc_tl_i32(t0, t_rs);
2933 tcg_gen_rotli_i32(t0, t0, sh);
2934 tcg_gen_extu_i32_tl(t1, t0);
2935 tcg_temp_free_i32(t0);
2936 } else {
2937 #if defined(TARGET_PPC64)
2938 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
2939 tcg_gen_rotli_i64(t1, t1, sh);
2940 #else
2941 g_assert_not_reached();
2942 #endif
2945 tcg_gen_andi_tl(t1, t1, mask);
2946 tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2947 tcg_gen_or_tl(t_ra, t_ra, t1);
2948 tcg_temp_free(t1);
2950 if (unlikely(Rc(ctx->opcode) != 0)) {
2951 gen_set_Rc0(ctx, t_ra);
2955 /* rlwinm & rlwinm. */
2956 static void gen_rlwinm(DisasContext *ctx)
2958 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2959 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2960 int sh = SH(ctx->opcode);
2961 int mb = MB(ctx->opcode);
2962 int me = ME(ctx->opcode);
2963 int len = me - mb + 1;
2964 int rsh = (32 - sh) & 31;
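/*
 * Two common special cases: me == 31 - sh is a left shift into a
 * zeroed field (deposit), and me == 31 with no wrap is a right
 * shift and mask (extract).  Anything else is rotate-then-mask.
 */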
2966 if (sh != 0 && len > 0 && me == (31 - sh)) {
2967 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2968 } else if (me == 31 && rsh + len <= 32) {
2969 tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2970 } else {
2971 target_ulong mask;
2972 bool mask_in_32b = true;
2973 #if defined(TARGET_PPC64)
2974 mb += 32;
2975 me += 32;
2976 #endif
2977 mask = MASK(mb, me);
2978 #if defined(TARGET_PPC64)
2979 if (mask > 0xffffffffu) {
2980 mask_in_32b = false;
2982 #endif
2983 if (mask_in_32b) {
2984 if (sh == 0) {
2985 tcg_gen_andi_tl(t_ra, t_rs, mask);
2986 } else {
2987 TCGv_i32 t0 = tcg_temp_new_i32();
2988 tcg_gen_trunc_tl_i32(t0, t_rs);
2989 tcg_gen_rotli_i32(t0, t0, sh);
2990 tcg_gen_andi_i32(t0, t0, mask);
2991 tcg_gen_extu_i32_tl(t_ra, t0);
2992 tcg_temp_free_i32(t0);
2994 } else {
2995 #if defined(TARGET_PPC64)
2996 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2997 tcg_gen_rotli_i64(t_ra, t_ra, sh);
2998 tcg_gen_andi_i64(t_ra, t_ra, mask);
2999 #else
3000 g_assert_not_reached();
3001 #endif
3004 if (unlikely(Rc(ctx->opcode) != 0)) {
3005 gen_set_Rc0(ctx, t_ra);
3009 /* rlwnm & rlwnm. */
3010 static void gen_rlwnm(DisasContext *ctx)
3012 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
3013 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
3014 TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
3015 uint32_t mb = MB(ctx->opcode);
3016 uint32_t me = ME(ctx->opcode);
3017 target_ulong mask;
3018 bool mask_in_32b = true;
3020 #if defined(TARGET_PPC64)
3021 mb += 32;
3022 me += 32;
3023 #endif
3024 mask = MASK(mb, me);
3026 #if defined(TARGET_PPC64)
3027 if (mask > 0xffffffffu) {
3028 mask_in_32b = false;
3030 #endif
3031 if (mask_in_32b) {
3032 TCGv_i32 t0 = tcg_temp_new_i32();
3033 TCGv_i32 t1 = tcg_temp_new_i32();
3034 tcg_gen_trunc_tl_i32(t0, t_rb);
3035 tcg_gen_trunc_tl_i32(t1, t_rs);
3036 tcg_gen_andi_i32(t0, t0, 0x1f);
3037 tcg_gen_rotl_i32(t1, t1, t0);
3038 tcg_gen_extu_i32_tl(t_ra, t1);
3039 tcg_temp_free_i32(t0);
3040 tcg_temp_free_i32(t1);
3041 } else {
3042 #if defined(TARGET_PPC64)
3043 TCGv_i64 t0 = tcg_temp_new_i64();
3044 tcg_gen_andi_i64(t0, t_rb, 0x1f);
3045 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
3046 tcg_gen_rotl_i64(t_ra, t_ra, t0);
3047 tcg_temp_free_i64(t0);
3048 #else
3049 g_assert_not_reached();
3050 #endif
3053 tcg_gen_andi_tl(t_ra, t_ra, mask);
3055 if (unlikely(Rc(ctx->opcode) != 0)) {
3056 gen_set_Rc0(ctx, t_ra);
3060 #if defined(TARGET_PPC64)
3061 #define GEN_PPC64_R2(name, opc1, opc2) \
3062 static void glue(gen_, name##0)(DisasContext *ctx) \
3064 gen_##name(ctx, 0); \
3067 static void glue(gen_, name##1)(DisasContext *ctx) \
3069 gen_##name(ctx, 1); \
3071 #define GEN_PPC64_R4(name, opc1, opc2) \
3072 static void glue(gen_, name##0)(DisasContext *ctx) \
3074 gen_##name(ctx, 0, 0); \
3077 static void glue(gen_, name##1)(DisasContext *ctx) \
3079 gen_##name(ctx, 0, 1); \
3082 static void glue(gen_, name##2)(DisasContext *ctx) \
3084 gen_##name(ctx, 1, 0); \
3087 static void glue(gen_, name##3)(DisasContext *ctx) \
3089 gen_##name(ctx, 1, 1); \
3092 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
3094 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
3095 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
3096 int len = me - mb + 1;
3097 int rsh = (64 - sh) & 63;
3099 if (sh != 0 && len > 0 && me == (63 - sh)) {
3100 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
3101 } else if (me == 63 && rsh + len <= 64) {
3102 tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
3103 } else {
3104 tcg_gen_rotli_tl(t_ra, t_rs, sh);
3105 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
3107 if (unlikely(Rc(ctx->opcode) != 0)) {
3108 gen_set_Rc0(ctx, t_ra);
3112 /* rldicl - rldicl. */
3113 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
3115 uint32_t sh, mb;
3117 sh = SH(ctx->opcode) | (shn << 5);
3118 mb = MB(ctx->opcode) | (mbn << 5);
3119 gen_rldinm(ctx, mb, 63, sh);
3121 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
3123 /* rldicr - rldicr. */
3124 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
3126 uint32_t sh, me;
3128 sh = SH(ctx->opcode) | (shn << 5);
3129 me = MB(ctx->opcode) | (men << 5);
3130 gen_rldinm(ctx, 0, me, sh);
3132 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
3134 /* rldic - rldic. */
3135 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
3137 uint32_t sh, mb;
3139 sh = SH(ctx->opcode) | (shn << 5);
3140 mb = MB(ctx->opcode) | (mbn << 5);
3141 gen_rldinm(ctx, mb, 63 - sh, sh);
3143 GEN_PPC64_R4(rldic, 0x1E, 0x04);
3145 static void gen_rldnm(DisasContext *ctx, int mb, int me)
3147 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
3148 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
3149 TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
3150 TCGv t0;
3152 t0 = tcg_temp_new();
3153 tcg_gen_andi_tl(t0, t_rb, 0x3f);
3154 tcg_gen_rotl_tl(t_ra, t_rs, t0);
3155 tcg_temp_free(t0);
3157 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
3158 if (unlikely(Rc(ctx->opcode) != 0)) {
3159 gen_set_Rc0(ctx, t_ra);
3163 /* rldcl - rldcl. */
3164 static inline void gen_rldcl(DisasContext *ctx, int mbn)
3166 uint32_t mb;
3168 mb = MB(ctx->opcode) | (mbn << 5);
3169 gen_rldnm(ctx, mb, 63);
3171 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
3173 /* rldcr - rldcr. */
3174 static inline void gen_rldcr(DisasContext *ctx, int men)
3176 uint32_t me;
3178 me = MB(ctx->opcode) | (men << 5);
3179 gen_rldnm(ctx, 0, me);
3181 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
3183 /* rldimi - rldimi. */
3184 static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
3186 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
3187 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
3188 uint32_t sh = SH(ctx->opcode) | (shn << 5);
3189 uint32_t mb = MB(ctx->opcode) | (mbn << 5);
3190 uint32_t me = 63 - sh;
3192 if (mb <= me) {
3193 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
3194 } else {
3195 target_ulong mask = MASK(mb, me);
3196 TCGv t1 = tcg_temp_new();
3198 tcg_gen_rotli_tl(t1, t_rs, sh);
3199 tcg_gen_andi_tl(t1, t1, mask);
3200 tcg_gen_andi_tl(t_ra, t_ra, ~mask);
3201 tcg_gen_or_tl(t_ra, t_ra, t1);
3202 tcg_temp_free(t1);
3204 if (unlikely(Rc(ctx->opcode) != 0)) {
3205 gen_set_Rc0(ctx, t_ra);
3208 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
3209 #endif
3211 /*** Integer shift ***/
3213 /* slw & slw. */
3214 static void gen_slw(DisasContext *ctx)
3216 TCGv t0, t1;
3218 t0 = tcg_temp_new();
3219 /* AND rS with a mask that is 0 when rB >= 0x20 */
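/*
 * The shli/sari pair replicates bit 5 of rB across the register,
 * so t0 is all ones exactly when the shift amount is >= 32 and
 * the andc below then zeroes the source.
 */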
3220 #if defined(TARGET_PPC64)
3221 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
3222 tcg_gen_sari_tl(t0, t0, 0x3f);
3223 #else
3224 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
3225 tcg_gen_sari_tl(t0, t0, 0x1f);
3226 #endif
3227 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3228 t1 = tcg_temp_new();
3229 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
3230 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3231 tcg_temp_free(t1);
3232 tcg_temp_free(t0);
3233 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
3234 if (unlikely(Rc(ctx->opcode) != 0)) {
3235 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3239 /* sraw & sraw. */
3240 static void gen_sraw(DisasContext *ctx)
3242 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
3243 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3244 if (unlikely(Rc(ctx->opcode) != 0)) {
3245 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3249 /* srawi & srawi. */
3250 static void gen_srawi(DisasContext *ctx)
3252 int sh = SH(ctx->opcode);
3253 TCGv dst = cpu_gpr[rA(ctx->opcode)];
3254 TCGv src = cpu_gpr[rS(ctx->opcode)];
3255 if (sh == 0) {
3256 tcg_gen_ext32s_tl(dst, src);
3257 tcg_gen_movi_tl(cpu_ca, 0);
3258 if (is_isa300(ctx)) {
3259 tcg_gen_movi_tl(cpu_ca32, 0);
3261 } else {
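/* CA is set when the (32-bit) source is negative and any 1 bits are shifted out. */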
3262 TCGv t0;
3263 tcg_gen_ext32s_tl(dst, src);
3264 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
3265 t0 = tcg_temp_new();
3266 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
3267 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
3268 tcg_temp_free(t0);
3269 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
3270 if (is_isa300(ctx)) {
3271 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
3273 tcg_gen_sari_tl(dst, dst, sh);
3275 if (unlikely(Rc(ctx->opcode) != 0)) {
3276 gen_set_Rc0(ctx, dst);
3280 /* srw & srw. */
3281 static void gen_srw(DisasContext *ctx)
3283 TCGv t0, t1;
3285 t0 = tcg_temp_new();
3286 /* AND rS with a mask that is 0 when rB >= 0x20 */
3287 #if defined(TARGET_PPC64)
3288 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
3289 tcg_gen_sari_tl(t0, t0, 0x3f);
3290 #else
3291 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
3292 tcg_gen_sari_tl(t0, t0, 0x1f);
3293 #endif
3294 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3295 tcg_gen_ext32u_tl(t0, t0);
3296 t1 = tcg_temp_new();
3297 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
3298 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3299 tcg_temp_free(t1);
3300 tcg_temp_free(t0);
3301 if (unlikely(Rc(ctx->opcode) != 0)) {
3302 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3306 #if defined(TARGET_PPC64)
3307 /* sld & sld. */
3308 static void gen_sld(DisasContext *ctx)
3310 TCGv t0, t1;
3312 t0 = tcg_temp_new();
3313 /* AND rS with a mask that is 0 when rB >= 0x40 */
3314 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3315 tcg_gen_sari_tl(t0, t0, 0x3f);
3316 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3317 t1 = tcg_temp_new();
3318 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3319 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3320 tcg_temp_free(t1);
3321 tcg_temp_free(t0);
3322 if (unlikely(Rc(ctx->opcode) != 0)) {
3323 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3327 /* srad & srad. */
3328 static void gen_srad(DisasContext *ctx)
3330 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
3331 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3332 if (unlikely(Rc(ctx->opcode) != 0)) {
3333 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3336 /* sradi & sradi. */
3337 static inline void gen_sradi(DisasContext *ctx, int n)
3339 int sh = SH(ctx->opcode) + (n << 5);
3340 TCGv dst = cpu_gpr[rA(ctx->opcode)];
3341 TCGv src = cpu_gpr[rS(ctx->opcode)];
3342 if (sh == 0) {
3343 tcg_gen_mov_tl(dst, src);
3344 tcg_gen_movi_tl(cpu_ca, 0);
3345 if (is_isa300(ctx)) {
3346 tcg_gen_movi_tl(cpu_ca32, 0);
3348 } else {
3349 TCGv t0;
3350 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
3351 t0 = tcg_temp_new();
3352 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
3353 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
3354 tcg_temp_free(t0);
3355 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
3356 if (is_isa300(ctx)) {
3357 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
3359 tcg_gen_sari_tl(dst, src, sh);
3361 if (unlikely(Rc(ctx->opcode) != 0)) {
3362 gen_set_Rc0(ctx, dst);
3366 static void gen_sradi0(DisasContext *ctx)
3368 gen_sradi(ctx, 0);
3371 static void gen_sradi1(DisasContext *ctx)
3373 gen_sradi(ctx, 1);
3376 /* extswsli & extswsli. */
3377 static inline void gen_extswsli(DisasContext *ctx, int n)
3379 int sh = SH(ctx->opcode) + (n << 5);
3380 TCGv dst = cpu_gpr[rA(ctx->opcode)];
3381 TCGv src = cpu_gpr[rS(ctx->opcode)];
3383 tcg_gen_ext32s_tl(dst, src);
3384 tcg_gen_shli_tl(dst, dst, sh);
3385 if (unlikely(Rc(ctx->opcode) != 0)) {
3386 gen_set_Rc0(ctx, dst);
3390 static void gen_extswsli0(DisasContext *ctx)
3392 gen_extswsli(ctx, 0);
3395 static void gen_extswsli1(DisasContext *ctx)
3397 gen_extswsli(ctx, 1);
3400 /* srd & srd. */
3401 static void gen_srd(DisasContext *ctx)
3403 TCGv t0, t1;
3405 t0 = tcg_temp_new();
3406 /* AND rS with a mask that is 0 when rB >= 0x40 */
3407 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3408 tcg_gen_sari_tl(t0, t0, 0x3f);
3409 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3410 t1 = tcg_temp_new();
3411 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3412 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3413 tcg_temp_free(t1);
3414 tcg_temp_free(t0);
3415 if (unlikely(Rc(ctx->opcode) != 0)) {
3416 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3419 #endif
3421 /*** Addressing modes ***/
3422 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
3423 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
3424 target_long maskl)
3426 target_long simm = SIMM(ctx->opcode);
3428 simm &= ~maskl;
3429 if (rA(ctx->opcode) == 0) {
3430 if (NARROW_MODE(ctx)) {
3431 simm = (uint32_t)simm;
3433 tcg_gen_movi_tl(EA, simm);
3434 } else if (likely(simm != 0)) {
3435 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
3436 if (NARROW_MODE(ctx)) {
3437 tcg_gen_ext32u_tl(EA, EA);
3439 } else {
3440 if (NARROW_MODE(ctx)) {
3441 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3442 } else {
3443 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3448 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
3450 if (rA(ctx->opcode) == 0) {
3451 if (NARROW_MODE(ctx)) {
3452 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3453 } else {
3454 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3456 } else {
3457 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3458 if (NARROW_MODE(ctx)) {
3459 tcg_gen_ext32u_tl(EA, EA);
3464 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
3466 if (rA(ctx->opcode) == 0) {
3467 tcg_gen_movi_tl(EA, 0);
3468 } else if (NARROW_MODE(ctx)) {
3469 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3470 } else {
3471 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3475 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
3476 target_long val)
3478 tcg_gen_addi_tl(ret, arg1, val);
3479 if (NARROW_MODE(ctx)) {
3480 tcg_gen_ext32u_tl(ret, ret);
3484 static inline void gen_align_no_le(DisasContext *ctx)
3486 gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
3487 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
3490 /*** Integer load ***/
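/*
 * DEF_MEMOP() yields a memop for the guest's current endianness;
 * BSWAP_MEMOP() forces the opposite one, for the byte-reverse
 * load and store instructions.
 */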
3491 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
3492 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
3494 #define GEN_QEMU_LOAD_TL(ldop, op) \
3495 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \
3496 TCGv val, \
3497 TCGv addr) \
3499 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \
3502 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB))
3503 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
3504 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
3505 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
3506 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
3508 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
3509 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
3511 #define GEN_QEMU_LOAD_64(ldop, op) \
3512 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \
3513 TCGv_i64 val, \
3514 TCGv addr) \
3516 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \
3519 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB))
3520 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
3521 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
3522 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
3523 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q))
3525 #if defined(TARGET_PPC64)
3526 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q))
3527 #endif
3529 #define GEN_QEMU_STORE_TL(stop, op) \
3530 static void glue(gen_qemu_, stop)(DisasContext *ctx, \
3531 TCGv val, \
3532 TCGv addr) \
3534 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \
3537 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB))
3538 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
3539 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
3541 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
3542 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
3544 #define GEN_QEMU_STORE_64(stop, op) \
3545 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \
3546 TCGv_i64 val, \
3547 TCGv addr) \
3549 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \
3552 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB))
3553 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
3554 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
3555 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q))
3557 #if defined(TARGET_PPC64)
3558 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q))
3559 #endif
3561 #define GEN_LD(name, ldop, opc, type) \
3562 static void glue(gen_, name)(DisasContext *ctx) \
3564 TCGv EA; \
3565 gen_set_access_type(ctx, ACCESS_INT); \
3566 EA = tcg_temp_new(); \
3567 gen_addr_imm_index(ctx, EA, 0); \
3568 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
3569 tcg_temp_free(EA); \
3572 #define GEN_LDU(name, ldop, opc, type) \
3573 static void glue(gen_, name##u)(DisasContext *ctx) \
3575 TCGv EA; \
3576 if (unlikely(rA(ctx->opcode) == 0 || \
3577 rA(ctx->opcode) == rD(ctx->opcode))) { \
3578 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3579 return; \
3581 gen_set_access_type(ctx, ACCESS_INT); \
3582 EA = tcg_temp_new(); \
3583 if (type == PPC_64B) \
3584 gen_addr_imm_index(ctx, EA, 0x03); \
3585 else \
3586 gen_addr_imm_index(ctx, EA, 0); \
3587 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
3588 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3589 tcg_temp_free(EA); \
3592 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
3593 static void glue(gen_, name##ux)(DisasContext *ctx) \
3595 TCGv EA; \
3596 if (unlikely(rA(ctx->opcode) == 0 || \
3597 rA(ctx->opcode) == rD(ctx->opcode))) { \
3598 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3599 return; \
3601 gen_set_access_type(ctx, ACCESS_INT); \
3602 EA = tcg_temp_new(); \
3603 gen_addr_reg_index(ctx, EA); \
3604 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
3605 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3606 tcg_temp_free(EA); \
3609 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \
3610 static void glue(gen_, name##x)(DisasContext *ctx) \
3612 TCGv EA; \
3613 chk; \
3614 gen_set_access_type(ctx, ACCESS_INT); \
3615 EA = tcg_temp_new(); \
3616 gen_addr_reg_index(ctx, EA); \
3617 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
3618 tcg_temp_free(EA); \
3621 #define GEN_LDX(name, ldop, opc2, opc3, type) \
3622 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3624 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \
3625 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3627 #define GEN_LDS(name, ldop, op, type) \
3628 GEN_LD(name, ldop, op | 0x20, type); \
3629 GEN_LDU(name, ldop, op | 0x21, type); \
3630 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
3631 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
3633 /* lbz lbzu lbzux lbzx */
3634 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
3635 /* lha lhau lhaux lhax */
3636 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
3637 /* lhz lhzu lhzux lhzx */
3638 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
3639 /* lwz lwzu lwzux lwzx */
3640 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
3642 #define GEN_LDEPX(name, ldop, opc2, opc3) \
3643 static void glue(gen_, name##epx)(DisasContext *ctx) \
3645 TCGv EA; \
3646 CHK_SV; \
3647 gen_set_access_type(ctx, ACCESS_INT); \
3648 EA = tcg_temp_new(); \
3649 gen_addr_reg_index(ctx, EA); \
3650 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
3651 tcg_temp_free(EA); \
3654 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
3655 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
3656 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
3657 #if defined(TARGET_PPC64)
3658 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00)
3659 #endif
3661 #if defined(TARGET_PPC64)
3662 /* lwaux */
3663 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
3664 /* lwax */
3665 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
3666 /* ldux */
3667 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B);
3668 /* ldx */
3669 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B);
3671 /* CI load/store variants */
3672 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
3673 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
3674 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
3675 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
3677 static void gen_ld(DisasContext *ctx)
3679 TCGv EA;
3680 if (Rc(ctx->opcode)) {
3681 if (unlikely(rA(ctx->opcode) == 0 ||
3682 rA(ctx->opcode) == rD(ctx->opcode))) {
3683 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3684 return;
3687 gen_set_access_type(ctx, ACCESS_INT);
3688 EA = tcg_temp_new();
3689 gen_addr_imm_index(ctx, EA, 0x03);
3690 if (ctx->opcode & 0x02) {
3691 /* lwa (lwau is undefined) */
3692 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
3693 } else {
3694 /* ld - ldu */
3695 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
3697 if (Rc(ctx->opcode)) {
3698 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
3700 tcg_temp_free(EA);
3703 /* lq */
3704 static void gen_lq(DisasContext *ctx)
3706 int ra, rd;
3707 TCGv EA, hi, lo;
3709 /* lq is a legal user mode instruction starting in ISA 2.07 */
3710 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
3711 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
3713 if (!legal_in_user_mode && ctx->pr) {
3714 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3715 return;
3718 if (!le_is_supported && ctx->le_mode) {
3719 gen_align_no_le(ctx);
3720 return;
3722 ra = rA(ctx->opcode);
3723 rd = rD(ctx->opcode);
3724 if (unlikely((rd & 1) || rd == ra)) {
3725 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3726 return;
3729 gen_set_access_type(ctx, ACCESS_INT);
3730 EA = tcg_temp_new();
3731 gen_addr_imm_index(ctx, EA, 0x0F);
3733 /* Note that the low part is always in RD+1, even in LE mode. */
3734 lo = cpu_gpr[rd + 1];
3735 hi = cpu_gpr[rd];
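/*
 * In a parallel context the 16-byte load must appear atomic: use
 * the 128-bit atomic helpers when the host provides them, otherwise
 * fall back to restarting under the exclusive lock (see below).
 */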
3737 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3738 if (HAVE_ATOMIC128) {
3739 TCGv_i32 oi = tcg_temp_new_i32();
3740 if (ctx->le_mode) {
3741 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx));
3742 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
3743 } else {
3744 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx));
3745 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
3747 tcg_temp_free_i32(oi);
3748 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
3749 } else {
3750 /* Restart with exclusive lock. */
3751 gen_helper_exit_atomic(cpu_env);
3752 ctx->base.is_jmp = DISAS_NORETURN;
3754 } else if (ctx->le_mode) {
3755 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ);
3756 gen_addr_add(ctx, EA, EA, 8);
3757 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ);
3758 } else {
3759 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ);
3760 gen_addr_add(ctx, EA, EA, 8);
3761 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ);
3763 tcg_temp_free(EA);
3765 #endif
3767 /*** Integer store ***/
3768 #define GEN_ST(name, stop, opc, type) \
3769 static void glue(gen_, name)(DisasContext *ctx) \
3771 TCGv EA; \
3772 gen_set_access_type(ctx, ACCESS_INT); \
3773 EA = tcg_temp_new(); \
3774 gen_addr_imm_index(ctx, EA, 0); \
3775 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
3776 tcg_temp_free(EA); \
3779 #define GEN_STU(name, stop, opc, type) \
3780 static void glue(gen_, stop##u)(DisasContext *ctx) \
3782 TCGv EA; \
3783 if (unlikely(rA(ctx->opcode) == 0)) { \
3784 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3785 return; \
3787 gen_set_access_type(ctx, ACCESS_INT); \
3788 EA = tcg_temp_new(); \
3789 if (type == PPC_64B) \
3790 gen_addr_imm_index(ctx, EA, 0x03); \
3791 else \
3792 gen_addr_imm_index(ctx, EA, 0); \
3793 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
3794 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3795 tcg_temp_free(EA); \
3798 #define GEN_STUX(name, stop, opc2, opc3, type) \
3799 static void glue(gen_, name##ux)(DisasContext *ctx) \
3801 TCGv EA; \
3802 if (unlikely(rA(ctx->opcode) == 0)) { \
3803 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3804 return; \
3806 gen_set_access_type(ctx, ACCESS_INT); \
3807 EA = tcg_temp_new(); \
3808 gen_addr_reg_index(ctx, EA); \
3809 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
3810 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3811 tcg_temp_free(EA); \
3814 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \
3815 static void glue(gen_, name##x)(DisasContext *ctx) \
3817 TCGv EA; \
3818 chk; \
3819 gen_set_access_type(ctx, ACCESS_INT); \
3820 EA = tcg_temp_new(); \
3821 gen_addr_reg_index(ctx, EA); \
3822 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
3823 tcg_temp_free(EA); \
3825 #define GEN_STX(name, stop, opc2, opc3, type) \
3826 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3828 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \
3829 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3831 #define GEN_STS(name, stop, op, type) \
3832 GEN_ST(name, stop, op | 0x20, type); \
3833 GEN_STU(name, stop, op | 0x21, type); \
3834 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
3835 GEN_STX(name, stop, 0x17, op | 0x00, type)
3837 /* stb stbu stbux stbx */
3838 GEN_STS(stb, st8, 0x06, PPC_INTEGER);
3839 /* sth sthu sthux sthx */
3840 GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
3841 /* stw stwu stwux stwx */
3842 GEN_STS(stw, st32, 0x04, PPC_INTEGER);
3844 #define GEN_STEPX(name, stop, opc2, opc3) \
3845 static void glue(gen_, name##epx)(DisasContext *ctx) \
3847 TCGv EA; \
3848 CHK_SV; \
3849 gen_set_access_type(ctx, ACCESS_INT); \
3850 EA = tcg_temp_new(); \
3851 gen_addr_reg_index(ctx, EA); \
3852 tcg_gen_qemu_st_tl( \
3853 cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop); \
3854 tcg_temp_free(EA); \
3857 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
3858 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
3859 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
3860 #if defined(TARGET_PPC64)
3861 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1d, 0x04)
3862 #endif
3864 #if defined(TARGET_PPC64)
3865 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B);
3866 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B);
3867 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
3868 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
3869 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
3870 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
3872 static void gen_std(DisasContext *ctx)
3874 int rs;
3875 TCGv EA;
3877 rs = rS(ctx->opcode);
3878 if ((ctx->opcode & 0x3) == 0x2) { /* stq */
3879 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
3880 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
3881 TCGv hi, lo;
3883 if (!(ctx->insns_flags & PPC_64BX)) {
3884 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3887 if (!legal_in_user_mode && ctx->pr) {
3888 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3889 return;
3892 if (!le_is_supported && ctx->le_mode) {
3893 gen_align_no_le(ctx);
3894 return;
3897 if (unlikely(rs & 1)) {
3898 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3899 return;
3901 gen_set_access_type(ctx, ACCESS_INT);
3902 EA = tcg_temp_new();
3903 gen_addr_imm_index(ctx, EA, 0x03);
3905 /* Note that the low part is always in RS+1, even in LE mode. */
3906 lo = cpu_gpr[rs + 1];
3907 hi = cpu_gpr[rs];
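/* As with lq above, the 16-byte store must stay atomic when running in parallel. */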
3909 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3910 if (HAVE_ATOMIC128) {
3911 TCGv_i32 oi = tcg_temp_new_i32();
3912 if (ctx->le_mode) {
3913 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx));
3914 gen_helper_stq_le_parallel(cpu_env, EA, lo, hi, oi);
3915 } else {
3916 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx));
3917 gen_helper_stq_be_parallel(cpu_env, EA, lo, hi, oi);
3919 tcg_temp_free_i32(oi);
3920 } else {
3921 /* Restart with exclusive lock. */
3922 gen_helper_exit_atomic(cpu_env);
3923 ctx->base.is_jmp = DISAS_NORETURN;
3925 } else if (ctx->le_mode) {
3926 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_LEQ);
3927 gen_addr_add(ctx, EA, EA, 8);
3928 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_LEQ);
3929 } else {
3930 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_BEQ);
3931 gen_addr_add(ctx, EA, EA, 8);
3932 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_BEQ);
3934 tcg_temp_free(EA);
3935 } else {
3936 /* std / stdu */
3937 if (Rc(ctx->opcode)) {
3938 if (unlikely(rA(ctx->opcode) == 0)) {
3939 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3940 return;
3943 gen_set_access_type(ctx, ACCESS_INT);
3944 EA = tcg_temp_new();
3945 gen_addr_imm_index(ctx, EA, 0x03);
3946 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA);
3947 if (Rc(ctx->opcode)) {
3948 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
3950 tcg_temp_free(EA);
3953 #endif
3954 /*** Integer load and store with byte reverse ***/
3956 /* lhbrx */
3957 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
3959 /* lwbrx */
3960 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
3962 #if defined(TARGET_PPC64)
3963 /* ldbrx */
3964 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
3965 /* stdbrx */
3966 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
3967 #endif /* TARGET_PPC64 */
3969 /* sthbrx */
3970 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
3971 /* stwbrx */
3972 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3974 /*** Integer load and store multiple ***/
3976 /* lmw */
3977 static void gen_lmw(DisasContext *ctx)
3979 TCGv t0;
3980 TCGv_i32 t1;
3982 if (ctx->le_mode) {
3983 gen_align_no_le(ctx);
3984 return;
3986 gen_set_access_type(ctx, ACCESS_INT);
3987 t0 = tcg_temp_new();
3988 t1 = tcg_const_i32(rD(ctx->opcode));
3989 gen_addr_imm_index(ctx, t0, 0);
3990 gen_helper_lmw(cpu_env, t0, t1);
3991 tcg_temp_free(t0);
3992 tcg_temp_free_i32(t1);
3995 /* stmw */
3996 static void gen_stmw(DisasContext *ctx)
3998 TCGv t0;
3999 TCGv_i32 t1;
4001 if (ctx->le_mode) {
4002 gen_align_no_le(ctx);
4003 return;
4005 gen_set_access_type(ctx, ACCESS_INT);
4006 t0 = tcg_temp_new();
4007 t1 = tcg_const_i32(rS(ctx->opcode));
4008 gen_addr_imm_index(ctx, t0, 0);
4009 gen_helper_stmw(cpu_env, t0, t1);
4010 tcg_temp_free(t0);
4011 tcg_temp_free_i32(t1);
4014 /*** Integer load and store strings ***/
4016 /* lswi */
4018 * PowerPC32 specification says we must generate an exception if rA is
4019 * in the range of registers to be loaded. On the other hand, IBM says
4020 * this is valid, but rA won't be loaded. For now, I'll follow the
4021 * spec...
4023 static void gen_lswi(DisasContext *ctx)
4025 TCGv t0;
4026 TCGv_i32 t1, t2;
4027 int nb = NB(ctx->opcode);
4028 int start = rD(ctx->opcode);
4029 int ra = rA(ctx->opcode);
4030 int nr;
4032 if (ctx->le_mode) {
4033 gen_align_no_le(ctx);
4034 return;
4036 if (nb == 0) {
4037 nb = 32;
4039 nr = DIV_ROUND_UP(nb, 4);
4040 if (unlikely(lsw_reg_in_range(start, nr, ra))) {
4041 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4042 return;
4044 gen_set_access_type(ctx, ACCESS_INT);
4045 t0 = tcg_temp_new();
4046 gen_addr_register(ctx, t0);
4047 t1 = tcg_const_i32(nb);
4048 t2 = tcg_const_i32(start);
4049 gen_helper_lsw(cpu_env, t0, t1, t2);
4050 tcg_temp_free(t0);
4051 tcg_temp_free_i32(t1);
4052 tcg_temp_free_i32(t2);
4055 /* lswx */
4056 static void gen_lswx(DisasContext *ctx)
4058 TCGv t0;
4059 TCGv_i32 t1, t2, t3;
4061 if (ctx->le_mode) {
4062 gen_align_no_le(ctx);
4063 return;
4065 gen_set_access_type(ctx, ACCESS_INT);
4066 t0 = tcg_temp_new();
4067 gen_addr_reg_index(ctx, t0);
4068 t1 = tcg_const_i32(rD(ctx->opcode));
4069 t2 = tcg_const_i32(rA(ctx->opcode));
4070 t3 = tcg_const_i32(rB(ctx->opcode));
4071 gen_helper_lswx(cpu_env, t0, t1, t2, t3);
4072 tcg_temp_free(t0);
4073 tcg_temp_free_i32(t1);
4074 tcg_temp_free_i32(t2);
4075 tcg_temp_free_i32(t3);
4078 /* stswi */
4079 static void gen_stswi(DisasContext *ctx)
4081 TCGv t0;
4082 TCGv_i32 t1, t2;
4083 int nb = NB(ctx->opcode);
4085 if (ctx->le_mode) {
4086 gen_align_no_le(ctx);
4087 return;
4089 gen_set_access_type(ctx, ACCESS_INT);
4090 t0 = tcg_temp_new();
4091 gen_addr_register(ctx, t0);
4092 if (nb == 0) {
4093 nb = 32;
4095 t1 = tcg_const_i32(nb);
4096 t2 = tcg_const_i32(rS(ctx->opcode));
4097 gen_helper_stsw(cpu_env, t0, t1, t2);
4098 tcg_temp_free(t0);
4099 tcg_temp_free_i32(t1);
4100 tcg_temp_free_i32(t2);
4103 /* stswx */
4104 static void gen_stswx(DisasContext *ctx)
4106 TCGv t0;
4107 TCGv_i32 t1, t2;
4109 if (ctx->le_mode) {
4110 gen_align_no_le(ctx);
4111 return;
4113 gen_set_access_type(ctx, ACCESS_INT);
4114 t0 = tcg_temp_new();
4115 gen_addr_reg_index(ctx, t0);
4116 t1 = tcg_temp_new_i32();
4117 tcg_gen_trunc_tl_i32(t1, cpu_xer);
4118 tcg_gen_andi_i32(t1, t1, 0x7F);
4119 t2 = tcg_const_i32(rS(ctx->opcode));
4120 gen_helper_stsw(cpu_env, t0, t1, t2);
4121 tcg_temp_free(t0);
4122 tcg_temp_free_i32(t1);
4123 tcg_temp_free_i32(t2);
4126 /*** Memory synchronisation ***/
4127 /* eieio */
4128 static void gen_eieio(DisasContext *ctx)
4130 TCGBar bar = TCG_MO_LD_ST;
4133 * POWER9 has an eieio instruction variant using bit 6 as a hint to
4134 * tell the CPU it is a store-forwarding barrier.
4136 if (ctx->opcode & 0x2000000) {
4138 * ISA says that "Reserved fields in instructions are ignored
4139 * by the processor". So ignore bit 6 on non-POWER9 CPUs, but
4140 * as this is not an instruction software should be using,
4141 * complain to the user.
4143 if (!(ctx->insns_flags2 & PPC2_ISA300)) {
4144 qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
4145 TARGET_FMT_lx "\n", ctx->cia);
4146 } else {
4147 bar = TCG_MO_ST_LD;
4151 tcg_gen_mb(bar | TCG_BAR_SC);
4154 #if !defined(CONFIG_USER_ONLY)
4155 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
4157 TCGv_i32 t;
4158 TCGLabel *l;
4160 if (!ctx->lazy_tlb_flush) {
4161 return;
4163 l = gen_new_label();
4164 t = tcg_temp_new_i32();
4165 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
4166 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
4167 if (global) {
4168 gen_helper_check_tlb_flush_global(cpu_env);
4169 } else {
4170 gen_helper_check_tlb_flush_local(cpu_env);
4172 gen_set_label(l);
4173 tcg_temp_free_i32(t);
4175 #else
4176 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
4177 #endif
4179 /* isync */
4180 static void gen_isync(DisasContext *ctx)
4183 * We need to check for a pending TLB flush. This can only happen in
4184 * kernel mode, however, so check MSR_PR.
4186 if (!ctx->pr) {
4187 gen_check_tlb_flush(ctx, false);
4189 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
4190 gen_stop_exception(ctx);
4193 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE))
4195 static void gen_load_locked(DisasContext *ctx, MemOp memop)
4197 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
4198 TCGv t0 = tcg_temp_new();
4200 gen_set_access_type(ctx, ACCESS_RES);
4201 gen_addr_reg_index(ctx, t0);
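/*
 * Load the value and record both the reservation address and the
 * value that was loaded; the matching stcx. compares against these
 * to decide whether its store succeeds.
 */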
4202 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN);
4203 tcg_gen_mov_tl(cpu_reserve, t0);
4204 tcg_gen_mov_tl(cpu_reserve_val, gpr);
4205 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
4206 tcg_temp_free(t0);
4209 #define LARX(name, memop) \
4210 static void gen_##name(DisasContext *ctx) \
4212 gen_load_locked(ctx, memop); \
4215 /* lbarx, lharx, lwarx */
4216 LARX(lbarx, DEF_MEMOP(MO_UB))
4217 LARX(lharx, DEF_MEMOP(MO_UW))
4218 LARX(lwarx, DEF_MEMOP(MO_UL))
4220 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
4221 TCGv EA, TCGCond cond, int addend)
4223 TCGv t = tcg_temp_new();
4224 TCGv t2 = tcg_temp_new();
4225 TCGv u = tcg_temp_new();
4227 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
4228 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop));
4229 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
4230 tcg_gen_addi_tl(u, t, addend);
4232 /* E.g. for fetch and increment bounded... */
4233 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
4234 tcg_gen_movcond_tl(cond, u, t, t2, u, t);
4235 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);
4237 /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
4238 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1));
4239 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u);
4241 tcg_temp_free(t);
4242 tcg_temp_free(t2);
4243 tcg_temp_free(u);
4246 static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
4248 uint32_t gpr_FC = FC(ctx->opcode);
4249 TCGv EA = tcg_temp_new();
4250 int rt = rD(ctx->opcode);
4251 bool need_serial;
4252 TCGv src, dst;
4254 gen_addr_register(ctx, EA);
4255 dst = cpu_gpr[rt];
4256 src = cpu_gpr[(rt + 1) & 31];
4258 need_serial = false;
4259 memop |= MO_ALIGN;
4260 switch (gpr_FC) {
4261 case 0: /* Fetch and add */
4262 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
4263 break;
4264 case 1: /* Fetch and xor */
4265 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
4266 break;
4267 case 2: /* Fetch and or */
4268 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
4269 break;
4270 case 3: /* Fetch and 'and' */
4271 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
4272 break;
4273 case 4: /* Fetch and max unsigned */
4274 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
4275 break;
4276 case 5: /* Fetch and max signed */
4277 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
4278 break;
4279 case 6: /* Fetch and min unsigned */
4280 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
4281 break;
4282 case 7: /* Fetch and min signed */
4283 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
4284 break;
4285 case 8: /* Swap */
4286 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
4287 break;
4289 case 16: /* Compare and swap not equal */
4290 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4291 need_serial = true;
4292 } else {
4293 TCGv t0 = tcg_temp_new();
4294 TCGv t1 = tcg_temp_new();
4296 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
4297 if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
4298 tcg_gen_mov_tl(t1, src);
4299 } else {
4300 tcg_gen_ext32u_tl(t1, src);
4302 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
4303 cpu_gpr[(rt + 2) & 31], t0);
4304 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
4305 tcg_gen_mov_tl(dst, t0);
4307 tcg_temp_free(t0);
4308 tcg_temp_free(t1);
4310 break;
4312 case 24: /* Fetch and increment bounded */
4313 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4314 need_serial = true;
4315 } else {
4316 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
4318 break;
4319 case 25: /* Fetch and increment equal */
4320 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4321 need_serial = true;
4322 } else {
4323 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
4325 break;
4326 case 28: /* Fetch and decrement bounded */
4327 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4328 need_serial = true;
4329 } else {
4330 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
4332 break;
4334 default:
4335 /* invoke data storage error handler */
4336 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
4338 tcg_temp_free(EA);
4340 if (need_serial) {
4341 /* Restart with exclusive lock. */
4342 gen_helper_exit_atomic(cpu_env);
4343 ctx->base.is_jmp = DISAS_NORETURN;
4347 static void gen_lwat(DisasContext *ctx)
4349 gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
4352 #ifdef TARGET_PPC64
4353 static void gen_ldat(DisasContext *ctx)
4355 gen_ld_atomic(ctx, DEF_MEMOP(MO_Q));
4357 #endif
4359 static void gen_st_atomic(DisasContext *ctx, MemOp memop)
4361 uint32_t gpr_FC = FC(ctx->opcode);
4362 TCGv EA = tcg_temp_new();
4363 TCGv src, discard;
4365 gen_addr_register(ctx, EA);
4366 src = cpu_gpr[rD(ctx->opcode)];
4367 discard = tcg_temp_new();
4369 memop |= MO_ALIGN;
4370 switch (gpr_FC) {
4371 case 0: /* add and Store */
4372 tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4373 break;
4374 case 1: /* xor and Store */
4375 tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4376 break;
4377 case 2: /* Or and Store */
4378 tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4379 break;
4380 case 3: /* 'and' and Store */
4381 tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4382 break;
4383 case 4: /* Store max unsigned */
4384 tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4385 break;
4386 case 5: /* Store max signed */
4387 tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4388 break;
4389 case 6: /* Store min unsigned */
4390 tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4391 break;
4392 case 7: /* Store min signed */
4393 tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4394 break;
4395 case 24: /* Store twin */
4396 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4397 /* Restart with exclusive lock. */
4398 gen_helper_exit_atomic(cpu_env);
4399 ctx->base.is_jmp = DISAS_NORETURN;
4400 } else {
4401 TCGv t = tcg_temp_new();
4402 TCGv t2 = tcg_temp_new();
4403 TCGv s = tcg_temp_new();
4404 TCGv s2 = tcg_temp_new();
4405 TCGv ea_plus_s = tcg_temp_new();
4407 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
4408 tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
4409 tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
4410 tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
4411 tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
4412 tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
4413 tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);
4415 tcg_temp_free(ea_plus_s);
4416 tcg_temp_free(s2);
4417 tcg_temp_free(s);
4418 tcg_temp_free(t2);
4419 tcg_temp_free(t);
4421 break;
4422 default:
4423 /* invoke data storage error handler */
4424 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
4426 tcg_temp_free(discard);
4427 tcg_temp_free(EA);
4430 static void gen_stwat(DisasContext *ctx)
4432 gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
4435 #ifdef TARGET_PPC64
4436 static void gen_stdat(DisasContext *ctx)
4438 gen_st_atomic(ctx, DEF_MEMOP(MO_Q));
4440 #endif
4442 static void gen_conditional_store(DisasContext *ctx, MemOp memop)
4444 TCGLabel *l1 = gen_new_label();
4445 TCGLabel *l2 = gen_new_label();
4446 TCGv t0 = tcg_temp_new();
4447 int reg = rS(ctx->opcode);
4449 gen_set_access_type(ctx, ACCESS_RES);
4450 gen_addr_reg_index(ctx, t0);
4451 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
4452 tcg_temp_free(t0);
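/*
 * The reservation itself is modelled by a cmpxchg against the value
 * that was loaded by the matching larx (cpu_reserve_val).  Note that
 * this value comparison can succeed in cases (e.g. an ABA sequence)
 * where a real reservation would already have been lost.
 */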
4454 t0 = tcg_temp_new();
4455 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
4456 cpu_gpr[reg], ctx->mem_idx,
4457 DEF_MEMOP(memop) | MO_ALIGN);
4458 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
4459 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
4460 tcg_gen_or_tl(t0, t0, cpu_so);
4461 tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
4462 tcg_temp_free(t0);
4463 tcg_gen_br(l2);
4465 gen_set_label(l1);
4468 * Address mismatch implies failure. But we still need to provide
4469 * the memory barrier semantics of the instruction.
4471 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
4472 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4474 gen_set_label(l2);
4475 tcg_gen_movi_tl(cpu_reserve, -1);
4478 #define STCX(name, memop) \
4479 static void gen_##name(DisasContext *ctx) \
4481 gen_conditional_store(ctx, memop); \
4484 STCX(stbcx_, DEF_MEMOP(MO_UB))
4485 STCX(sthcx_, DEF_MEMOP(MO_UW))
4486 STCX(stwcx_, DEF_MEMOP(MO_UL))
4488 #if defined(TARGET_PPC64)
4489 /* ldarx */
4490 LARX(ldarx, DEF_MEMOP(MO_Q))
4491 /* stdcx. */
4492 STCX(stdcx_, DEF_MEMOP(MO_Q))
4494 /* lqarx */
4495 static void gen_lqarx(DisasContext *ctx)
4497 int rd = rD(ctx->opcode);
4498 TCGv EA, hi, lo;
4500 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
4501 (rd == rB(ctx->opcode)))) {
4502 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4503 return;
4506 gen_set_access_type(ctx, ACCESS_RES);
4507 EA = tcg_temp_new();
4508 gen_addr_reg_index(ctx, EA);
4510 /* Note that the low part is always in RD+1, even in LE mode. */
4511 lo = cpu_gpr[rd + 1];
4512 hi = cpu_gpr[rd];
4514 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4515 if (HAVE_ATOMIC128) {
4516 TCGv_i32 oi = tcg_temp_new_i32();
4517 if (ctx->le_mode) {
4518 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ | MO_ALIGN_16,
4519 ctx->mem_idx));
4520 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
4521 } else {
4522 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ | MO_ALIGN_16,
4523 ctx->mem_idx));
4524 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
4526 tcg_temp_free_i32(oi);
4527 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
4528 } else {
4529 /* Restart with exclusive lock. */
4530 gen_helper_exit_atomic(cpu_env);
4531 ctx->base.is_jmp = DISAS_NORETURN;
4532 tcg_temp_free(EA);
4533 return;
4535 } else if (ctx->le_mode) {
4536 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16);
4537 tcg_gen_mov_tl(cpu_reserve, EA);
4538 gen_addr_add(ctx, EA, EA, 8);
4539 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ);
4540 } else {
4541 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16);
4542 tcg_gen_mov_tl(cpu_reserve, EA);
4543 gen_addr_add(ctx, EA, EA, 8);
4544 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ);
4546 tcg_temp_free(EA);
4548 tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
4549 tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
4552 /* stqcx. */
4553 static void gen_stqcx_(DisasContext *ctx)
4555 int rs = rS(ctx->opcode);
4556 TCGv EA, hi, lo;
4558 if (unlikely(rs & 1)) {
4559 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4560 return;
4563 gen_set_access_type(ctx, ACCESS_RES);
4564 EA = tcg_temp_new();
4565 gen_addr_reg_index(ctx, EA);
4567 /* Note that the low part is always in RS+1, even in LE mode. */
4568 lo = cpu_gpr[rs + 1];
4569 hi = cpu_gpr[rs];
4571 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4572 if (HAVE_CMPXCHG128) {
4573 TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_Q) | MO_ALIGN_16);
4574 if (ctx->le_mode) {
4575 gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
4576 EA, lo, hi, oi);
4577 } else {
4578 gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
4579 EA, lo, hi, oi);
4581 tcg_temp_free_i32(oi);
4582 } else {
4583 /* Restart with exclusive lock. */
4584 gen_helper_exit_atomic(cpu_env);
4585 ctx->base.is_jmp = DISAS_NORETURN;
4587 tcg_temp_free(EA);
4588 } else {
4589 TCGLabel *lab_fail = gen_new_label();
4590 TCGLabel *lab_over = gen_new_label();
4591 TCGv_i64 t0 = tcg_temp_new_i64();
4592 TCGv_i64 t1 = tcg_temp_new_i64();
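/*
 * cpu_reserve_val holds the doubleword that the matching lqarx put in
 * the even register (the high half) and reserve_val2 the one in the
 * odd register; which of them corresponds to the first doubleword in
 * memory depends on endianness, hence the swapped offsets below.
 */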
4594 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
4595 tcg_temp_free(EA);
4597 gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
4598 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
4599 ? offsetof(CPUPPCState, reserve_val2)
4600 : offsetof(CPUPPCState, reserve_val)));
4601 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);
4603 tcg_gen_addi_i64(t0, cpu_reserve, 8);
4604 gen_qemu_ld64_i64(ctx, t0, t0);
4605 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
4606 ? offsetof(CPUPPCState, reserve_val)
4607 : offsetof(CPUPPCState, reserve_val2)));
4608 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);
4610 /* Success */
4611 gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
4612 tcg_gen_addi_i64(t0, cpu_reserve, 8);
4613 gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);
4615 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4616 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
4617 tcg_gen_br(lab_over);
4619 gen_set_label(lab_fail);
4620 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4622 gen_set_label(lab_over);
4623 tcg_gen_movi_tl(cpu_reserve, -1);
4624 tcg_temp_free_i64(t0);
4625 tcg_temp_free_i64(t1);
4628 #endif /* defined(TARGET_PPC64) */
4630 /* sync */
4631 static void gen_sync(DisasContext *ctx)
4633 uint32_t l = (ctx->opcode >> 21) & 3;
4636 * We may need to check for a pending TLB flush.
4638 * We do this on ptesync (l == 2) on ppc64 and on any sync on ppc32.
4640 * Additionally, this can only happen in kernel mode, so check
4641 * MSR_PR as well.
4643 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
4644 gen_check_tlb_flush(ctx, true);
4646 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
4649 /* wait */
4650 static void gen_wait(DisasContext *ctx)
4652 TCGv_i32 t0 = tcg_const_i32(1);
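/*
 * cpu_env points at the env member inside PowerPCCPU, so the negative
 * offset below reaches back to the embedding CPUState in order to set
 * its 'halted' field.
 */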
4653 tcg_gen_st_i32(t0, cpu_env,
4654 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
4655 tcg_temp_free_i32(t0);
4656 /* Stop translation, as the CPU is supposed to sleep from now */
4657 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4660 #if defined(TARGET_PPC64)
4661 static void gen_doze(DisasContext *ctx)
4663 #if defined(CONFIG_USER_ONLY)
4664 GEN_PRIV;
4665 #else
4666 TCGv_i32 t;
4668 CHK_HV;
4669 t = tcg_const_i32(PPC_PM_DOZE);
4670 gen_helper_pminsn(cpu_env, t);
4671 tcg_temp_free_i32(t);
4672 /* Stop translation, as the CPU is supposed to sleep from now */
4673 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4674 #endif /* defined(CONFIG_USER_ONLY) */
4677 static void gen_nap(DisasContext *ctx)
4679 #if defined(CONFIG_USER_ONLY)
4680 GEN_PRIV;
4681 #else
4682 TCGv_i32 t;
4684 CHK_HV;
4685 t = tcg_const_i32(PPC_PM_NAP);
4686 gen_helper_pminsn(cpu_env, t);
4687 tcg_temp_free_i32(t);
4688 /* Stop translation, as the CPU is supposed to sleep from now */
4689 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4690 #endif /* defined(CONFIG_USER_ONLY) */
4693 static void gen_stop(DisasContext *ctx)
4695 #if defined(CONFIG_USER_ONLY)
4696 GEN_PRIV;
4697 #else
4698 TCGv_i32 t;
4700 CHK_HV;
4701 t = tcg_const_i32(PPC_PM_STOP);
4702 gen_helper_pminsn(cpu_env, t);
4703 tcg_temp_free_i32(t);
4704 /* Stop translation, as the CPU is supposed to sleep from now */
4705 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4706 #endif /* defined(CONFIG_USER_ONLY) */
4709 static void gen_sleep(DisasContext *ctx)
4711 #if defined(CONFIG_USER_ONLY)
4712 GEN_PRIV;
4713 #else
4714 TCGv_i32 t;
4716 CHK_HV;
4717 t = tcg_const_i32(PPC_PM_SLEEP);
4718 gen_helper_pminsn(cpu_env, t);
4719 tcg_temp_free_i32(t);
4720 /* Stop translation, as the CPU is supposed to sleep from now */
4721 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4722 #endif /* defined(CONFIG_USER_ONLY) */
4725 static void gen_rvwinkle(DisasContext *ctx)
4727 #if defined(CONFIG_USER_ONLY)
4728 GEN_PRIV;
4729 #else
4730 TCGv_i32 t;
4732 CHK_HV;
4733 t = tcg_const_i32(PPC_PM_RVWINKLE);
4734 gen_helper_pminsn(cpu_env, t);
4735 tcg_temp_free_i32(t);
4736 /* Stop translation, as the CPU is supposed to sleep from now */
4737 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4738 #endif /* defined(CONFIG_USER_ONLY) */
4740 #endif /* defined(TARGET_PPC64) */
4742 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
4744 #if defined(TARGET_PPC64)
4745 if (ctx->has_cfar) {
4746 tcg_gen_movi_tl(cpu_cfar, nip);
4748 #endif
4751 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
4753 if (unlikely(ctx->singlestep_enabled)) {
4754 return false;
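/*
 * Direct TB chaining is only used when the destination lies on the
 * same guest page as this TB (system emulation); user mode has no MMU
 * remapping to worry about, so any destination is acceptable.
 */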
4757 #ifndef CONFIG_USER_ONLY
4758 return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
4759 #else
4760 return true;
4761 #endif
4764 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
4766 int sse = ctx->singlestep_enabled;
4767 if (unlikely(sse)) {
4768 if (sse & GDBSTUB_SINGLE_STEP) {
4769 gen_debug_exception(ctx);
4770 } else if (sse & (CPU_SINGLE_STEP | CPU_BRANCH_STEP)) {
4771 uint32_t excp = gen_prep_dbgex(ctx);
4772 gen_exception(ctx, excp);
4774 tcg_gen_exit_tb(NULL, 0);
4775 } else {
4776 tcg_gen_lookup_and_goto_ptr();
4780 /*** Branch ***/
4781 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
4783 if (NARROW_MODE(ctx)) {
4784 dest = (uint32_t) dest;
4786 if (use_goto_tb(ctx, dest)) {
4787 tcg_gen_goto_tb(n);
4788 tcg_gen_movi_tl(cpu_nip, dest & ~3);
4789 tcg_gen_exit_tb(ctx->base.tb, n);
4790 } else {
4791 tcg_gen_movi_tl(cpu_nip, dest & ~3);
4792 gen_lookup_and_goto_ptr(ctx);
4796 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
4798 if (NARROW_MODE(ctx)) {
4799 nip = (uint32_t)nip;
4801 tcg_gen_movi_tl(cpu_lr, nip);
4804 /* b ba bl bla */
4805 static void gen_b(DisasContext *ctx)
4807 target_ulong li, target;
4809 ctx->exception = POWERPC_EXCP_BRANCH;
4810 /* sign extend LI */
4811 li = LI(ctx->opcode);
4812 li = (li ^ 0x02000000) - 0x02000000;
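/*
 * The xor/subtract pair sign-extends the 26-bit displacement (the
 * 24-bit LI field shifted left by 2) from its sign bit, 0x02000000.
 */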
4813 if (likely(AA(ctx->opcode) == 0)) {
4814 target = ctx->cia + li;
4815 } else {
4816 target = li;
4818 if (LK(ctx->opcode)) {
4819 gen_setlr(ctx, ctx->base.pc_next);
4821 gen_update_cfar(ctx, ctx->cia);
4822 gen_goto_tb(ctx, 0, target);
4825 #define BCOND_IM 0
4826 #define BCOND_LR 1
4827 #define BCOND_CTR 2
4828 #define BCOND_TAR 3
4830 static void gen_bcond(DisasContext *ctx, int type)
4832 uint32_t bo = BO(ctx->opcode);
4833 TCGLabel *l1;
4834 TCGv target;
4835 ctx->exception = POWERPC_EXCP_BRANCH;
4837 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
4838 target = tcg_temp_local_new();
4839 if (type == BCOND_CTR) {
4840 tcg_gen_mov_tl(target, cpu_ctr);
4841 } else if (type == BCOND_TAR) {
4842 gen_load_spr(target, SPR_TAR);
4843 } else {
4844 tcg_gen_mov_tl(target, cpu_lr);
4846 } else {
4847 target = NULL;
4849 if (LK(ctx->opcode)) {
4850 gen_setlr(ctx, ctx->base.pc_next);
4852 l1 = gen_new_label();
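/*
 * BO field, as tested below: 0x10 set means do not test a CR bit,
 * 0x8 selects branching on the CR bit being set rather than clear,
 * 0x4 set means do not decrement/test CTR, and 0x2 selects branching
 * on CTR == 0 rather than CTR != 0.
 */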
4853 if ((bo & 0x4) == 0) {
4854 /* Decrement and test CTR */
4855 TCGv temp = tcg_temp_new();
4857 if (type == BCOND_CTR) {
4859 * All ISAs up to v3 describe this form of bcctr as invalid, but
4860 * some processors, i.e. 64-bit server processors compliant with
4861 * arch 2.x, implement a "test and decrement" logic instead, as
4862 * described in their respective UMs. This logic makes CTR act as
4863 * both the branch target and a counter, which makes the form
4864 * basically useless and thus never used in real code.
4866 * This form was hence chosen to trigger the extra micro-architectural
4867 * side-effect on real HW needed for the Spectre v2 workaround.
4868 * It is up to guests that implement this workaround, i.e. Linux, to
4869 * use this form in a way that only triggers the side-effect, without
4870 * doing anything else harmful.
4872 if (unlikely(!is_book3s_arch2x(ctx))) {
4873 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4874 tcg_temp_free(temp);
4875 tcg_temp_free(target);
4876 return;
4879 if (NARROW_MODE(ctx)) {
4880 tcg_gen_ext32u_tl(temp, cpu_ctr);
4881 } else {
4882 tcg_gen_mov_tl(temp, cpu_ctr);
4884 if (bo & 0x2) {
4885 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4886 } else {
4887 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4889 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4890 } else {
4891 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4892 if (NARROW_MODE(ctx)) {
4893 tcg_gen_ext32u_tl(temp, cpu_ctr);
4894 } else {
4895 tcg_gen_mov_tl(temp, cpu_ctr);
4897 if (bo & 0x2) {
4898 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4899 } else {
4900 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4903 tcg_temp_free(temp);
4905 if ((bo & 0x10) == 0) {
4906 /* Test CR */
4907 uint32_t bi = BI(ctx->opcode);
4908 uint32_t mask = 0x08 >> (bi & 0x03);
4909 TCGv_i32 temp = tcg_temp_new_i32();
4911 if (bo & 0x8) {
4912 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4913 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
4914 } else {
4915 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4916 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
4918 tcg_temp_free_i32(temp);
4920 gen_update_cfar(ctx, ctx->cia);
4921 if (type == BCOND_IM) {
4922 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
4923 if (likely(AA(ctx->opcode) == 0)) {
4924 gen_goto_tb(ctx, 0, ctx->cia + li);
4925 } else {
4926 gen_goto_tb(ctx, 0, li);
4928 } else {
4929 if (NARROW_MODE(ctx)) {
4930 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
4931 } else {
4932 tcg_gen_andi_tl(cpu_nip, target, ~3);
4934 gen_lookup_and_goto_ptr(ctx);
4935 tcg_temp_free(target);
4937 if ((bo & 0x14) != 0x14) {
4938 /* fallthrough case */
4939 gen_set_label(l1);
4940 gen_goto_tb(ctx, 1, ctx->base.pc_next);
4944 static void gen_bc(DisasContext *ctx)
4946 gen_bcond(ctx, BCOND_IM);
4949 static void gen_bcctr(DisasContext *ctx)
4951 gen_bcond(ctx, BCOND_CTR);
4954 static void gen_bclr(DisasContext *ctx)
4956 gen_bcond(ctx, BCOND_LR);
4959 static void gen_bctar(DisasContext *ctx)
4961 gen_bcond(ctx, BCOND_TAR);
4964 /*** Condition register logical ***/
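/*
 * CR is stored as eight 4-bit crf fields.  In the macro below, 'sh'
 * shifts the source bit into the position of the destination bit so
 * the logical op can be applied, and the result is then masked and
 * merged back into the destination field.
 */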
4965 #define GEN_CRLOGIC(name, tcg_op, opc) \
4966 static void glue(gen_, name)(DisasContext *ctx) \
4968 uint8_t bitmask; \
4969 int sh; \
4970 TCGv_i32 t0, t1; \
4971 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
4972 t0 = tcg_temp_new_i32(); \
4973 if (sh > 0) \
4974 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
4975 else if (sh < 0) \
4976 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
4977 else \
4978 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
4979 t1 = tcg_temp_new_i32(); \
4980 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
4981 if (sh > 0) \
4982 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
4983 else if (sh < 0) \
4984 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
4985 else \
4986 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
4987 tcg_op(t0, t0, t1); \
4988 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \
4989 tcg_gen_andi_i32(t0, t0, bitmask); \
4990 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
4991 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
4992 tcg_temp_free_i32(t0); \
4993 tcg_temp_free_i32(t1); \
4996 /* crand */
4997 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
4998 /* crandc */
4999 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
5000 /* creqv */
5001 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
5002 /* crnand */
5003 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
5004 /* crnor */
5005 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
5006 /* cror */
5007 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
5008 /* crorc */
5009 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
5010 /* crxor */
5011 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
5013 /* mcrf */
5014 static void gen_mcrf(DisasContext *ctx)
5016 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
5019 /*** System linkage ***/
5021 /* rfi (supervisor only) */
5022 static void gen_rfi(DisasContext *ctx)
5024 #if defined(CONFIG_USER_ONLY)
5025 GEN_PRIV;
5026 #else
5028 * This instruction doesn't exist anymore on 64-bit server
5029 * processors compliant with arch 2.x
5031 if (is_book3s_arch2x(ctx)) {
5032 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5033 return;
5035 /* Restore CPU state */
5036 CHK_SV;
5037 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
5038 gen_io_start();
5040 gen_update_cfar(ctx, ctx->cia);
5041 gen_helper_rfi(cpu_env);
5042 gen_sync_exception(ctx);
5043 #endif
5046 #if defined(TARGET_PPC64)
5047 static void gen_rfid(DisasContext *ctx)
5049 #if defined(CONFIG_USER_ONLY)
5050 GEN_PRIV;
5051 #else
5052 /* Restore CPU state */
5053 CHK_SV;
5054 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
5055 gen_io_start();
5057 gen_update_cfar(ctx, ctx->cia);
5058 gen_helper_rfid(cpu_env);
5059 gen_sync_exception(ctx);
5060 #endif
5063 #if !defined(CONFIG_USER_ONLY)
5064 static void gen_rfscv(DisasContext *ctx)
5066 #if defined(CONFIG_USER_ONLY)
5067 GEN_PRIV;
5068 #else
5069 /* Restore CPU state */
5070 CHK_SV;
5071 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
5072 gen_io_start();
5074 gen_update_cfar(ctx, ctx->cia);
5075 gen_helper_rfscv(cpu_env);
5076 gen_sync_exception(ctx);
5077 #endif
5079 #endif
5081 static void gen_hrfid(DisasContext *ctx)
5083 #if defined(CONFIG_USER_ONLY)
5084 GEN_PRIV;
5085 #else
5086 /* Restore CPU state */
5087 CHK_HV;
5088 gen_helper_hrfid(cpu_env);
5089 gen_sync_exception(ctx);
5090 #endif
5092 #endif
5094 /* sc */
5095 #if defined(CONFIG_USER_ONLY)
5096 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
5097 #else
5098 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
5099 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED
5100 #endif
5101 static void gen_sc(DisasContext *ctx)
5103 uint32_t lev;
5105 lev = (ctx->opcode >> 5) & 0x7F;
5106 gen_exception_err(ctx, POWERPC_SYSCALL, lev);
5109 #if defined(TARGET_PPC64)
5110 #if !defined(CONFIG_USER_ONLY)
5111 static void gen_scv(DisasContext *ctx)
5113 uint32_t lev = (ctx->opcode >> 5) & 0x7F;
5115 /* Set the PC back to the faulting instruction. */
5116 if (ctx->exception == POWERPC_EXCP_NONE) {
5117 gen_update_nip(ctx, ctx->cia);
5119 gen_helper_scv(cpu_env, tcg_constant_i32(lev));
5121 /* This need not be exact, just not POWERPC_EXCP_NONE */
5122 ctx->exception = POWERPC_SYSCALL_VECTORED;
5124 #endif
5125 #endif
5127 /*** Trap ***/
5129 /* Check for unconditional traps (always or never) */
5130 static bool check_unconditional_trap(DisasContext *ctx)
5132 /* Trap never */
5133 if (TO(ctx->opcode) == 0) {
5134 return true;
5136 /* Trap always */
5137 if (TO(ctx->opcode) == 31) {
5138 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
5139 return true;
5141 return false;
5144 /* tw */
5145 static void gen_tw(DisasContext *ctx)
5147 TCGv_i32 t0;
5149 if (check_unconditional_trap(ctx)) {
5150 return;
5152 t0 = tcg_const_i32(TO(ctx->opcode));
5153 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
5154 t0);
5155 tcg_temp_free_i32(t0);
5158 /* twi */
5159 static void gen_twi(DisasContext *ctx)
5161 TCGv t0;
5162 TCGv_i32 t1;
5164 if (check_unconditional_trap(ctx)) {
5165 return;
5167 t0 = tcg_const_tl(SIMM(ctx->opcode));
5168 t1 = tcg_const_i32(TO(ctx->opcode));
5169 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
5170 tcg_temp_free(t0);
5171 tcg_temp_free_i32(t1);
5174 #if defined(TARGET_PPC64)
5175 /* td */
5176 static void gen_td(DisasContext *ctx)
5178 TCGv_i32 t0;
5180 if (check_unconditional_trap(ctx)) {
5181 return;
5183 t0 = tcg_const_i32(TO(ctx->opcode));
5184 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
5185 t0);
5186 tcg_temp_free_i32(t0);
5189 /* tdi */
5190 static void gen_tdi(DisasContext *ctx)
5192 TCGv t0;
5193 TCGv_i32 t1;
5195 if (check_unconditional_trap(ctx)) {
5196 return;
5198 t0 = tcg_const_tl(SIMM(ctx->opcode));
5199 t1 = tcg_const_i32(TO(ctx->opcode));
5200 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
5201 tcg_temp_free(t0);
5202 tcg_temp_free_i32(t1);
5204 #endif
5206 /*** Processor control ***/
5208 /* mcrxr */
5209 static void gen_mcrxr(DisasContext *ctx)
5211 TCGv_i32 t0 = tcg_temp_new_i32();
5212 TCGv_i32 t1 = tcg_temp_new_i32();
5213 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
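/* Assemble CR[BF] as SO:OV:CA:0, then clear SO, OV and CA in XER. */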
5215 tcg_gen_trunc_tl_i32(t0, cpu_so);
5216 tcg_gen_trunc_tl_i32(t1, cpu_ov);
5217 tcg_gen_trunc_tl_i32(dst, cpu_ca);
5218 tcg_gen_shli_i32(t0, t0, 3);
5219 tcg_gen_shli_i32(t1, t1, 2);
5220 tcg_gen_shli_i32(dst, dst, 1);
5221 tcg_gen_or_i32(dst, dst, t0);
5222 tcg_gen_or_i32(dst, dst, t1);
5223 tcg_temp_free_i32(t0);
5224 tcg_temp_free_i32(t1);
5226 tcg_gen_movi_tl(cpu_so, 0);
5227 tcg_gen_movi_tl(cpu_ov, 0);
5228 tcg_gen_movi_tl(cpu_ca, 0);
5231 #ifdef TARGET_PPC64
5232 /* mcrxrx */
5233 static void gen_mcrxrx(DisasContext *ctx)
5235 TCGv t0 = tcg_temp_new();
5236 TCGv t1 = tcg_temp_new();
5237 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
5239 /* copy OV and OV32 */
5240 tcg_gen_shli_tl(t0, cpu_ov, 1);
5241 tcg_gen_or_tl(t0, t0, cpu_ov32);
5242 tcg_gen_shli_tl(t0, t0, 2);
5243 /* copy CA and CA32 */
5244 tcg_gen_shli_tl(t1, cpu_ca, 1);
5245 tcg_gen_or_tl(t1, t1, cpu_ca32);
5246 tcg_gen_or_tl(t0, t0, t1);
5247 tcg_gen_trunc_tl_i32(dst, t0);
5248 tcg_temp_free(t0);
5249 tcg_temp_free(t1);
5251 #endif
5253 /* mfcr mfocrf */
5254 static void gen_mfcr(DisasContext *ctx)
5256 uint32_t crm, crn;
5258 if (likely(ctx->opcode & 0x00100000)) {
5259 crm = CRM(ctx->opcode);
5260 if (likely(crm && ((crm & (crm - 1)) == 0))) {
5261 crn = ctz32(crm);
5262 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
5263 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
5264 cpu_gpr[rD(ctx->opcode)], crn * 4);
5266 } else {
5267 TCGv_i32 t0 = tcg_temp_new_i32();
5268 tcg_gen_mov_i32(t0, cpu_crf[0]);
5269 tcg_gen_shli_i32(t0, t0, 4);
5270 tcg_gen_or_i32(t0, t0, cpu_crf[1]);
5271 tcg_gen_shli_i32(t0, t0, 4);
5272 tcg_gen_or_i32(t0, t0, cpu_crf[2]);
5273 tcg_gen_shli_i32(t0, t0, 4);
5274 tcg_gen_or_i32(t0, t0, cpu_crf[3]);
5275 tcg_gen_shli_i32(t0, t0, 4);
5276 tcg_gen_or_i32(t0, t0, cpu_crf[4]);
5277 tcg_gen_shli_i32(t0, t0, 4);
5278 tcg_gen_or_i32(t0, t0, cpu_crf[5]);
5279 tcg_gen_shli_i32(t0, t0, 4);
5280 tcg_gen_or_i32(t0, t0, cpu_crf[6]);
5281 tcg_gen_shli_i32(t0, t0, 4);
5282 tcg_gen_or_i32(t0, t0, cpu_crf[7]);
5283 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
5284 tcg_temp_free_i32(t0);
5288 /* mfmsr */
5289 static void gen_mfmsr(DisasContext *ctx)
5291 CHK_SV;
5292 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
5295 /* mfspr */
5296 static inline void gen_op_mfspr(DisasContext *ctx)
5298 void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
5299 uint32_t sprn = SPR(ctx->opcode);
5301 #if defined(CONFIG_USER_ONLY)
5302 read_cb = ctx->spr_cb[sprn].uea_read;
5303 #else
5304 if (ctx->pr) {
5305 read_cb = ctx->spr_cb[sprn].uea_read;
5306 } else if (ctx->hv) {
5307 read_cb = ctx->spr_cb[sprn].hea_read;
5308 } else {
5309 read_cb = ctx->spr_cb[sprn].oea_read;
5311 #endif
5312 if (likely(read_cb != NULL)) {
5313 if (likely(read_cb != SPR_NOACCESS)) {
5314 (*read_cb)(ctx, rD(ctx->opcode), sprn);
5315 } else {
5316 /* Privilege exception */
5318 * This is a hack to avoid warnings when running Linux:
5319 * this OS breaks the PowerPC virtualisation model,
5320 * allowing userland applications to read the PVR.
5322 if (sprn != SPR_PVR) {
5323 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
5324 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
5325 ctx->cia);
5327 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5329 } else {
5330 /* ISA 2.07 defines these as no-ops */
5331 if ((ctx->insns_flags2 & PPC2_ISA207S) &&
5332 (sprn >= 808 && sprn <= 811)) {
5333 /* This is a nop */
5334 return;
5336 /* Not defined */
5337 qemu_log_mask(LOG_GUEST_ERROR,
5338 "Trying to read invalid spr %d (0x%03x) at "
5339 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
5342 * The behaviour depends on MSR:PR and SPR# bit 0x10: it can
5343 * generate a priv, an hv emu or a no-op.
5345 if (sprn & 0x10) {
5346 if (ctx->pr) {
5347 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5349 } else {
5350 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
5351 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5357 static void gen_mfspr(DisasContext *ctx)
5359 gen_op_mfspr(ctx);
5362 /* mftb */
5363 static void gen_mftb(DisasContext *ctx)
5365 gen_op_mfspr(ctx);
5368 /* mtcrf mtocrf */
5369 static void gen_mtcrf(DisasContext *ctx)
5371 uint32_t crm, crn;
5373 crm = CRM(ctx->opcode);
5374 if (likely((ctx->opcode & 0x00100000))) {
5375 if (crm && ((crm & (crm - 1)) == 0)) {
5376 TCGv_i32 temp = tcg_temp_new_i32();
5377 crn = ctz32(crm);
5378 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
5379 tcg_gen_shri_i32(temp, temp, crn * 4);
5380 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
5381 tcg_temp_free_i32(temp);
5383 } else {
5384 TCGv_i32 temp = tcg_temp_new_i32();
5385 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
5386 for (crn = 0 ; crn < 8 ; crn++) {
5387 if (crm & (1 << crn)) {
5388 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
5389 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
5392 tcg_temp_free_i32(temp);
5396 /* mtmsr */
5397 #if defined(TARGET_PPC64)
5398 static void gen_mtmsrd(DisasContext *ctx)
5400 CHK_SV;
5402 #if !defined(CONFIG_USER_ONLY)
5403 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
5404 gen_io_start();
5406 if (ctx->opcode & 0x00010000) {
5407 /* L=1 form only updates EE and RI */
5408 TCGv t0 = tcg_temp_new();
5409 TCGv t1 = tcg_temp_new();
5410 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)],
5411 (1 << MSR_RI) | (1 << MSR_EE));
5412 tcg_gen_andi_tl(t1, cpu_msr,
5413 ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
5414 tcg_gen_or_tl(t1, t1, t0);
5416 gen_helper_store_msr(cpu_env, t1);
5417 tcg_temp_free(t0);
5418 tcg_temp_free(t1);
5420 } else {
5422 * XXX: we need to update nip before the store because, if we
5423 * enter power saving mode, we will exit the loop directly from
5424 * ppc_store_msr.
5426 gen_update_nip(ctx, ctx->base.pc_next);
5427 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]);
5429 /* Must stop the translation as machine state (may have) changed */
5430 gen_stop_exception(ctx);
5431 #endif /* !defined(CONFIG_USER_ONLY) */
5433 #endif /* defined(TARGET_PPC64) */
5435 static void gen_mtmsr(DisasContext *ctx)
5437 CHK_SV;
5439 #if !defined(CONFIG_USER_ONLY)
5440 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
5441 gen_io_start();
5443 if (ctx->opcode & 0x00010000) {
5444 /* L=1 form only updates EE and RI */
5445 TCGv t0 = tcg_temp_new();
5446 TCGv t1 = tcg_temp_new();
5447 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)],
5448 (1 << MSR_RI) | (1 << MSR_EE));
5449 tcg_gen_andi_tl(t1, cpu_msr,
5450 ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
5451 tcg_gen_or_tl(t1, t1, t0);
5453 gen_helper_store_msr(cpu_env, t1);
5454 tcg_temp_free(t0);
5455 tcg_temp_free(t1);
5457 } else {
5458 TCGv msr = tcg_temp_new();
5461 * XXX: we need to update nip before the store because, if we
5462 * enter power saving mode, we will exit the loop directly from
5463 * ppc_store_msr.
5465 gen_update_nip(ctx, ctx->base.pc_next);
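/*
 * On a 64-bit target, mtmsr only replaces the low 32 bits of the MSR
 * (the deposit below); the upper half keeps its current value.  On a
 * 32-bit target the whole register is copied.
 */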
5466 #if defined(TARGET_PPC64)
5467 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
5468 #else
5469 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
5470 #endif
5471 gen_helper_store_msr(cpu_env, msr);
5472 tcg_temp_free(msr);
5474 /* Must stop the translation as machine state (may have) changed */
5475 gen_stop_exception(ctx);
5476 #endif
5479 /* mtspr */
5480 static void gen_mtspr(DisasContext *ctx)
5482 void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
5483 uint32_t sprn = SPR(ctx->opcode);
5485 #if defined(CONFIG_USER_ONLY)
5486 write_cb = ctx->spr_cb[sprn].uea_write;
5487 #else
5488 if (ctx->pr) {
5489 write_cb = ctx->spr_cb[sprn].uea_write;
5490 } else if (ctx->hv) {
5491 write_cb = ctx->spr_cb[sprn].hea_write;
5492 } else {
5493 write_cb = ctx->spr_cb[sprn].oea_write;
5495 #endif
5496 if (likely(write_cb != NULL)) {
5497 if (likely(write_cb != SPR_NOACCESS)) {
5498 (*write_cb)(ctx, sprn, rS(ctx->opcode));
5499 } else {
5500 /* Privilege exception */
5501 qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
5502 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
5503 ctx->cia);
5504 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5506 } else {
5507 /* ISA 2.07 defines these as no-ops */
5508 if ((ctx->insns_flags2 & PPC2_ISA207S) &&
5509 (sprn >= 808 && sprn <= 811)) {
5510 /* This is a nop */
5511 return;
5514 /* Not defined */
5515 qemu_log_mask(LOG_GUEST_ERROR,
5516 "Trying to write invalid spr %d (0x%03x) at "
5517 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
5521 * The behaviour depends on MSR:PR and SPR# bit 0x10: it can
5522 * generate a priv, an hv emu or a no-op.
5524 if (sprn & 0x10) {
5525 if (ctx->pr) {
5526 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5528 } else {
5529 if (ctx->pr || sprn == 0) {
5530 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5536 #if defined(TARGET_PPC64)
5537 /* setb */
5538 static void gen_setb(DisasContext *ctx)
5540 TCGv_i32 t0 = tcg_temp_new_i32();
5541 TCGv_i32 t8 = tcg_temp_new_i32();
5542 TCGv_i32 tm1 = tcg_temp_new_i32();
5543 int crf = crfS(ctx->opcode);
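/*
 * The CR field is LT:GT:EQ:SO, so a field value >= 8 means LT is set
 * (result -1) and a value in [4, 8) means GT is set without LT
 * (result 1); anything else gives 0.
 */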
5545 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
5546 tcg_gen_movi_i32(t8, 8);
5547 tcg_gen_movi_i32(tm1, -1);
5548 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
5549 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
5551 tcg_temp_free_i32(t0);
5552 tcg_temp_free_i32(t8);
5553 tcg_temp_free_i32(tm1);
5555 #endif
5557 /*** Cache management ***/
5559 /* dcbf */
5560 static void gen_dcbf(DisasContext *ctx)
5562 /* XXX: specification says this is treated as a load by the MMU */
5563 TCGv t0;
5564 gen_set_access_type(ctx, ACCESS_CACHE);
5565 t0 = tcg_temp_new();
5566 gen_addr_reg_index(ctx, t0);
5567 gen_qemu_ld8u(ctx, t0, t0);
5568 tcg_temp_free(t0);
5571 /* dcbfep (external PID dcbf) */
5572 static void gen_dcbfep(DisasContext *ctx)
5574 /* XXX: specification says this is treated as a load by the MMU */
5575 TCGv t0;
5576 CHK_SV;
5577 gen_set_access_type(ctx, ACCESS_CACHE);
5578 t0 = tcg_temp_new();
5579 gen_addr_reg_index(ctx, t0);
5580 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5581 tcg_temp_free(t0);
5584 /* dcbi (Supervisor only) */
5585 static void gen_dcbi(DisasContext *ctx)
5587 #if defined(CONFIG_USER_ONLY)
5588 GEN_PRIV;
5589 #else
5590 TCGv EA, val;
5592 CHK_SV;
5593 EA = tcg_temp_new();
5594 gen_set_access_type(ctx, ACCESS_CACHE);
5595 gen_addr_reg_index(ctx, EA);
5596 val = tcg_temp_new();
5597 /* XXX: specification says this should be treated as a store by the MMU */
5598 gen_qemu_ld8u(ctx, val, EA);
5599 gen_qemu_st8(ctx, val, EA);
5600 tcg_temp_free(val);
5601 tcg_temp_free(EA);
5602 #endif /* defined(CONFIG_USER_ONLY) */
5605 /* dcbst */
5606 static void gen_dcbst(DisasContext *ctx)
5608 /* XXX: specification says this is treated as a load by the MMU */
5609 TCGv t0;
5610 gen_set_access_type(ctx, ACCESS_CACHE);
5611 t0 = tcg_temp_new();
5612 gen_addr_reg_index(ctx, t0);
5613 gen_qemu_ld8u(ctx, t0, t0);
5614 tcg_temp_free(t0);
5617 /* dcbstep (external PID dcbst) */
5618 static void gen_dcbstep(DisasContext *ctx)
5620 /* XXX: specification says this is treated as a load by the MMU */
5621 TCGv t0;
5622 gen_set_access_type(ctx, ACCESS_CACHE);
5623 t0 = tcg_temp_new();
5624 gen_addr_reg_index(ctx, t0);
5625 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5626 tcg_temp_free(t0);
5629 /* dcbt */
5630 static void gen_dcbt(DisasContext *ctx)
5633 * interpreted as no-op
5634 * XXX: specification says this is treated as a load by the MMU but
5635 * does not generate any exception
5639 /* dcbtep */
5640 static void gen_dcbtep(DisasContext *ctx)
5643 * interpreted as no-op
5644 * XXX: specification says this is treated as a load by the MMU but
5645 * does not generate any exception
5649 /* dcbtst */
5650 static void gen_dcbtst(DisasContext *ctx)
5653 * interpreted as no-op
5654 * XXX: specification says this is treated as a load by the MMU but
5655 * does not generate any exception
5659 /* dcbtstep */
5660 static void gen_dcbtstep(DisasContext *ctx)
5663 * interpreted as no-op
5665 * XXX: specification says this is treated as a load by the MMU but
5665 * does not generate any exception
5669 /* dcbtls */
5670 static void gen_dcbtls(DisasContext *ctx)
5672 /* Always fails locking the cache */
5673 TCGv t0 = tcg_temp_new();
5674 gen_load_spr(t0, SPR_Exxx_L1CSR0);
5675 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
5676 gen_store_spr(SPR_Exxx_L1CSR0, t0);
5677 tcg_temp_free(t0);
5680 /* dcbz */
5681 static void gen_dcbz(DisasContext *ctx)
5683 TCGv tcgv_addr;
5684 TCGv_i32 tcgv_op;
5686 gen_set_access_type(ctx, ACCESS_CACHE);
5687 tcgv_addr = tcg_temp_new();
5688 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5689 gen_addr_reg_index(ctx, tcgv_addr);
5690 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
5691 tcg_temp_free(tcgv_addr);
5692 tcg_temp_free_i32(tcgv_op);
5695 /* dcbzep */
5696 static void gen_dcbzep(DisasContext *ctx)
5698 TCGv tcgv_addr;
5699 TCGv_i32 tcgv_op;
5701 gen_set_access_type(ctx, ACCESS_CACHE);
5702 tcgv_addr = tcg_temp_new();
5703 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5704 gen_addr_reg_index(ctx, tcgv_addr);
5705 gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
5706 tcg_temp_free(tcgv_addr);
5707 tcg_temp_free_i32(tcgv_op);
5710 /* dst / dstt */
5711 static void gen_dst(DisasContext *ctx)
5713 if (rA(ctx->opcode) == 0) {
5714 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5715 } else {
5716 /* interpreted as no-op */
5720 /* dstst / dststt */
5721 static void gen_dstst(DisasContext *ctx)
5723 if (rA(ctx->opcode) == 0) {
5724 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5725 } else {
5726 /* interpreted as no-op */
5731 /* dss / dssall */
5732 static void gen_dss(DisasContext *ctx)
5734 /* interpreted as no-op */
5737 /* icbi */
5738 static void gen_icbi(DisasContext *ctx)
5740 TCGv t0;
5741 gen_set_access_type(ctx, ACCESS_CACHE);
5742 t0 = tcg_temp_new();
5743 gen_addr_reg_index(ctx, t0);
5744 gen_helper_icbi(cpu_env, t0);
5745 tcg_temp_free(t0);
5748 /* icbiep */
5749 static void gen_icbiep(DisasContext *ctx)
5751 TCGv t0;
5752 gen_set_access_type(ctx, ACCESS_CACHE);
5753 t0 = tcg_temp_new();
5754 gen_addr_reg_index(ctx, t0);
5755 gen_helper_icbiep(cpu_env, t0);
5756 tcg_temp_free(t0);
5759 /* Optional: */
5760 /* dcba */
5761 static void gen_dcba(DisasContext *ctx)
5764 * interpreted as no-op
5765 * XXX: specification says this is treated as a store by the MMU
5766 * but does not generate any exception
5770 /*** Segment register manipulation ***/
5771 /* Supervisor only: */
5773 /* mfsr */
5774 static void gen_mfsr(DisasContext *ctx)
5776 #if defined(CONFIG_USER_ONLY)
5777 GEN_PRIV;
5778 #else
5779 TCGv t0;
5781 CHK_SV;
5782 t0 = tcg_const_tl(SR(ctx->opcode));
5783 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5784 tcg_temp_free(t0);
5785 #endif /* defined(CONFIG_USER_ONLY) */
5788 /* mfsrin */
5789 static void gen_mfsrin(DisasContext *ctx)
5791 #if defined(CONFIG_USER_ONLY)
5792 GEN_PRIV;
5793 #else
5794 TCGv t0;
5796 CHK_SV;
5797 t0 = tcg_temp_new();
5798 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5799 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5800 tcg_temp_free(t0);
5801 #endif /* defined(CONFIG_USER_ONLY) */
5804 /* mtsr */
5805 static void gen_mtsr(DisasContext *ctx)
5807 #if defined(CONFIG_USER_ONLY)
5808 GEN_PRIV;
5809 #else
5810 TCGv t0;
5812 CHK_SV;
5813 t0 = tcg_const_tl(SR(ctx->opcode));
5814 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5815 tcg_temp_free(t0);
5816 #endif /* defined(CONFIG_USER_ONLY) */
5819 /* mtsrin */
5820 static void gen_mtsrin(DisasContext *ctx)
5822 #if defined(CONFIG_USER_ONLY)
5823 GEN_PRIV;
5824 #else
5825 TCGv t0;
5826 CHK_SV;
5828 t0 = tcg_temp_new();
5829 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5830 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
5831 tcg_temp_free(t0);
5832 #endif /* defined(CONFIG_USER_ONLY) */
5835 #if defined(TARGET_PPC64)
5836 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
5838 /* mfsr */
5839 static void gen_mfsr_64b(DisasContext *ctx)
5841 #if defined(CONFIG_USER_ONLY)
5842 GEN_PRIV;
5843 #else
5844 TCGv t0;
5846 CHK_SV;
5847 t0 = tcg_const_tl(SR(ctx->opcode));
5848 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5849 tcg_temp_free(t0);
5850 #endif /* defined(CONFIG_USER_ONLY) */
5853 /* mfsrin */
5854 static void gen_mfsrin_64b(DisasContext *ctx)
5856 #if defined(CONFIG_USER_ONLY)
5857 GEN_PRIV;
5858 #else
5859 TCGv t0;
5861 CHK_SV;
5862 t0 = tcg_temp_new();
5863 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5864 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5865 tcg_temp_free(t0);
5866 #endif /* defined(CONFIG_USER_ONLY) */
5869 /* mtsr */
5870 static void gen_mtsr_64b(DisasContext *ctx)
5872 #if defined(CONFIG_USER_ONLY)
5873 GEN_PRIV;
5874 #else
5875 TCGv t0;
5877 CHK_SV;
5878 t0 = tcg_const_tl(SR(ctx->opcode));
5879 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5880 tcg_temp_free(t0);
5881 #endif /* defined(CONFIG_USER_ONLY) */
5884 /* mtsrin */
5885 static void gen_mtsrin_64b(DisasContext *ctx)
5887 #if defined(CONFIG_USER_ONLY)
5888 GEN_PRIV;
5889 #else
5890 TCGv t0;
5892 CHK_SV;
5893 t0 = tcg_temp_new();
5894 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5895 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5896 tcg_temp_free(t0);
5897 #endif /* defined(CONFIG_USER_ONLY) */
5900 /* slbmte */
5901 static void gen_slbmte(DisasContext *ctx)
5903 #if defined(CONFIG_USER_ONLY)
5904 GEN_PRIV;
5905 #else
5906 CHK_SV;
5908 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
5909 cpu_gpr[rS(ctx->opcode)]);
5910 #endif /* defined(CONFIG_USER_ONLY) */
5913 static void gen_slbmfee(DisasContext *ctx)
5915 #if defined(CONFIG_USER_ONLY)
5916 GEN_PRIV;
5917 #else
5918 CHK_SV;
5920 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5921 cpu_gpr[rB(ctx->opcode)]);
5922 #endif /* defined(CONFIG_USER_ONLY) */
5925 static void gen_slbmfev(DisasContext *ctx)
5927 #if defined(CONFIG_USER_ONLY)
5928 GEN_PRIV;
5929 #else
5930 CHK_SV;
5932 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5933 cpu_gpr[rB(ctx->opcode)]);
5934 #endif /* defined(CONFIG_USER_ONLY) */
5937 static void gen_slbfee_(DisasContext *ctx)
5939 #if defined(CONFIG_USER_ONLY)
5940 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5941 #else
5942 TCGLabel *l1, *l2;
5944 if (unlikely(ctx->pr)) {
5945 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5946 return;
5948 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5949 cpu_gpr[rB(ctx->opcode)]);
5950 l1 = gen_new_label();
5951 l2 = gen_new_label();
5952 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5953 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1);
5954 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
5955 tcg_gen_br(l2);
5956 gen_set_label(l1);
5957 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0);
5958 gen_set_label(l2);
5959 #endif
5961 #endif /* defined(TARGET_PPC64) */
5963 /*** Lookaside buffer management ***/
5964 /* Optional & supervisor only: */
5966 /* tlbia */
5967 static void gen_tlbia(DisasContext *ctx)
5969 #if defined(CONFIG_USER_ONLY)
5970 GEN_PRIV;
5971 #else
5972 CHK_HV;
5974 gen_helper_tlbia(cpu_env);
5975 #endif /* defined(CONFIG_USER_ONLY) */
5978 /* tlbiel */
5979 static void gen_tlbiel(DisasContext *ctx)
5981 #if defined(CONFIG_USER_ONLY)
5982 GEN_PRIV;
5983 #else
5984 CHK_SV;
5986 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5987 #endif /* defined(CONFIG_USER_ONLY) */
5990 /* tlbie */
5991 static void gen_tlbie(DisasContext *ctx)
5993 #if defined(CONFIG_USER_ONLY)
5994 GEN_PRIV;
5995 #else
5996 TCGv_i32 t1;
5998 if (ctx->gtse) {
5999 CHK_SV; /* If gtse is set then tlbie is supervisor privileged */
6000 } else {
6001 CHK_HV; /* Else hypervisor privileged */
6004 if (NARROW_MODE(ctx)) {
6005 TCGv t0 = tcg_temp_new();
6006 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
6007 gen_helper_tlbie(cpu_env, t0);
6008 tcg_temp_free(t0);
6009 } else {
6010 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
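/*
 * Record that a global flush is still needed; gen_check_tlb_flush()
 * performs it lazily, e.g. at the next ptesync or tlbsync.
 */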
6012 t1 = tcg_temp_new_i32();
6013 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
6014 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH);
6015 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
6016 tcg_temp_free_i32(t1);
6017 #endif /* defined(CONFIG_USER_ONLY) */
6020 /* tlbsync */
6021 static void gen_tlbsync(DisasContext *ctx)
6023 #if defined(CONFIG_USER_ONLY)
6024 GEN_PRIV;
6025 #else
6027 if (ctx->gtse) {
6028 CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */
6029 } else {
6030 CHK_HV; /* Else hypervisor privileged */
6033 /* BookS does both ptesync and tlbsync; the flush is handled at ptesync, so make tlbsync a nop for server */
6034 if (ctx->insns_flags & PPC_BOOKE) {
6035 gen_check_tlb_flush(ctx, true);
6037 #endif /* defined(CONFIG_USER_ONLY) */
6040 #if defined(TARGET_PPC64)
6041 /* slbia */
6042 static void gen_slbia(DisasContext *ctx)
6044 #if defined(CONFIG_USER_ONLY)
6045 GEN_PRIV;
6046 #else
6047 uint32_t ih = (ctx->opcode >> 21) & 0x7;
6048 TCGv_i32 t0 = tcg_const_i32(ih);
6050 CHK_SV;
6052 gen_helper_slbia(cpu_env, t0);
6053 tcg_temp_free_i32(t0);
6054 #endif /* defined(CONFIG_USER_ONLY) */
6057 /* slbie */
6058 static void gen_slbie(DisasContext *ctx)
6060 #if defined(CONFIG_USER_ONLY)
6061 GEN_PRIV;
6062 #else
6063 CHK_SV;
6065 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6066 #endif /* defined(CONFIG_USER_ONLY) */
6069 /* slbieg */
6070 static void gen_slbieg(DisasContext *ctx)
6072 #if defined(CONFIG_USER_ONLY)
6073 GEN_PRIV;
6074 #else
6075 CHK_SV;
6077 gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6078 #endif /* defined(CONFIG_USER_ONLY) */
6081 /* slbsync */
6082 static void gen_slbsync(DisasContext *ctx)
6084 #if defined(CONFIG_USER_ONLY)
6085 GEN_PRIV;
6086 #else
6087 CHK_SV;
6088 gen_check_tlb_flush(ctx, true);
6089 #endif /* defined(CONFIG_USER_ONLY) */
6092 #endif /* defined(TARGET_PPC64) */
6094 /*** External control ***/
6095 /* Optional: */
6097 /* eciwx */
6098 static void gen_eciwx(DisasContext *ctx)
6100 TCGv t0;
6101 /* Should check EAR[E] ! */
6102 gen_set_access_type(ctx, ACCESS_EXT);
6103 t0 = tcg_temp_new();
6104 gen_addr_reg_index(ctx, t0);
6105 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
6106 DEF_MEMOP(MO_UL | MO_ALIGN));
6107 tcg_temp_free(t0);
6110 /* ecowx */
6111 static void gen_ecowx(DisasContext *ctx)
6113 TCGv t0;
6114 /* Should check EAR[E] ! */
6115 gen_set_access_type(ctx, ACCESS_EXT);
6116 t0 = tcg_temp_new();
6117 gen_addr_reg_index(ctx, t0);
6118 tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
6119 DEF_MEMOP(MO_UL | MO_ALIGN));
6120 tcg_temp_free(t0);
6123 /* PowerPC 601 specific instructions */
6125 /* abs - abs. */
6126 static void gen_abs(DisasContext *ctx)
6128 TCGv d = cpu_gpr[rD(ctx->opcode)];
6129 TCGv a = cpu_gpr[rA(ctx->opcode)];
6131 tcg_gen_abs_tl(d, a);
6132 if (unlikely(Rc(ctx->opcode) != 0)) {
6133 gen_set_Rc0(ctx, d);
6137 /* abso - abso. */
6138 static void gen_abso(DisasContext *ctx)
6140 TCGv d = cpu_gpr[rD(ctx->opcode)];
6141 TCGv a = cpu_gpr[rA(ctx->opcode)];
6143 tcg_gen_setcondi_tl(TCG_COND_EQ, cpu_ov, a, 0x80000000);
6144 tcg_gen_abs_tl(d, a);
6145 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
6146 if (unlikely(Rc(ctx->opcode) != 0)) {
6147 gen_set_Rc0(ctx, d);
6151 /* clcs */
6152 static void gen_clcs(DisasContext *ctx)
6154 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
6155 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6156 tcg_temp_free_i32(t0);
6157 /* Rc=1 sets CR0 to an undefined state */
6160 /* div - div. */
6161 static void gen_div(DisasContext *ctx)
6163 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
6164 cpu_gpr[rB(ctx->opcode)]);
6165 if (unlikely(Rc(ctx->opcode) != 0)) {
6166 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6170 /* divo - divo. */
6171 static void gen_divo(DisasContext *ctx)
6173 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
6174 cpu_gpr[rB(ctx->opcode)]);
6175 if (unlikely(Rc(ctx->opcode) != 0)) {
6176 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6180 /* divs - divs. */
6181 static void gen_divs(DisasContext *ctx)
6183 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
6184 cpu_gpr[rB(ctx->opcode)]);
6185 if (unlikely(Rc(ctx->opcode) != 0)) {
6186 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6190 /* divso - divso. */
6191 static void gen_divso(DisasContext *ctx)
6193 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env,
6194 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6195 if (unlikely(Rc(ctx->opcode) != 0)) {
6196 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6200 /* doz - doz. */
6201 static void gen_doz(DisasContext *ctx)
6203 TCGLabel *l1 = gen_new_label();
6204 TCGLabel *l2 = gen_new_label();
6205 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)],
6206 cpu_gpr[rA(ctx->opcode)], l1);
6207 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
6208 cpu_gpr[rA(ctx->opcode)]);
6209 tcg_gen_br(l2);
6210 gen_set_label(l1);
6211 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
6212 gen_set_label(l2);
6213 if (unlikely(Rc(ctx->opcode) != 0)) {
6214 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6218 /* dozo - dozo. */
6219 static void gen_dozo(DisasContext *ctx)
6221 TCGLabel *l1 = gen_new_label();
6222 TCGLabel *l2 = gen_new_label();
6223 TCGv t0 = tcg_temp_new();
6224 TCGv t1 = tcg_temp_new();
6225 TCGv t2 = tcg_temp_new();
6226 /* Start with XER OV disabled, the most likely case */
6227 tcg_gen_movi_tl(cpu_ov, 0);
6228 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)],
6229 cpu_gpr[rA(ctx->opcode)], l1);
6230 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6231 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6232 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
6233 tcg_gen_andc_tl(t1, t1, t2);
6234 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
6235 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
6236 tcg_gen_movi_tl(cpu_ov, 1);
6237 tcg_gen_movi_tl(cpu_so, 1);
6238 tcg_gen_br(l2);
6239 gen_set_label(l1);
6240 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
6241 gen_set_label(l2);
6242 tcg_temp_free(t0);
6243 tcg_temp_free(t1);
6244 tcg_temp_free(t2);
6245 if (unlikely(Rc(ctx->opcode) != 0)) {
6246 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6250 /* dozi */
6251 static void gen_dozi(DisasContext *ctx)
6253 target_long simm = SIMM(ctx->opcode);
6254 TCGLabel *l1 = gen_new_label();
6255 TCGLabel *l2 = gen_new_label();
6256 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
6257 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
6258 tcg_gen_br(l2);
6259 gen_set_label(l1);
6260 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
6261 gen_set_label(l2);
6262 if (unlikely(Rc(ctx->opcode) != 0)) {
6263 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6267 /* lscbx - lscbx. */
6268 static void gen_lscbx(DisasContext *ctx)
6270 TCGv t0 = tcg_temp_new();
6271 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
6272 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
6273 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
6275 gen_addr_reg_index(ctx, t0);
6276 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3);
6277 tcg_temp_free_i32(t1);
6278 tcg_temp_free_i32(t2);
6279 tcg_temp_free_i32(t3);
6280 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
6281 tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
6282 if (unlikely(Rc(ctx->opcode) != 0)) {
6283 gen_set_Rc0(ctx, t0);
6285 tcg_temp_free(t0);
6288 /* maskg - maskg. */
6289 static void gen_maskg(DisasContext *ctx)
6291 TCGLabel *l1 = gen_new_label();
6292 TCGv t0 = tcg_temp_new();
6293 TCGv t1 = tcg_temp_new();
6294 TCGv t2 = tcg_temp_new();
6295 TCGv t3 = tcg_temp_new();
6296 tcg_gen_movi_tl(t3, 0xFFFFFFFF);
6297 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6298 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
6299 tcg_gen_addi_tl(t2, t0, 1);
6300 tcg_gen_shr_tl(t2, t3, t2);
6301 tcg_gen_shr_tl(t3, t3, t1);
6302 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
6303 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
6304 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6305 gen_set_label(l1);
6306 tcg_temp_free(t0);
6307 tcg_temp_free(t1);
6308 tcg_temp_free(t2);
6309 tcg_temp_free(t3);
6310 if (unlikely(Rc(ctx->opcode) != 0)) {
6311 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6315 /* maskir - maskir. */
6316 static void gen_maskir(DisasContext *ctx)
6318 TCGv t0 = tcg_temp_new();
6319 TCGv t1 = tcg_temp_new();
6320 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6321 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6322 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6323 tcg_temp_free(t0);
6324 tcg_temp_free(t1);
6325 if (unlikely(Rc(ctx->opcode) != 0)) {
6326 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6330 /* mul - mul. */
6331 static void gen_mul(DisasContext *ctx)
6333 TCGv_i64 t0 = tcg_temp_new_i64();
6334 TCGv_i64 t1 = tcg_temp_new_i64();
6335 TCGv t2 = tcg_temp_new();
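/* 601 mul: the low 32 bits of the product go to MQ, the high 32 bits to rD. */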
6336 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
6337 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
6338 tcg_gen_mul_i64(t0, t0, t1);
6339 tcg_gen_trunc_i64_tl(t2, t0);
6340 gen_store_spr(SPR_MQ, t2);
6341 tcg_gen_shri_i64(t1, t0, 32);
6342 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
6343 tcg_temp_free_i64(t0);
6344 tcg_temp_free_i64(t1);
6345 tcg_temp_free(t2);
6346 if (unlikely(Rc(ctx->opcode) != 0)) {
6347 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6351 /* mulo - mulo. */
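/*
 * Same as mul, but XER[OV] and XER[SO] are also set when the 64-bit
 * product does not sign-extend from its low 32 bits.
 */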
6352 static void gen_mulo(DisasContext *ctx)
6354 TCGLabel *l1 = gen_new_label();
6355 TCGv_i64 t0 = tcg_temp_new_i64();
6356 TCGv_i64 t1 = tcg_temp_new_i64();
6357 TCGv t2 = tcg_temp_new();
6358 /* Start with XER OV disabled, the most likely case */
6359 tcg_gen_movi_tl(cpu_ov, 0);
6360 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
6361 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
6362 tcg_gen_mul_i64(t0, t0, t1);
6363 tcg_gen_trunc_i64_tl(t2, t0);
6364 gen_store_spr(SPR_MQ, t2);
6365 tcg_gen_shri_i64(t1, t0, 32);
6366 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
6367 tcg_gen_ext32s_i64(t1, t0);
6368 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
6369 tcg_gen_movi_tl(cpu_ov, 1);
6370 tcg_gen_movi_tl(cpu_so, 1);
6371 gen_set_label(l1);
6372 tcg_temp_free_i64(t0);
6373 tcg_temp_free_i64(t1);
6374 tcg_temp_free(t2);
6375 if (unlikely(Rc(ctx->opcode) != 0)) {
6376 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6380 /* nabs - nabs. */
6381 static void gen_nabs(DisasContext *ctx)
6383 TCGv d = cpu_gpr[rD(ctx->opcode)];
6384 TCGv a = cpu_gpr[rA(ctx->opcode)];
6386 tcg_gen_abs_tl(d, a);
6387 tcg_gen_neg_tl(d, d);
6388 if (unlikely(Rc(ctx->opcode) != 0)) {
6389 gen_set_Rc0(ctx, d);
6393 /* nabso - nabso. */
6394 static void gen_nabso(DisasContext *ctx)
6396 TCGv d = cpu_gpr[rD(ctx->opcode)];
6397 TCGv a = cpu_gpr[rA(ctx->opcode)];
6399 tcg_gen_abs_tl(d, a);
6400 tcg_gen_neg_tl(d, d);
6401 /* nabs never overflows */
6402 tcg_gen_movi_tl(cpu_ov, 0);
6403 if (unlikely(Rc(ctx->opcode) != 0)) {
6404 gen_set_Rc0(ctx, d);
6408 /* rlmi - rlmi. */
6409 static void gen_rlmi(DisasContext *ctx)
6411 uint32_t mb = MB(ctx->opcode);
6412 uint32_t me = ME(ctx->opcode);
6413 TCGv t0 = tcg_temp_new();
6414 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6415 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
6416 tcg_gen_andi_tl(t0, t0, MASK(mb, me));
6417 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
6418 ~MASK(mb, me));
6419 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
6420 tcg_temp_free(t0);
6421 if (unlikely(Rc(ctx->opcode) != 0)) {
6422 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6426 /* rrib - rrib. */
6427 static void gen_rrib(DisasContext *ctx)
6429 TCGv t0 = tcg_temp_new();
6430 TCGv t1 = tcg_temp_new();
6431 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6432 tcg_gen_movi_tl(t1, 0x80000000);
6433 tcg_gen_shr_tl(t1, t1, t0);
6434 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
6435 tcg_gen_and_tl(t0, t0, t1);
6436 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
6437 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6438 tcg_temp_free(t0);
6439 tcg_temp_free(t1);
6440 if (unlikely(Rc(ctx->opcode) != 0)) {
6441 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6445 /* sle - sle. */
6446 static void gen_sle(DisasContext *ctx)
6448 TCGv t0 = tcg_temp_new();
6449 TCGv t1 = tcg_temp_new();
6450 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6451 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6452 tcg_gen_subfi_tl(t1, 32, t1);
6453 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6454 tcg_gen_or_tl(t1, t0, t1);
6455 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6456 gen_store_spr(SPR_MQ, t1);
6457 tcg_temp_free(t0);
6458 tcg_temp_free(t1);
6459 if (unlikely(Rc(ctx->opcode) != 0)) {
6460 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6464 /* sleq - sleq. */
6465 static void gen_sleq(DisasContext *ctx)
6467 TCGv t0 = tcg_temp_new();
6468 TCGv t1 = tcg_temp_new();
6469 TCGv t2 = tcg_temp_new();
6470 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6471 tcg_gen_movi_tl(t2, 0xFFFFFFFF);
6472 tcg_gen_shl_tl(t2, t2, t0);
6473 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
6474 gen_load_spr(t1, SPR_MQ);
6475 gen_store_spr(SPR_MQ, t0);
6476 tcg_gen_and_tl(t0, t0, t2);
6477 tcg_gen_andc_tl(t1, t1, t2);
6478 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6479 tcg_temp_free(t0);
6480 tcg_temp_free(t1);
6481 tcg_temp_free(t2);
6482 if (unlikely(Rc(ctx->opcode) != 0)) {
6483 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6487 /* sliq - sliq. */
6488 static void gen_sliq(DisasContext *ctx)
6490 int sh = SH(ctx->opcode);
6491 TCGv t0 = tcg_temp_new();
6492 TCGv t1 = tcg_temp_new();
6493 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6494 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
6495 tcg_gen_or_tl(t1, t0, t1);
6496 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6497 gen_store_spr(SPR_MQ, t1);
6498 tcg_temp_free(t0);
6499 tcg_temp_free(t1);
6500 if (unlikely(Rc(ctx->opcode) != 0)) {
6501 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6505 /* slliq - slliq. */
6506 static void gen_slliq(DisasContext *ctx)
6508 int sh = SH(ctx->opcode);
6509 TCGv t0 = tcg_temp_new();
6510 TCGv t1 = tcg_temp_new();
6511 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6512 gen_load_spr(t1, SPR_MQ);
6513 gen_store_spr(SPR_MQ, t0);
6514 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
6515 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
6516 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6517 tcg_temp_free(t0);
6518 tcg_temp_free(t1);
6519 if (unlikely(Rc(ctx->opcode) != 0)) {
6520 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6524 /* sllq - sllq. */
6525 static void gen_sllq(DisasContext *ctx)
6527 TCGLabel *l1 = gen_new_label();
6528 TCGLabel *l2 = gen_new_label();
6529 TCGv t0 = tcg_temp_local_new();
6530 TCGv t1 = tcg_temp_local_new();
6531 TCGv t2 = tcg_temp_local_new();
6532 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
6533 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
6534 tcg_gen_shl_tl(t1, t1, t2);
6535 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
6536 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6537 gen_load_spr(t0, SPR_MQ);
6538 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6539 tcg_gen_br(l2);
6540 gen_set_label(l1);
6541 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
6542 gen_load_spr(t2, SPR_MQ);
6543 tcg_gen_andc_tl(t1, t2, t1);
6544 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6545 gen_set_label(l2);
6546 tcg_temp_free(t0);
6547 tcg_temp_free(t1);
6548 tcg_temp_free(t2);
6549 if (unlikely(Rc(ctx->opcode) != 0)) {
6550 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6554 /* slq - slq. */
6555 static void gen_slq(DisasContext *ctx)
6557 TCGLabel *l1 = gen_new_label();
6558 TCGv t0 = tcg_temp_new();
6559 TCGv t1 = tcg_temp_new();
6560 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6561 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6562 tcg_gen_subfi_tl(t1, 32, t1);
6563 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6564 tcg_gen_or_tl(t1, t0, t1);
6565 gen_store_spr(SPR_MQ, t1);
6566 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
6567 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6568 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
6569 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
6570 gen_set_label(l1);
6571 tcg_temp_free(t0);
6572 tcg_temp_free(t1);
6573 if (unlikely(Rc(ctx->opcode) != 0)) {
6574 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6578 /* sraiq - sraiq. */
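/*
 * rA = rS >> sh (arithmetic); MQ receives (rS >> sh) | (rS << (32 - sh)),
 * and CA is set only when rS is negative and at least one 1 bit is
 * shifted out.
 */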
6579 static void gen_sraiq(DisasContext *ctx)
6581 int sh = SH(ctx->opcode);
6582 TCGLabel *l1 = gen_new_label();
6583 TCGv t0 = tcg_temp_new();
6584 TCGv t1 = tcg_temp_new();
6585 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6586 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
6587 tcg_gen_or_tl(t0, t0, t1);
6588 gen_store_spr(SPR_MQ, t0);
6589 tcg_gen_movi_tl(cpu_ca, 0);
6590 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
6591 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
6592 tcg_gen_movi_tl(cpu_ca, 1);
6593 gen_set_label(l1);
6594 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
6595 tcg_temp_free(t0);
6596 tcg_temp_free(t1);
6597 if (unlikely(Rc(ctx->opcode) != 0)) {
6598 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6602 /* sraq - sraq. */
6603 static void gen_sraq(DisasContext *ctx)
6605 TCGLabel *l1 = gen_new_label();
6606 TCGLabel *l2 = gen_new_label();
6607 TCGv t0 = tcg_temp_new();
6608 TCGv t1 = tcg_temp_local_new();
6609 TCGv t2 = tcg_temp_local_new();
6610 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
6611 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
6612 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
6613 tcg_gen_subfi_tl(t2, 32, t2);
6614 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
6615 tcg_gen_or_tl(t0, t0, t2);
6616 gen_store_spr(SPR_MQ, t0);
6617 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
6618 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6619 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
6620 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
6621 gen_set_label(l1);
6622 tcg_temp_free(t0);
6623 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
6624 tcg_gen_movi_tl(cpu_ca, 0);
6625 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
6626 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
6627 tcg_gen_movi_tl(cpu_ca, 1);
6628 gen_set_label(l2);
6629 tcg_temp_free(t1);
6630 tcg_temp_free(t2);
6631 if (unlikely(Rc(ctx->opcode) != 0)) {
6632 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6636 /* sre - sre. */
6637 static void gen_sre(DisasContext *ctx)
6639 TCGv t0 = tcg_temp_new();
6640 TCGv t1 = tcg_temp_new();
6641 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6642 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6643 tcg_gen_subfi_tl(t1, 32, t1);
6644 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6645 tcg_gen_or_tl(t1, t0, t1);
6646 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6647 gen_store_spr(SPR_MQ, t1);
6648 tcg_temp_free(t0);
6649 tcg_temp_free(t1);
6650 if (unlikely(Rc(ctx->opcode) != 0)) {
6651 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6655 /* srea - srea. */
6656 static void gen_srea(DisasContext *ctx)
6658 TCGv t0 = tcg_temp_new();
6659 TCGv t1 = tcg_temp_new();
6660 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6661 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6662 gen_store_spr(SPR_MQ, t0);
6663 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
6664 tcg_temp_free(t0);
6665 tcg_temp_free(t1);
6666 if (unlikely(Rc(ctx->opcode) != 0)) {
6667 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6671 /* sreq */
6672 static void gen_sreq(DisasContext *ctx)
6674 TCGv t0 = tcg_temp_new();
6675 TCGv t1 = tcg_temp_new();
6676 TCGv t2 = tcg_temp_new();
6677 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6678 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
6679 tcg_gen_shr_tl(t1, t1, t0);
6680 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
6681 gen_load_spr(t2, SPR_MQ);
6682 gen_store_spr(SPR_MQ, t0);
6683 tcg_gen_and_tl(t0, t0, t1);
6684 tcg_gen_andc_tl(t2, t2, t1);
6685 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
6686 tcg_temp_free(t0);
6687 tcg_temp_free(t1);
6688 tcg_temp_free(t2);
6689 if (unlikely(Rc(ctx->opcode) != 0)) {
6690 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6694 /* sriq */
6695 static void gen_sriq(DisasContext *ctx)
6697 int sh = SH(ctx->opcode);
6698 TCGv t0 = tcg_temp_new();
6699 TCGv t1 = tcg_temp_new();
6700 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6701 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
6702 tcg_gen_or_tl(t1, t0, t1);
6703 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6704 gen_store_spr(SPR_MQ, t1);
6705 tcg_temp_free(t0);
6706 tcg_temp_free(t1);
6707 if (unlikely(Rc(ctx->opcode) != 0)) {
6708 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6712 /* srliq */
6713 static void gen_srliq(DisasContext *ctx)
6715 int sh = SH(ctx->opcode);
6716 TCGv t0 = tcg_temp_new();
6717 TCGv t1 = tcg_temp_new();
6718 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6719 gen_load_spr(t1, SPR_MQ);
6720 gen_store_spr(SPR_MQ, t0);
6721 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
6722 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
6723 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6724 tcg_temp_free(t0);
6725 tcg_temp_free(t1);
6726 if (unlikely(Rc(ctx->opcode) != 0)) {
6727 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6731 /* srlq */
6732 static void gen_srlq(DisasContext *ctx)
6734 TCGLabel *l1 = gen_new_label();
6735 TCGLabel *l2 = gen_new_label();
6736 TCGv t0 = tcg_temp_local_new();
6737 TCGv t1 = tcg_temp_local_new();
6738 TCGv t2 = tcg_temp_local_new();
6739 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
6740 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
6741 tcg_gen_shr_tl(t1, t1, t2);
6742 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
6743 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6744 gen_load_spr(t0, SPR_MQ);
6745 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6746 tcg_gen_br(l2);
6747 gen_set_label(l1);
6748 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
6749 tcg_gen_and_tl(t0, t0, t1);
6750 gen_load_spr(t2, SPR_MQ);
6751 tcg_gen_andc_tl(t1, t2, t1);
6752 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6753 gen_set_label(l2);
6754 tcg_temp_free(t0);
6755 tcg_temp_free(t1);
6756 tcg_temp_free(t2);
6757 if (unlikely(Rc(ctx->opcode) != 0)) {
6758 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6762 /* srq */
6763 static void gen_srq(DisasContext *ctx)
6765 TCGLabel *l1 = gen_new_label();
6766 TCGv t0 = tcg_temp_new();
6767 TCGv t1 = tcg_temp_new();
6768 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6769 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6770 tcg_gen_subfi_tl(t1, 32, t1);
6771 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6772 tcg_gen_or_tl(t1, t0, t1);
6773 gen_store_spr(SPR_MQ, t1);
6774 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
6775 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6776 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
6777 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
6778 gen_set_label(l1);
6779 tcg_temp_free(t0);
6780 tcg_temp_free(t1);
6781 if (unlikely(Rc(ctx->opcode) != 0)) {
6782 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6786 /* PowerPC 602 specific instructions */
6788 /* dsa */
6789 static void gen_dsa(DisasContext *ctx)
6791 /* XXX: TODO */
6792 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6795 /* esa */
6796 static void gen_esa(DisasContext *ctx)
6798 /* XXX: TODO */
6799 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6802 /* mfrom */
6803 static void gen_mfrom(DisasContext *ctx)
6805 #if defined(CONFIG_USER_ONLY)
6806 GEN_PRIV;
6807 #else
6808 CHK_SV;
6809 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6810 #endif /* defined(CONFIG_USER_ONLY) */
6813 /* 602 - 603 - G2 TLB management */
6815 /* tlbld */
6816 static void gen_tlbld_6xx(DisasContext *ctx)
6818 #if defined(CONFIG_USER_ONLY)
6819 GEN_PRIV;
6820 #else
6821 CHK_SV;
6822 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6823 #endif /* defined(CONFIG_USER_ONLY) */
6826 /* tlbli */
6827 static void gen_tlbli_6xx(DisasContext *ctx)
6829 #if defined(CONFIG_USER_ONLY)
6830 GEN_PRIV;
6831 #else
6832 CHK_SV;
6833 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6834 #endif /* defined(CONFIG_USER_ONLY) */
6837 /* 74xx TLB management */
6839 /* tlbld */
6840 static void gen_tlbld_74xx(DisasContext *ctx)
6842 #if defined(CONFIG_USER_ONLY)
6843 GEN_PRIV;
6844 #else
6845 CHK_SV;
6846 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6847 #endif /* defined(CONFIG_USER_ONLY) */
6850 /* tlbli */
6851 static void gen_tlbli_74xx(DisasContext *ctx)
6853 #if defined(CONFIG_USER_ONLY)
6854 GEN_PRIV;
6855 #else
6856 CHK_SV;
6857 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6858 #endif /* defined(CONFIG_USER_ONLY) */
6861 /* POWER instructions not in PowerPC 601 */
6863 /* clf */
6864 static void gen_clf(DisasContext *ctx)
6866 /* Cache line flush: implemented as no-op */
6869 /* cli */
6870 static void gen_cli(DisasContext *ctx)
6872 #if defined(CONFIG_USER_ONLY)
6873 GEN_PRIV;
6874 #else
6875 /* Cache line invalidate: privileged and treated as no-op */
6876 CHK_SV;
6877 #endif /* defined(CONFIG_USER_ONLY) */
6880 /* dclst */
6881 static void gen_dclst(DisasContext *ctx)
6883 /* Data cache line store: treated as no-op */
6886 static void gen_mfsri(DisasContext *ctx)
6888 #if defined(CONFIG_USER_ONLY)
6889 GEN_PRIV;
6890 #else
6891 int ra = rA(ctx->opcode);
6892 int rd = rD(ctx->opcode);
6893 TCGv t0;
6895 CHK_SV;
6896 t0 = tcg_temp_new();
6897 gen_addr_reg_index(ctx, t0);
6898 tcg_gen_extract_tl(t0, t0, 28, 4);
6899 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
6900 tcg_temp_free(t0);
6901 if (ra != 0 && ra != rd) {
6902 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
6904 #endif /* defined(CONFIG_USER_ONLY) */
6907 static void gen_rac(DisasContext *ctx)
6909 #if defined(CONFIG_USER_ONLY)
6910 GEN_PRIV;
6911 #else
6912 TCGv t0;
6914 CHK_SV;
6915 t0 = tcg_temp_new();
6916 gen_addr_reg_index(ctx, t0);
6917 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6918 tcg_temp_free(t0);
6919 #endif /* defined(CONFIG_USER_ONLY) */
6922 static void gen_rfsvc(DisasContext *ctx)
6924 #if defined(CONFIG_USER_ONLY)
6925 GEN_PRIV;
6926 #else
6927 CHK_SV;
6929 gen_helper_rfsvc(cpu_env);
6930 gen_sync_exception(ctx);
6931 #endif /* defined(CONFIG_USER_ONLY) */
6934 /* svc is not implemented for now */
6936 /* BookE specific instructions */
6938 /* XXX: not implemented on 440 ? */
6939 static void gen_mfapidi(DisasContext *ctx)
6941 /* XXX: TODO */
6942 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6945 /* XXX: not implemented on 440 ? */
6946 static void gen_tlbiva(DisasContext *ctx)
6948 #if defined(CONFIG_USER_ONLY)
6949 GEN_PRIV;
6950 #else
6951 TCGv t0;
6953 CHK_SV;
6954 t0 = tcg_temp_new();
6955 gen_addr_reg_index(ctx, t0);
6956 gen_helper_tlbiva(cpu_env, t0);
6957 tcg_temp_free(t0);
6958 #endif /* defined(CONFIG_USER_ONLY) */
6961 /* All 405 MAC instructions are translated here */
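/*
 * Decoding: the low bits of opc3 select which halfwords of rA/rB are used
 * and whether they are sign- or zero-extended; opc2 bit 2 selects the
 * multiply-accumulate forms and bit 1 the negated ones; opc3 bit 4 (0x10)
 * enables the XER[OV]/SO update, bit 1 (0x02) saturation and bit 0 (0x01)
 * signed overflow handling.
 */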
6962 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
6963 int ra, int rb, int rt, int Rc)
6965 TCGv t0, t1;
6967 t0 = tcg_temp_local_new();
6968 t1 = tcg_temp_local_new();
6970 switch (opc3 & 0x0D) {
6971 case 0x05:
6972 /* macchw - macchw. - macchwo - macchwo. */
6973 /* macchws - macchws. - macchwso - macchwso. */
6974 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
6975 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
6976 /* mulchw - mulchw. */
6977 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
6978 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
6979 tcg_gen_ext16s_tl(t1, t1);
6980 break;
6981 case 0x04:
6982 /* macchwu - macchwu. - macchwuo - macchwuo. */
6983 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
6984 /* mulchwu - mulchwu. */
6985 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
6986 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
6987 tcg_gen_ext16u_tl(t1, t1);
6988 break;
6989 case 0x01:
6990 /* machhw - machhw. - machhwo - machhwo. */
6991 /* machhws - machhws. - machhwso - machhwso. */
6992 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
6993 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
6994 /* mulhhw - mulhhw. */
6995 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
6996 tcg_gen_ext16s_tl(t0, t0);
6997 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
6998 tcg_gen_ext16s_tl(t1, t1);
6999 break;
7000 case 0x00:
7001 /* machhwu - machhwu. - machhwuo - machhwuo. */
7002 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
7003 /* mulhhwu - mulhhwu. */
7004 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
7005 tcg_gen_ext16u_tl(t0, t0);
7006 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
7007 tcg_gen_ext16u_tl(t1, t1);
7008 break;
7009 case 0x0D:
7010 /* maclhw - maclhw. - maclhwo - maclhwo. */
7011 /* maclhws - maclhws. - maclhwso - maclhwso. */
7012 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
7013 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
7014 /* mullhw - mullhw. */
7015 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
7016 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
7017 break;
7018 case 0x0C:
7019 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
7020 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
7021 /* mullhwu - mullhwu. */
7022 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
7023 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
7024 break;
7026 if (opc2 & 0x04) {
7027 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
7028 tcg_gen_mul_tl(t1, t0, t1);
7029 if (opc2 & 0x02) {
7030 /* nmultiply-and-accumulate (0x0E) */
7031 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
7032 } else {
7033 /* multiply-and-accumulate (0x0C) */
7034 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
7037 if (opc3 & 0x12) {
7038 /* Check overflow and/or saturate */
7039 TCGLabel *l1 = gen_new_label();
7041 if (opc3 & 0x10) {
7042 /* Start with XER OV disabled, the most likely case */
7043 tcg_gen_movi_tl(cpu_ov, 0);
7045 if (opc3 & 0x01) {
7046 /* Signed */
7047 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
7048 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
7049 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
7050 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
7051 if (opc3 & 0x02) {
7052 /* Saturate */
7053 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
7054 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
7056 } else {
7057 /* Unsigned */
7058 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
7059 if (opc3 & 0x02) {
7060 /* Saturate */
7061 tcg_gen_movi_tl(t0, UINT32_MAX);
7064 if (opc3 & 0x10) {
7065 /* Check overflow */
7066 tcg_gen_movi_tl(cpu_ov, 1);
7067 tcg_gen_movi_tl(cpu_so, 1);
7069 gen_set_label(l1);
7070 tcg_gen_mov_tl(cpu_gpr[rt], t0);
7072 } else {
7073 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
7075 tcg_temp_free(t0);
7076 tcg_temp_free(t1);
7077 if (unlikely(Rc != 0)) {
7078 /* Update Rc0 */
7079 gen_set_Rc0(ctx, cpu_gpr[rt]);
7083 #define GEN_MAC_HANDLER(name, opc2, opc3) \
7084 static void glue(gen_, name)(DisasContext *ctx) \
7086 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
7087 rD(ctx->opcode), Rc(ctx->opcode)); \
7090 /* macchw - macchw. */
7091 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
7092 /* macchwo - macchwo. */
7093 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
7094 /* macchws - macchws. */
7095 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
7096 /* macchwso - macchwso. */
7097 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
7098 /* macchwsu - macchwsu. */
7099 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
7100 /* macchwsuo - macchwsuo. */
7101 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
7102 /* macchwu - macchwu. */
7103 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
7104 /* macchwuo - macchwuo. */
7105 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
7106 /* machhw - machhw. */
7107 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
7108 /* machhwo - machhwo. */
7109 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
7110 /* machhws - machhws. */
7111 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
7112 /* machhwso - machhwso. */
7113 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
7114 /* machhwsu - machhwsu. */
7115 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
7116 /* machhwsuo - machhwsuo. */
7117 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
7118 /* machhwu - machhwu. */
7119 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
7120 /* machhwuo - machhwuo. */
7121 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
7122 /* maclhw - maclhw. */
7123 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
7124 /* maclhwo - maclhwo. */
7125 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
7126 /* maclhws - maclhws. */
7127 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
7128 /* maclhwso - maclhwso. */
7129 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
7130 /* maclhwu - maclhwu. */
7131 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
7132 /* maclhwuo - maclhwuo. */
7133 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
7134 /* maclhwsu - maclhwsu. */
7135 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
7136 /* maclhwsuo - maclhwsuo. */
7137 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
7138 /* nmacchw - nmacchw. */
7139 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
7140 /* nmacchwo - nmacchwo. */
7141 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
7142 /* nmacchws - nmacchws. */
7143 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
7144 /* nmacchwso - nmacchwso. */
7145 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
7146 /* nmachhw - nmachhw. */
7147 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
7148 /* nmachhwo - nmachhwo. */
7149 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
7150 /* nmachhws - nmachhws. */
7151 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
7152 /* nmachhwso - nmachhwso. */
7153 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
7154 /* nmaclhw - nmaclhw. */
7155 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
7156 /* nmaclhwo - nmaclhwo. */
7157 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
7158 /* nmaclhws - nmaclhws. */
7159 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
7160 /* nmaclhwso - nmaclhwso. */
7161 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
7163 /* mulchw - mulchw. */
7164 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
7165 /* mulchwu - mulchwu. */
7166 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
7167 /* mulhhw - mulhhw. */
7168 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
7169 /* mulhhwu - mulhhwu. */
7170 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
7171 /* mullhw - mullhw. */
7172 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
7173 /* mullhwu - mullhwu. */
7174 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
7176 /* mfdcr */
7177 static void gen_mfdcr(DisasContext *ctx)
7179 #if defined(CONFIG_USER_ONLY)
7180 GEN_PRIV;
7181 #else
7182 TCGv dcrn;
7184 CHK_SV;
7185 dcrn = tcg_const_tl(SPR(ctx->opcode));
7186 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
7187 tcg_temp_free(dcrn);
7188 #endif /* defined(CONFIG_USER_ONLY) */
7191 /* mtdcr */
7192 static void gen_mtdcr(DisasContext *ctx)
7194 #if defined(CONFIG_USER_ONLY)
7195 GEN_PRIV;
7196 #else
7197 TCGv dcrn;
7199 CHK_SV;
7200 dcrn = tcg_const_tl(SPR(ctx->opcode));
7201 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
7202 tcg_temp_free(dcrn);
7203 #endif /* defined(CONFIG_USER_ONLY) */
7206 /* mfdcrx */
7207 /* XXX: not implemented on 440 ? */
7208 static void gen_mfdcrx(DisasContext *ctx)
7210 #if defined(CONFIG_USER_ONLY)
7211 GEN_PRIV;
7212 #else
7213 CHK_SV;
7214 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
7215 cpu_gpr[rA(ctx->opcode)]);
7216 /* Note: setting the Rc flag leaves Rc0 in an undefined state */
7217 #endif /* defined(CONFIG_USER_ONLY) */
7220 /* mtdcrx */
7221 /* XXX: not implemented on 440 ? */
7222 static void gen_mtdcrx(DisasContext *ctx)
7224 #if defined(CONFIG_USER_ONLY)
7225 GEN_PRIV;
7226 #else
7227 CHK_SV;
7228 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
7229 cpu_gpr[rS(ctx->opcode)]);
7230 /* Note: setting the Rc flag leaves Rc0 in an undefined state */
7231 #endif /* defined(CONFIG_USER_ONLY) */
7234 /* mfdcrux (PPC 460) : user-mode access to DCR */
7235 static void gen_mfdcrux(DisasContext *ctx)
7237 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
7238 cpu_gpr[rA(ctx->opcode)]);
7239 /* Note: setting the Rc flag leaves Rc0 in an undefined state */
7242 /* mtdcrux (PPC 460) : user-mode access to DCR */
7243 static void gen_mtdcrux(DisasContext *ctx)
7245 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
7246 cpu_gpr[rS(ctx->opcode)]);
7247 /* Note: setting the Rc flag leaves Rc0 in an undefined state */
7250 /* dccci */
7251 static void gen_dccci(DisasContext *ctx)
7253 CHK_SV;
7254 /* interpreted as no-op */
7257 /* dcread */
7258 static void gen_dcread(DisasContext *ctx)
7260 #if defined(CONFIG_USER_ONLY)
7261 GEN_PRIV;
7262 #else
7263 TCGv EA, val;
7265 CHK_SV;
7266 gen_set_access_type(ctx, ACCESS_CACHE);
7267 EA = tcg_temp_new();
7268 gen_addr_reg_index(ctx, EA);
7269 val = tcg_temp_new();
7270 gen_qemu_ld32u(ctx, val, EA);
7271 tcg_temp_free(val);
7272 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
7273 tcg_temp_free(EA);
7274 #endif /* defined(CONFIG_USER_ONLY) */
7277 /* icbt */
7278 static void gen_icbt_40x(DisasContext *ctx)
7281 * interpreted as no-op
7282 * XXX: the specification says this is treated as a load by the MMU
7283 * but it does not generate any exception
7287 /* iccci */
7288 static void gen_iccci(DisasContext *ctx)
7290 CHK_SV;
7291 /* interpreted as no-op */
7294 /* icread */
7295 static void gen_icread(DisasContext *ctx)
7297 CHK_SV;
7298 /* interpreted as no-op */
7301 /* rfci (supervisor only) */
7302 static void gen_rfci_40x(DisasContext *ctx)
7304 #if defined(CONFIG_USER_ONLY)
7305 GEN_PRIV;
7306 #else
7307 CHK_SV;
7308 /* Restore CPU state */
7309 gen_helper_40x_rfci(cpu_env);
7310 gen_sync_exception(ctx);
7311 #endif /* defined(CONFIG_USER_ONLY) */
7314 static void gen_rfci(DisasContext *ctx)
7316 #if defined(CONFIG_USER_ONLY)
7317 GEN_PRIV;
7318 #else
7319 CHK_SV;
7320 /* Restore CPU state */
7321 gen_helper_rfci(cpu_env);
7322 gen_sync_exception(ctx);
7323 #endif /* defined(CONFIG_USER_ONLY) */
7326 /* BookE specific */
7328 /* XXX: not implemented on 440 ? */
7329 static void gen_rfdi(DisasContext *ctx)
7331 #if defined(CONFIG_USER_ONLY)
7332 GEN_PRIV;
7333 #else
7334 CHK_SV;
7335 /* Restore CPU state */
7336 gen_helper_rfdi(cpu_env);
7337 gen_sync_exception(ctx);
7338 #endif /* defined(CONFIG_USER_ONLY) */
7341 /* XXX: not implemented on 440 ? */
7342 static void gen_rfmci(DisasContext *ctx)
7344 #if defined(CONFIG_USER_ONLY)
7345 GEN_PRIV;
7346 #else
7347 CHK_SV;
7348 /* Restore CPU state */
7349 gen_helper_rfmci(cpu_env);
7350 gen_sync_exception(ctx);
7351 #endif /* defined(CONFIG_USER_ONLY) */
7354 /* TLB management - PowerPC 405 implementation */
7356 /* tlbre */
7357 static void gen_tlbre_40x(DisasContext *ctx)
7359 #if defined(CONFIG_USER_ONLY)
7360 GEN_PRIV;
7361 #else
7362 CHK_SV;
7363 switch (rB(ctx->opcode)) {
7364 case 0:
7365 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
7366 cpu_gpr[rA(ctx->opcode)]);
7367 break;
7368 case 1:
7369 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
7370 cpu_gpr[rA(ctx->opcode)]);
7371 break;
7372 default:
7373 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7374 break;
7376 #endif /* defined(CONFIG_USER_ONLY) */
7379 /* tlbsx - tlbsx. */
7380 static void gen_tlbsx_40x(DisasContext *ctx)
7382 #if defined(CONFIG_USER_ONLY)
7383 GEN_PRIV;
7384 #else
7385 TCGv t0;
7387 CHK_SV;
7388 t0 = tcg_temp_new();
7389 gen_addr_reg_index(ctx, t0);
7390 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
7391 tcg_temp_free(t0);
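/*
 * With Rc set, CR0 is loaded from XER[SO] and the EQ bit is added unless
 * the helper stored -1 in rD, i.e. no matching TLB entry was found.
 */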
7392 if (Rc(ctx->opcode)) {
7393 TCGLabel *l1 = gen_new_label();
7394 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
7395 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
7396 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
7397 gen_set_label(l1);
7399 #endif /* defined(CONFIG_USER_ONLY) */
7402 /* tlbwe */
7403 static void gen_tlbwe_40x(DisasContext *ctx)
7405 #if defined(CONFIG_USER_ONLY)
7406 GEN_PRIV;
7407 #else
7408 CHK_SV;
7410 switch (rB(ctx->opcode)) {
7411 case 0:
7412 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
7413 cpu_gpr[rS(ctx->opcode)]);
7414 break;
7415 case 1:
7416 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
7417 cpu_gpr[rS(ctx->opcode)]);
7418 break;
7419 default:
7420 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7421 break;
7423 #endif /* defined(CONFIG_USER_ONLY) */
7426 /* TLB management - PowerPC 440 implementation */
7428 /* tlbre */
7429 static void gen_tlbre_440(DisasContext *ctx)
7431 #if defined(CONFIG_USER_ONLY)
7432 GEN_PRIV;
7433 #else
7434 CHK_SV;
7436 switch (rB(ctx->opcode)) {
7437 case 0:
7438 case 1:
7439 case 2:
7441 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
7442 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
7443 t0, cpu_gpr[rA(ctx->opcode)]);
7444 tcg_temp_free_i32(t0);
7446 break;
7447 default:
7448 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7449 break;
7451 #endif /* defined(CONFIG_USER_ONLY) */
7454 /* tlbsx - tlbsx. */
7455 static void gen_tlbsx_440(DisasContext *ctx)
7457 #if defined(CONFIG_USER_ONLY)
7458 GEN_PRIV;
7459 #else
7460 TCGv t0;
7462 CHK_SV;
7463 t0 = tcg_temp_new();
7464 gen_addr_reg_index(ctx, t0);
7465 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
7466 tcg_temp_free(t0);
7467 if (Rc(ctx->opcode)) {
7468 TCGLabel *l1 = gen_new_label();
7469 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
7470 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
7471 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
7472 gen_set_label(l1);
7474 #endif /* defined(CONFIG_USER_ONLY) */
7477 /* tlbwe */
7478 static void gen_tlbwe_440(DisasContext *ctx)
7480 #if defined(CONFIG_USER_ONLY)
7481 GEN_PRIV;
7482 #else
7483 CHK_SV;
7484 switch (rB(ctx->opcode)) {
7485 case 0:
7486 case 1:
7487 case 2:
7489 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
7490 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
7491 cpu_gpr[rS(ctx->opcode)]);
7492 tcg_temp_free_i32(t0);
7494 break;
7495 default:
7496 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7497 break;
7499 #endif /* defined(CONFIG_USER_ONLY) */
7502 /* TLB management - PowerPC BookE 2.06 implementation */
7504 /* tlbre */
7505 static void gen_tlbre_booke206(DisasContext *ctx)
7507 #if defined(CONFIG_USER_ONLY)
7508 GEN_PRIV;
7509 #else
7510 CHK_SV;
7511 gen_helper_booke206_tlbre(cpu_env);
7512 #endif /* defined(CONFIG_USER_ONLY) */
7515 /* tlbsx - tlbsx. */
7516 static void gen_tlbsx_booke206(DisasContext *ctx)
7518 #if defined(CONFIG_USER_ONLY)
7519 GEN_PRIV;
7520 #else
7521 TCGv t0;
7523 CHK_SV;
7524 if (rA(ctx->opcode)) {
7525 t0 = tcg_temp_new();
7526 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
7527 } else {
7528 t0 = tcg_const_tl(0);
7531 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
7532 gen_helper_booke206_tlbsx(cpu_env, t0);
7533 tcg_temp_free(t0);
7534 #endif /* defined(CONFIG_USER_ONLY) */
7537 /* tlbwe */
7538 static void gen_tlbwe_booke206(DisasContext *ctx)
7540 #if defined(CONFIG_USER_ONLY)
7541 GEN_PRIV;
7542 #else
7543 CHK_SV;
7544 gen_helper_booke206_tlbwe(cpu_env);
7545 #endif /* defined(CONFIG_USER_ONLY) */
7548 static void gen_tlbivax_booke206(DisasContext *ctx)
7550 #if defined(CONFIG_USER_ONLY)
7551 GEN_PRIV;
7552 #else
7553 TCGv t0;
7555 CHK_SV;
7556 t0 = tcg_temp_new();
7557 gen_addr_reg_index(ctx, t0);
7558 gen_helper_booke206_tlbivax(cpu_env, t0);
7559 tcg_temp_free(t0);
7560 #endif /* defined(CONFIG_USER_ONLY) */
7563 static void gen_tlbilx_booke206(DisasContext *ctx)
7565 #if defined(CONFIG_USER_ONLY)
7566 GEN_PRIV;
7567 #else
7568 TCGv t0;
7570 CHK_SV;
7571 t0 = tcg_temp_new();
7572 gen_addr_reg_index(ctx, t0);
7574 switch ((ctx->opcode >> 21) & 0x3) {
7575 case 0:
7576 gen_helper_booke206_tlbilx0(cpu_env, t0);
7577 break;
7578 case 1:
7579 gen_helper_booke206_tlbilx1(cpu_env, t0);
7580 break;
7581 case 3:
7582 gen_helper_booke206_tlbilx3(cpu_env, t0);
7583 break;
7584 default:
7585 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7586 break;
7589 tcg_temp_free(t0);
7590 #endif /* defined(CONFIG_USER_ONLY) */
7594 /* wrtee */
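/* Copy the EE bit of rD into MSR[EE], leaving the other MSR bits unchanged. */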
7595 static void gen_wrtee(DisasContext *ctx)
7597 #if defined(CONFIG_USER_ONLY)
7598 GEN_PRIV;
7599 #else
7600 TCGv t0;
7602 CHK_SV;
7603 t0 = tcg_temp_new();
7604 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
7605 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
7606 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
7607 tcg_temp_free(t0);
7609 * Stop translation to have a chance to raise an exception if we
7610 * just set msr_ee to 1
7612 gen_stop_exception(ctx);
7613 #endif /* defined(CONFIG_USER_ONLY) */
7616 /* wrteei */
7617 static void gen_wrteei(DisasContext *ctx)
7619 #if defined(CONFIG_USER_ONLY)
7620 GEN_PRIV;
7621 #else
7622 CHK_SV;
7623 if (ctx->opcode & 0x00008000) {
7624 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
7625 /* Stop translation to have a chance to raise an exception */
7626 gen_stop_exception(ctx);
7627 } else {
7628 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
7630 #endif /* defined(CONFIG_USER_ONLY) */
7633 /* PowerPC 440 specific instructions */
7635 /* dlmzb */
7636 static void gen_dlmzb(DisasContext *ctx)
7638 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
7639 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
7640 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
7641 tcg_temp_free_i32(t0);
7644 /* mbar replaces eieio on 440 */
7645 static void gen_mbar(DisasContext *ctx)
7647 /* interpreted as no-op */
7650 /* msync replaces sync on 440 */
7651 static void gen_msync_4xx(DisasContext *ctx)
7653 /* Only e500 seems to treat reserved bits as invalid */
7654 if ((ctx->insns_flags2 & PPC2_BOOKE206) &&
7655 (ctx->opcode & 0x03FFF801)) {
7656 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7658 /* otherwise interpreted as no-op */
7661 /* icbt */
7662 static void gen_icbt_440(DisasContext *ctx)
7665 * interpreted as no-op
7666 * XXX: the specification says this is treated as a load by the MMU
7667 * but it does not generate any exception
7671 /* Embedded.Processor Control */
7673 static void gen_msgclr(DisasContext *ctx)
7675 #if defined(CONFIG_USER_ONLY)
7676 GEN_PRIV;
7677 #else
7678 CHK_HV;
7679 if (is_book3s_arch2x(ctx)) {
7680 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7681 } else {
7682 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7684 #endif /* defined(CONFIG_USER_ONLY) */
7687 static void gen_msgsnd(DisasContext *ctx)
7689 #if defined(CONFIG_USER_ONLY)
7690 GEN_PRIV;
7691 #else
7692 CHK_HV;
7693 if (is_book3s_arch2x(ctx)) {
7694 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]);
7695 } else {
7696 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
7698 #endif /* defined(CONFIG_USER_ONLY) */
7701 #if defined(TARGET_PPC64)
7702 static void gen_msgclrp(DisasContext *ctx)
7704 #if defined(CONFIG_USER_ONLY)
7705 GEN_PRIV;
7706 #else
7707 CHK_SV;
7708 gen_helper_book3s_msgclrp(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7709 #endif /* defined(CONFIG_USER_ONLY) */
7712 static void gen_msgsndp(DisasContext *ctx)
7714 #if defined(CONFIG_USER_ONLY)
7715 GEN_PRIV;
7716 #else
7717 CHK_SV;
7718 gen_helper_book3s_msgsndp(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7719 #endif /* defined(CONFIG_USER_ONLY) */
7721 #endif
7723 static void gen_msgsync(DisasContext *ctx)
7725 #if defined(CONFIG_USER_ONLY)
7726 GEN_PRIV;
7727 #else
7728 CHK_HV;
7729 #endif /* defined(CONFIG_USER_ONLY) */
7730 /* interpreted as no-op */
7733 #if defined(TARGET_PPC64)
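/* maddld: rD = (rA * rB) + rC, keeping only the low 64 bits. */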
7734 static void gen_maddld(DisasContext *ctx)
7736 TCGv_i64 t1 = tcg_temp_new_i64();
7738 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7739 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
7740 tcg_temp_free_i64(t1);
7743 /* maddhd maddhdu */
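/*
 * The low opcode bit (read here via Rc()) selects the unsigned form
 * (maddhdu): rD receives the high 64 bits of (rA * rB) + rC, with rC
 * zero- or sign-extended to 128 bits accordingly.
 */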
7744 static void gen_maddhd_maddhdu(DisasContext *ctx)
7746 TCGv_i64 lo = tcg_temp_new_i64();
7747 TCGv_i64 hi = tcg_temp_new_i64();
7748 TCGv_i64 t1 = tcg_temp_new_i64();
7750 if (Rc(ctx->opcode)) {
7751 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
7752 cpu_gpr[rB(ctx->opcode)]);
7753 tcg_gen_movi_i64(t1, 0);
7754 } else {
7755 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
7756 cpu_gpr[rB(ctx->opcode)]);
7757 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
7759 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
7760 cpu_gpr[rC(ctx->opcode)], t1);
7761 tcg_temp_free_i64(lo);
7762 tcg_temp_free_i64(hi);
7763 tcg_temp_free_i64(t1);
7765 #endif /* defined(TARGET_PPC64) */
7767 static void gen_tbegin(DisasContext *ctx)
7769 if (unlikely(!ctx->tm_enabled)) {
7770 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
7771 return;
7773 gen_helper_tbegin(cpu_env);
7776 #define GEN_TM_NOOP(name) \
7777 static inline void gen_##name(DisasContext *ctx) \
7779 if (unlikely(!ctx->tm_enabled)) { \
7780 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \
7781 return; \
7783 /* \
7784 * Because tbegin always fails in QEMU, these user \
7785 * space instructions all have a simple implementation: \
7787 * CR[0] = 0b0 || MSR[TS] || 0b0 \
7788 * = 0b0 || 0b00 || 0b0 \
7789 */ \
7790 tcg_gen_movi_i32(cpu_crf[0], 0); \
7793 GEN_TM_NOOP(tend);
7794 GEN_TM_NOOP(tabort);
7795 GEN_TM_NOOP(tabortwc);
7796 GEN_TM_NOOP(tabortwci);
7797 GEN_TM_NOOP(tabortdc);
7798 GEN_TM_NOOP(tabortdci);
7799 GEN_TM_NOOP(tsr);
7801 static inline void gen_cp_abort(DisasContext *ctx)
7803 /* Do Nothing */
7806 #define GEN_CP_PASTE_NOOP(name) \
7807 static inline void gen_##name(DisasContext *ctx) \
7809 /* \
7810 * Generate invalid exception until we have an \
7811 * implementation of the copy paste facility \
7812 */ \
7813 gen_invalid(ctx); \
7816 GEN_CP_PASTE_NOOP(copy)
7817 GEN_CP_PASTE_NOOP(paste)
7819 static void gen_tcheck(DisasContext *ctx)
7821 if (unlikely(!ctx->tm_enabled)) {
7822 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
7823 return;
7826 * Because tbegin always fails, the tcheck implementation is
7827 * simple:
7829 * CR[CRF] = TDOOMED || MSR[TS] || 0b0
7830 * = 0b1 || 0b00 || 0b0
7832 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
7835 #if defined(CONFIG_USER_ONLY)
7836 #define GEN_TM_PRIV_NOOP(name) \
7837 static inline void gen_##name(DisasContext *ctx) \
7839 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \
7842 #else
7844 #define GEN_TM_PRIV_NOOP(name) \
7845 static inline void gen_##name(DisasContext *ctx) \
7847 CHK_SV; \
7848 if (unlikely(!ctx->tm_enabled)) { \
7849 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \
7850 return; \
7852 /* \
7853 * Because tbegin always fails, the implementation is \
7854 * simple: \
7856 * CR[0] = 0b0 || MSR[TS] || 0b0 \
7857 * = 0b0 || 0b00 || 0b0 \
7858 */ \
7859 tcg_gen_movi_i32(cpu_crf[0], 0); \
7862 #endif
7864 GEN_TM_PRIV_NOOP(treclaim);
7865 GEN_TM_PRIV_NOOP(trechkpt);
7867 static inline void get_fpr(TCGv_i64 dst, int regno)
7869 tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno));
7872 static inline void set_fpr(int regno, TCGv_i64 src)
7874 tcg_gen_st_i64(src, cpu_env, fpr_offset(regno));
7877 static inline void get_avr64(TCGv_i64 dst, int regno, bool high)
7879 tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high));
7882 static inline void set_avr64(int regno, TCGv_i64 src, bool high)
7884 tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high));
7887 #include "translate/fp-impl.c.inc"
7889 #include "translate/vmx-impl.c.inc"
7891 #include "translate/vsx-impl.c.inc"
7893 #include "translate/dfp-impl.c.inc"
7895 #include "translate/spe-impl.c.inc"
7897 /* Handles lfdp, lxsd, lxssp */
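/*
 * Primary opcode 0x39 is shared: the two low opcode bits select the
 * instruction, and availability depends on the ISA level in insns_flags2.
 */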
7898 static void gen_dform39(DisasContext *ctx)
7900 switch (ctx->opcode & 0x3) {
7901 case 0: /* lfdp */
7902 if (ctx->insns_flags2 & PPC2_ISA205) {
7903 return gen_lfdp(ctx);
7905 break;
7906 case 2: /* lxsd */
7907 if (ctx->insns_flags2 & PPC2_ISA300) {
7908 return gen_lxsd(ctx);
7910 break;
7911 case 3: /* lxssp */
7912 if (ctx->insns_flags2 & PPC2_ISA300) {
7913 return gen_lxssp(ctx);
7915 break;
7917 return gen_invalid(ctx);
7920 /* handles stfdp, lxv, stxsd, stxssp lxvx */
7921 static void gen_dform3D(DisasContext *ctx)
7923 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */
7924 switch (ctx->opcode & 0x7) {
7925 case 1: /* lxv */
7926 if (ctx->insns_flags2 & PPC2_ISA300) {
7927 return gen_lxv(ctx);
7929 break;
7930 case 5: /* stxv */
7931 if (ctx->insns_flags2 & PPC2_ISA300) {
7932 return gen_stxv(ctx);
7934 break;
7936 } else { /* DS-FORM */
7937 switch (ctx->opcode & 0x3) {
7938 case 0: /* stfdp */
7939 if (ctx->insns_flags2 & PPC2_ISA205) {
7940 return gen_stfdp(ctx);
7942 break;
7943 case 2: /* stxsd */
7944 if (ctx->insns_flags2 & PPC2_ISA300) {
7945 return gen_stxsd(ctx);
7947 break;
7948 case 3: /* stxssp */
7949 if (ctx->insns_flags2 & PPC2_ISA300) {
7950 return gen_stxssp(ctx);
7952 break;
7955 return gen_invalid(ctx);
7958 #if defined(TARGET_PPC64)
7959 /* brd */
7960 static void gen_brd(DisasContext *ctx)
7962 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
7965 /* brw */
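/*
 * Byte-reverse each 32-bit word: bswap64 reverses all eight bytes (which
 * also swaps the two words), and the rotate by 32 swaps the words back.
 */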
7966 static void gen_brw(DisasContext *ctx)
7968 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
7969 tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
7973 /* brh */
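/*
 * Swap the two bytes within each 16-bit halfword of rS: the high byte of
 * each halfword is moved down by 8 and the low byte up by 8.
 */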
7974 static void gen_brh(DisasContext *ctx)
7976 TCGv_i64 t0 = tcg_temp_new_i64();
7977 TCGv_i64 t1 = tcg_temp_new_i64();
7978 TCGv_i64 t2 = tcg_temp_new_i64();
7980 tcg_gen_movi_i64(t0, 0x00ff00ff00ff00ffull);
7981 tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
7982 tcg_gen_and_i64(t2, t1, t0);
7983 tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], t0);
7984 tcg_gen_shli_i64(t1, t1, 8);
7985 tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
7987 tcg_temp_free_i64(t0);
7988 tcg_temp_free_i64(t1);
7989 tcg_temp_free_i64(t2);
7991 #endif
7993 static opcode_t opcodes[] = {
7994 #if defined(TARGET_PPC64)
7995 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
7996 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
7997 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
7998 #endif
7999 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
8000 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
8001 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8002 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER),
8003 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8004 #if defined(TARGET_PPC64)
8005 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
8006 #endif
8007 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
8008 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
8009 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
8010 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8011 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8012 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8013 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8014 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
8015 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
8016 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
8017 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
8018 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
8019 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8020 #if defined(TARGET_PPC64)
8021 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
8022 #endif
8023 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
8024 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
8025 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8026 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8027 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8028 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
8029 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
8030 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
8031 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
8032 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
8033 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
8034 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
8035 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8036 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8037 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8038 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8039 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
8040 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
8041 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
8042 #if defined(TARGET_PPC64)
8043 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
8044 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
8045 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
8046 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
8047 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
8048 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
8049 #endif
8050 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8051 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8052 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8053 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
8054 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
8055 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
8056 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
8057 #if defined(TARGET_PPC64)
8058 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
8059 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
8060 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
8061 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
8062 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
8063 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
8064 PPC_NONE, PPC2_ISA300),
8065 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
8066 PPC_NONE, PPC2_ISA300),
8067 #endif
8068 #if defined(TARGET_PPC64)
8069 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
8070 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
8071 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
8072 #endif
8073 /* handles lfdp, lxsd, lxssp */
8074 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
8075 /* handles stfdp, lxv, stxsd, stxssp, stxv */
8076 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
8077 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8078 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8079 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
8080 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
8081 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
8082 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
8083 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO),
8084 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
8085 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
8086 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
8087 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
8088 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
8089 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
8090 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
8091 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
8092 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
8093 #if defined(TARGET_PPC64)
8094 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
8095 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
8096 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
8097 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
8098 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
8099 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
8100 #endif
8101 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
8102 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
8103 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300),
8104 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8105 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8106 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
8107 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
8108 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
8109 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
8110 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
8111 #if defined(TARGET_PPC64)
8112 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
8113 #if !defined(CONFIG_USER_ONLY)
8114 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
8115 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
8116 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
8117 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
8118 #endif
8119 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
8120 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
8121 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
8122 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
8123 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
8124 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
8125 #endif
8126 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
8127 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
8128 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
8129 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
8130 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8131 #if defined(TARGET_PPC64)
8132 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
8133 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
8134 #endif
8135 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
8136 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
8137 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
8138 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
8139 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
8140 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
8141 #if defined(TARGET_PPC64)
8142 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
8143 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
8144 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
8145 #endif
8146 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
8147 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
8148 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
8149 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
8150 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
8151 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
8152 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
8153 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
8154 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
8155 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
8156 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
8157 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
8158 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
8159 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
8160 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
8161 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
8162 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
8163 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
8164 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
8165 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
8166 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
8167 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
8168 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
8169 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
8170 #if defined(TARGET_PPC64)
8171 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
8172 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
8173 PPC_SEGMENT_64B),
8174 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
8175 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
8176 PPC_SEGMENT_64B),
8177 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
8178 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
8179 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
8180 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B),
8181 #endif
8182 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
8184 * XXX Those instructions will need to be handled differently for
8185 * different ISA versions
8187 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE),
8188 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE),
8189 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300),
8190 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300),
8191 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
8192 #if defined(TARGET_PPC64)
8193 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI),
8194 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
8195 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300),
8196 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
8197 #endif
8198 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
8199 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
8200 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
8201 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
8202 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
8203 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
8204 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
8205 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
8206 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
8207 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
8208 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
8209 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8210 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
8211 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
8212 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
8213 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
8214 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
8215 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
8216 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
8217 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8218 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
8219 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
8220 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
8221 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
8222 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
8223 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
8224 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
8225 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
8226 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
8227 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
8228 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
8229 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
8230 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
8231 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
8232 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
8233 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
8234 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
8235 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
8236 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
8237 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
8238 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
8239 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
8240 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
8241 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
8242 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
8243 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
8244 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
8245 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
8246 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
8247 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8248 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8249 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
8250 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
8251 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8252 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8253 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
8254 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
8255 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
8256 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
8257 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
8258 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
8259 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
8260 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
8261 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
8262 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
8263 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
8264 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
8265 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
8266 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
8267 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
8268 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
8269 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
8270 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
8271 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
8272 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
8273 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
8274 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
8275 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
8276 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
8277 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
8278 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
8279 PPC_NONE, PPC2_BOOKE206),
8280 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
8281 PPC_NONE, PPC2_BOOKE206),
8282 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
8283 PPC_NONE, PPC2_BOOKE206),
8284 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
8285 PPC_NONE, PPC2_BOOKE206),
8286 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
8287 PPC_NONE, PPC2_BOOKE206),
8288 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001,
8289 PPC_NONE, PPC2_PRCNTL),
8290 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001,
8291 PPC_NONE, PPC2_PRCNTL),
8292 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000,
8293 PPC_NONE, PPC2_PRCNTL),
8294 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
8295 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
8296 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
8297 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
8298 PPC_BOOKE, PPC2_BOOKE206),
8299 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE),
8300 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
8301 PPC_BOOKE, PPC2_BOOKE206),
8302 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
8303 PPC_440_SPEC),
8304 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
8305 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
8306 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
8307 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
8308 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
8309 #if defined(TARGET_PPC64)
8310 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
8311 PPC2_ISA300),
8312 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
8313 GEN_HANDLER2_E(msgsndp, "msgsndp", 0x1F, 0x0E, 0x04, 0x03ff0001,
8314 PPC_NONE, PPC2_ISA207S),
8315 GEN_HANDLER2_E(msgclrp, "msgclrp", 0x1F, 0x0E, 0x05, 0x03ff0001,
8316 PPC_NONE, PPC2_ISA207S),
8317 #endif
8319 #undef GEN_INT_ARITH_ADD
8320 #undef GEN_INT_ARITH_ADD_CONST
8321 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
8322 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
8323 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
8324 add_ca, compute_ca, compute_ov) \
8325 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
8326 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
8327 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
8328 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
8329 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
8330 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
8331 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
8332 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
8333 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
8334 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
8335 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
8336 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
8338 #undef GEN_INT_ARITH_DIVW
8339 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
8340 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
8341 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
8342 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
8343 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
8344 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
8345 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
8346 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
8347 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
8348 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
8349 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
8350 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
8352 #if defined(TARGET_PPC64)
8353 #undef GEN_INT_ARITH_DIVD
8354 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
8355 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8356 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
8357 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
8358 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
8359 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
8361 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
8362 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
8363 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
8364 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
8365 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
8366 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
8368 #undef GEN_INT_ARITH_MUL_HELPER
8369 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
8370 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8371 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
8372 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
8373 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
8374 #endif
8376 #undef GEN_INT_ARITH_SUBF
8377 #undef GEN_INT_ARITH_SUBF_CONST
8378 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
8379 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
8380 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
8381 add_ca, compute_ca, compute_ov) \
8382 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
8383 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
8384 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
8385 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
8386 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
8387 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
8388 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
8389 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
8390 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
8391 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
8392 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
8394 #undef GEN_LOGICAL1
8395 #undef GEN_LOGICAL2
8396 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
8397 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
8398 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
8399 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
8400 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
8401 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
8402 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
8403 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
8404 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
8405 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
8406 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
8407 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
8408 #if defined(TARGET_PPC64)
8409 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
8410 #endif
8412 #if defined(TARGET_PPC64)
8413 #undef GEN_PPC64_R2
8414 #undef GEN_PPC64_R4
8415 #define GEN_PPC64_R2(name, opc1, opc2) \
8416 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8417 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8418 PPC_64B)
8419 #define GEN_PPC64_R4(name, opc1, opc2) \
8420 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8421 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
8422 PPC_64B), \
8423 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8424 PPC_64B), \
8425 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
8426 PPC_64B)
8427 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
8428 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
8429 GEN_PPC64_R4(rldic, 0x1E, 0x04),
8430 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
8431 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
8432 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
8433 #endif
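/*
 * Editor's note -- expansion sketch, not part of the upstream file:
 * GEN_PPC64_R4(rldicl, 0x1E, 0x00) above emits four entries, rldicl0 to
 * rldicl3, all disassembling as "rldicl", at opc2 values 0x00, 0x01, 0x10
 * and 0x11 (opc3 stays 0xFF). The two OR-ed bits are encoding bits of the
 * MD form (the high bits of the shift amount and mask boundary) rather than
 * opcode bits, so every combination needs its own table slot;
 * GEN_PPC64_R2() does the same for just the 0x10 bit.
 */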
8435 #undef GEN_LD
8436 #undef GEN_LDU
8437 #undef GEN_LDUX
8438 #undef GEN_LDX_E
8439 #undef GEN_LDS
8440 #define GEN_LD(name, ldop, opc, type) \
8441 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8442 #define GEN_LDU(name, ldop, opc, type) \
8443 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8444 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
8445 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8446 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \
8447 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
8448 #define GEN_LDS(name, ldop, op, type) \
8449 GEN_LD(name, ldop, op | 0x20, type) \
8450 GEN_LDU(name, ldop, op | 0x21, type) \
8451 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
8452 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
8454 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
8455 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
8456 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
8457 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
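/*
 * Editor's note -- expansion sketch, not part of the upstream file: for the
 * first GEN_LDS() line above, GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER) expands
 * into
 *
 *     GEN_HANDLER(lbz,   0x22, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
 *     GEN_HANDLER(lbzu,  0x23, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
 *     GEN_HANDLER(lbzux, 0x1F, 0x17, 0x03, 0x00000001, PPC_INTEGER),
 *
 * plus GEN_LDX(lbz, ld8u, 0x17, 0x02, PPC_INTEGER) for the plain indexed
 * form (GEN_LDX itself is defined earlier in the file), i.e. the D-form
 * load, its update form, and the two X-form variants.
 */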
8458 #if defined(TARGET_PPC64)
8459 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
8460 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
8461 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B)
8462 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B)
8463 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
8465 /* HV/P7 and later only */
8466 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
8467 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
8468 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
8469 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
8470 #endif
8471 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
8472 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
8474 /* External PID based load */
8475 #undef GEN_LDEPX
8476 #define GEN_LDEPX(name, ldop, opc2, opc3) \
8477 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \
8478 0x00000001, PPC_NONE, PPC2_BOOKE206),
8480 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
8481 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
8482 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
8483 #if defined(TARGET_PPC64)
8484 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00)
8485 #endif
8487 #undef GEN_ST
8488 #undef GEN_STU
8489 #undef GEN_STUX
8490 #undef GEN_STX_E
8491 #undef GEN_STS
8492 #define GEN_ST(name, stop, opc, type) \
8493 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8494 #define GEN_STU(name, stop, opc, type) \
8495 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
8496 #define GEN_STUX(name, stop, opc2, opc3, type) \
8497 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8498 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \
8499 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
8500 #define GEN_STS(name, stop, op, type) \
8501 GEN_ST(name, stop, op | 0x20, type) \
8502 GEN_STU(name, stop, op | 0x21, type) \
8503 GEN_STUX(name, stop, 0x17, op | 0x01, type) \
8504 GEN_STX(name, stop, 0x17, op | 0x00, type)
8506 GEN_STS(stb, st8, 0x06, PPC_INTEGER)
8507 GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
8508 GEN_STS(stw, st32, 0x04, PPC_INTEGER)
8509 #if defined(TARGET_PPC64)
8510 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B)
8511 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B)
8512 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
8513 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
8514 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
8515 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
8516 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
8517 #endif
8518 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
8519 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
8521 #undef GEN_STEPX
8522 #define GEN_STEPX(name, ldop, opc2, opc3) \
8523 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \
8524 0x00000001, PPC_NONE, PPC2_BOOKE206),
8526 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
8527 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
8528 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
8529 #if defined(TARGET_PPC64)
8530 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1D, 0x04)
8531 #endif
8533 #undef GEN_CRLOGIC
8534 #define GEN_CRLOGIC(name, tcg_op, opc) \
8535 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
8536 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
8537 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
8538 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
8539 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
8540 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
8541 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
8542 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
8543 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
8545 #undef GEN_MAC_HANDLER
8546 #define GEN_MAC_HANDLER(name, opc2, opc3) \
8547 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
8548 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
8549 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
8550 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
8551 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
8552 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
8553 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
8554 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
8555 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
8556 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
8557 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
8558 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
8559 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
8560 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
8561 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
8562 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
8563 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
8564 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
8565 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
8566 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
8567 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
8568 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
8569 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
8570 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
8571 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
8572 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
8573 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
8574 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
8575 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
8576 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
8577 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
8578 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
8579 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
8580 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
8581 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
8582 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
8583 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
8584 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
8585 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
8586 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
8587 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
8588 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
8589 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
8591 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
8592 PPC_NONE, PPC2_TM),
8593 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \
8594 PPC_NONE, PPC2_TM),
8595 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
8596 PPC_NONE, PPC2_TM),
8597 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
8598 PPC_NONE, PPC2_TM),
8599 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
8600 PPC_NONE, PPC2_TM),
8601 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
8602 PPC_NONE, PPC2_TM),
8603 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
8604 PPC_NONE, PPC2_TM),
8605 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
8606 PPC_NONE, PPC2_TM),
8607 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
8608 PPC_NONE, PPC2_TM),
8609 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
8610 PPC_NONE, PPC2_TM),
8611 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
8612 PPC_NONE, PPC2_TM),
8613 };
8614 #include "translate/fp-ops.c.inc"
8616 #include "translate/vmx-ops.c.inc"
8618 #include "translate/vsx-ops.c.inc"
8620 #include "translate/dfp-ops.c.inc"
8622 #include "translate/spe-ops.c.inc"
8625 /*****************************************************************************/
8626 /* Opcode types */
8627 enum {
8628     PPC_DIRECT   = 0, /* Opcode routine */
8629     PPC_INDIRECT = 1, /* Indirect opcode table */
8630 };
8632 #define PPC_OPCODE_MASK 0x3
8634 static inline int is_indirect_opcode(void *handler)
8635 {
8636     return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
8637 }
8639 static inline opc_handler_t **ind_table(void *handler)
8640 {
8641     return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
8642 }
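/*
 * Editor's note -- illustrative sketch, not part of the upstream file: the
 * low PPC_OPCODE_MASK bits of a table slot tag it as a nested sub-table.
 * For a freshly allocated sub-table the round trip looks like:
 *
 *     opc_handler_t **sub = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
 *     table[idx] = (opc_handler_t *)((uintptr_t)sub | PPC_INDIRECT);
 *     assert(is_indirect_opcode(table[idx]));
 *     assert(ind_table(table[idx]) == sub);
 *
 * This relies on the allocator returning pointers aligned to at least
 * 4 bytes, which leaves the two low bits free for the tag.
 */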
8644 /* Instruction table creation */
8645 /* Opcodes tables creation */
8646 static void fill_new_table(opc_handler_t **table, int len)
8647 {
8648     int i;
8650     for (i = 0; i < len; i++) {
8651         table[i] = &invalid_handler;
8652     }
8653 }
8655 static int create_new_table(opc_handler_t **table, unsigned char idx)
8656 {
8657     opc_handler_t **tmp;
8659     tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
8660     fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
8661     table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
8663     return 0;
8664 }
8666 static int insert_in_table(opc_handler_t **table, unsigned char idx,
8667                            opc_handler_t *handler)
8668 {
8669     if (table[idx] != &invalid_handler) {
8670         return -1;
8671     }
8672     table[idx] = handler;
8674     return 0;
8675 }
8677 static int register_direct_insn(opc_handler_t **ppc_opcodes,
8678                                 unsigned char idx, opc_handler_t *handler)
8679 {
8680     if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
8681         printf("*** ERROR: opcode %02x already assigned in main "
8682                "opcode table\n", idx);
8683 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
8684         printf(" Registered handler '%s' - new handler '%s'\n",
8685                ppc_opcodes[idx]->oname, handler->oname);
8686 #endif
8687         return -1;
8688     }
8690     return 0;
8691 }
8693 static int register_ind_in_table(opc_handler_t **table,
8694                                  unsigned char idx1, unsigned char idx2,
8695                                  opc_handler_t *handler)
8696 {
8697     if (table[idx1] == &invalid_handler) {
8698         if (create_new_table(table, idx1) < 0) {
8699             printf("*** ERROR: unable to create indirect table "
8700                    "idx=%02x\n", idx1);
8701             return -1;
8702         }
8703     } else {
8704         if (!is_indirect_opcode(table[idx1])) {
8705             printf("*** ERROR: idx %02x already assigned to a direct "
8706                    "opcode\n", idx1);
8707 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
8708             printf(" Registered handler '%s' - new handler '%s'\n",
8709                    ind_table(table[idx1])[idx2]->oname, handler->oname);
8710 #endif
8711             return -1;
8712         }
8713     }
8714     if (handler != NULL &&
8715         insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
8716         printf("*** ERROR: opcode %02x already assigned in "
8717                "opcode table %02x\n", idx2, idx1);
8718 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
8719         printf(" Registered handler '%s' - new handler '%s'\n",
8720                ind_table(table[idx1])[idx2]->oname, handler->oname);
8721 #endif
8722         return -1;
8723     }
8725     return 0;
8726 }
8728 static int register_ind_insn(opc_handler_t **ppc_opcodes,
8729                              unsigned char idx1, unsigned char idx2,
8730                              opc_handler_t *handler)
8731 {
8732     return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
8733 }
8735 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
8736                                 unsigned char idx1, unsigned char idx2,
8737                                 unsigned char idx3, opc_handler_t *handler)
8738 {
8739     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
8740         printf("*** ERROR: unable to join indirect table idx "
8741                "[%02x-%02x]\n", idx1, idx2);
8742         return -1;
8743     }
8744     if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
8745                               handler) < 0) {
8746         printf("*** ERROR: unable to insert opcode "
8747                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
8748         return -1;
8749     }
8751     return 0;
8752 }
8754 static int register_trplind_insn(opc_handler_t **ppc_opcodes,
8755                                  unsigned char idx1, unsigned char idx2,
8756                                  unsigned char idx3, unsigned char idx4,
8757                                  opc_handler_t *handler)
8758 {
8759     opc_handler_t **table;
8761     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
8762         printf("*** ERROR: unable to join indirect table idx "
8763                "[%02x-%02x]\n", idx1, idx2);
8764         return -1;
8765     }
8766     table = ind_table(ppc_opcodes[idx1]);
8767     if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
8768         printf("*** ERROR: unable to join 2nd-level indirect table idx "
8769                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
8770         return -1;
8771     }
8772     table = ind_table(table[idx2]);
8773     if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
8774         printf("*** ERROR: unable to insert opcode "
8775                "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
8776         return -1;
8777     }
8778     return 0;
8779 }
8780 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
8781 {
8782     if (insn->opc2 != 0xFF) {
8783         if (insn->opc3 != 0xFF) {
8784             if (insn->opc4 != 0xFF) {
8785                 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
8786                                           insn->opc3, insn->opc4,
8787                                           &insn->handler) < 0) {
8788                     return -1;
8789                 }
8790             } else {
8791                 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
8792                                          insn->opc3, &insn->handler) < 0) {
8793                     return -1;
8794                 }
8795             }
8796         } else {
8797             if (register_ind_insn(ppc_opcodes, insn->opc1,
8798                                   insn->opc2, &insn->handler) < 0) {
8799                 return -1;
8800             }
8801         }
8802     } else {
8803         if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
8804             return -1;
8805         }
8806     }
8808     return 0;
8809 }
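/*
 * Editor's note -- worked example, not part of the upstream file: the 0xFF
 * sentinels in an opcode_t decide how deep register_insn() files it, e.g.
 *
 *     opcode_t dbl = { .opc1 = 0x1F, .opc2 = 0x13, .opc3 = 0x0E, .opc4 = 0xFF };
 *     register_insn(cpu->opcodes, &dbl);  // two levels: register_dblind_insn()
 *
 *     opcode_t dir = { .opc1 = 0x09, .opc2 = 0xFF, .opc3 = 0xFF, .opc4 = 0xFF };
 *     register_insn(cpu->opcodes, &dir);  // top level only: register_direct_insn()
 *
 * (handler and name fields omitted; mtspr and dozi in the table above are
 * real instances of these two shapes, and a non-0xFF opc4 would take the
 * register_trplind_insn() path instead.)
 */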
8811 static int test_opcode_table(opc_handler_t **table, int len)
8812 {
8813     int i, count, tmp;
8815     for (i = 0, count = 0; i < len; i++) {
8816         /* Consistency fixup */
8817         if (table[i] == NULL) {
8818             table[i] = &invalid_handler;
8819         }
8820         if (table[i] != &invalid_handler) {
8821             if (is_indirect_opcode(table[i])) {
8822                 tmp = test_opcode_table(ind_table(table[i]),
8823                                         PPC_CPU_INDIRECT_OPCODES_LEN);
8824                 if (tmp == 0) {
8825                     free(table[i]);
8826                     table[i] = &invalid_handler;
8827                 } else {
8828                     count++;
8829                 }
8830             } else {
8831                 count++;
8832             }
8833         }
8834     }
8836     return count;
8837 }
8839 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
8840 {
8841     if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
8842         printf("*** WARNING: no opcode defined !\n");
8843     }
8844 }
8846 /*****************************************************************************/
8847 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
8848 {
8849     PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
8850     opcode_t *opc;
8852     fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
8853     for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
8854         if (((opc->handler.type & pcc->insns_flags) != 0) ||
8855             ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
8856             if (register_insn(cpu->opcodes, opc) < 0) {
8857                 error_setg(errp, "ERROR initializing PowerPC instruction "
8858                            "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
8859                            opc->opc3);
8860                 return;
8861             }
8862         }
8863     }
8864     fix_opcode_tables(cpu->opcodes);
8865     fflush(stdout);
8866     fflush(stderr);
8867 }
8869 void destroy_ppc_opcodes(PowerPCCPU *cpu)
8870 {
8871     opc_handler_t **table, **table_2;
8872     int i, j, k;
8874     for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
8875         if (cpu->opcodes[i] == &invalid_handler) {
8876             continue;
8877         }
8878         if (is_indirect_opcode(cpu->opcodes[i])) {
8879             table = ind_table(cpu->opcodes[i]);
8880             for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
8881                 if (table[j] == &invalid_handler) {
8882                     continue;
8883                 }
8884                 if (is_indirect_opcode(table[j])) {
8885                     table_2 = ind_table(table[j]);
8886                     for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
8887                         if (table_2[k] != &invalid_handler &&
8888                             is_indirect_opcode(table_2[k])) {
8889                             g_free((opc_handler_t *)((uintptr_t)table_2[k] &
8890                                                      ~PPC_INDIRECT));
8891                         }
8892                     }
8893                     g_free((opc_handler_t *)((uintptr_t)table[j] &
8894                                              ~PPC_INDIRECT));
8895                 }
8896             }
8897             g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
8898                                      ~PPC_INDIRECT));
8899         }
8900     }
8901 }
8903 #if defined(PPC_DUMP_CPU)
8904 static void dump_ppc_insns(CPUPPCState *env)
8905 {
8906     opc_handler_t **table, *handler;
8907     const char *p, *q;
8908     uint8_t opc1, opc2, opc3, opc4;
8910     printf("Instructions set:\n");
8911     /* opc1 is 6 bits long */
8912     for (opc1 = 0x00; opc1 < PPC_CPU_OPCODES_LEN; opc1++) {
8913         table = env->opcodes;
8914         handler = table[opc1];
8915         if (is_indirect_opcode(handler)) {
8916             /* opc2 is 5 bits long */
8917             for (opc2 = 0; opc2 < PPC_CPU_INDIRECT_OPCODES_LEN; opc2++) {
8918                 table = env->opcodes;
8919                 handler = env->opcodes[opc1];
8920                 table = ind_table(handler);
8921                 handler = table[opc2];
8922                 if (is_indirect_opcode(handler)) {
8923                     table = ind_table(handler);
8924                     /* opc3 is 5 bits long */
8925                     for (opc3 = 0; opc3 < PPC_CPU_INDIRECT_OPCODES_LEN;
8926                          opc3++) {
8927                         handler = table[opc3];
8928                         if (is_indirect_opcode(handler)) {
8929                             table = ind_table(handler);
8930                             /* opc4 is 5 bits long */
8931                             for (opc4 = 0; opc4 < PPC_CPU_INDIRECT_OPCODES_LEN;
8932                                  opc4++) {
8933                                 handler = table[opc4];
8934                                 if (handler->handler != &gen_invalid) {
8935                                     printf("INSN: %02x %02x %02x %02x -- "
8936                                            "(%02d %04d %02d) : %s\n",
8937                                            opc1, opc2, opc3, opc4,
8938                                            opc1, (opc3 << 5) | opc2, opc4,
8939                                            handler->oname);
8940                                 }
8941                             }
8942                         } else {
8943                             if (handler->handler != &gen_invalid) {
8944                                 /* Special hack to properly dump SPE insns */
8945                                 p = strchr(handler->oname, '_');
8946                                 if (p == NULL) {
8947                                     printf("INSN: %02x %02x %02x (%02d %04d) : "
8948                                            "%s\n",
8949                                            opc1, opc2, opc3, opc1,
8950                                            (opc3 << 5) | opc2,
8951                                            handler->oname);
8952                                 } else {
8953                                     q = "speundef";
8954                                     if ((p - handler->oname) != strlen(q)
8955                                         || (memcmp(handler->oname, q, strlen(q))
8956                                             != 0)) {
8957                                         /* First instruction */
8958                                         printf("INSN: %02x %02x %02x"
8959                                                "(%02d %04d) : %.*s\n",
8960                                                opc1, opc2 << 1, opc3, opc1,
8961                                                (opc3 << 6) | (opc2 << 1),
8962                                                (int)(p - handler->oname),
8963                                                handler->oname);
8964                                     }
8965                                     if (strcmp(p + 1, q) != 0) {
8966                                         /* Second instruction */
8967                                         printf("INSN: %02x %02x %02x "
8968                                                "(%02d %04d) : %s\n", opc1,
8969                                                (opc2 << 1) | 1, opc3, opc1,
8970                                                (opc3 << 6) | (opc2 << 1) | 1,
8971                                                p + 1);
8972                                     }
8973                                 }
8974                             }
8975                         }
8976                     }
8977                 } else {
8978                     if (handler->handler != &gen_invalid) {
8979                         printf("INSN: %02x %02x -- (%02d %04d) : %s\n",
8980                                opc1, opc2, opc1, opc2, handler->oname);
8981                     }
8982                 }
8983             }
8984         } else {
8985             if (handler->handler != &gen_invalid) {
8986                 printf("INSN: %02x -- -- (%02d ----) : %s\n",
8987                        opc1, opc1, handler->oname);
8988             }
8989         }
8990     }
8991 }
8992 #endif
8993 int ppc_fixup_cpu(PowerPCCPU *cpu)
8994 {
8995     CPUPPCState *env = &cpu->env;
8997     /*
8998      * TCG doesn't (yet) emulate some groups of instructions that are
8999      * implemented on some otherwise supported CPUs (e.g. VSX and
9000      * decimal floating point instructions on POWER7). We remove
9001      * unsupported instruction groups from the cpu state's instruction
9002      * masks and hope the guest can cope. For at least the pseries
9003      * machine, the unavailability of these instructions can be
9004      * advertised to the guest via the device tree.
9005      */
9006     if ((env->insns_flags & ~PPC_TCG_INSNS)
9007         || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
9008         warn_report("Disabling some instructions which are not "
9009                     "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
9010                     env->insns_flags & ~PPC_TCG_INSNS,
9011                     env->insns_flags2 & ~PPC_TCG_INSNS2);
9012     }
9013     env->insns_flags &= PPC_TCG_INSNS;
9014     env->insns_flags2 &= PPC_TCG_INSNS2;
9015     return 0;
9016 }
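/*
 * Editor's note -- a minimal worked example of the masking above, using
 * made-up bit values rather than real PPC_* constants: with
 * env->insns_flags2 == 0b0110 and PPC_TCG_INSNS2 == 0b0010, the test
 * (0b0110 & ~0b0010) == 0b0100 is non-zero, so warn_report() names bit
 * 0b0100 and the following &= leaves insns_flags2 == 0b0010, i.e. only
 * the group TCG can actually translate.
 */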
9019 void ppc_cpu_dump_statistics(CPUState *cs, int flags)
9020 {
9021 #if defined(DO_PPC_STATISTICS)
9022     PowerPCCPU *cpu = POWERPC_CPU(cs);
9023     opc_handler_t **t1, **t2, **t3, *handler;
9024     int op1, op2, op3;
9026     t1 = cpu->env.opcodes;
9027     for (op1 = 0; op1 < 64; op1++) {
9028         handler = t1[op1];
9029         if (is_indirect_opcode(handler)) {
9030             t2 = ind_table(handler);
9031             for (op2 = 0; op2 < 32; op2++) {
9032                 handler = t2[op2];
9033                 if (is_indirect_opcode(handler)) {
9034                     t3 = ind_table(handler);
9035                     for (op3 = 0; op3 < 32; op3++) {
9036                         handler = t3[op3];
9037                         if (handler->count == 0) {
9038                             continue;
9039                         }
9040                         qemu_printf("%02x %02x %02x (%02x %04d) %16s: "
9041                                     "%016" PRIx64 " %" PRId64 "\n",
9042                                     op1, op2, op3, op1, (op3 << 5) | op2,
9043                                     handler->oname,
9044                                     handler->count, handler->count);
9045                     }
9046                 } else {
9047                     if (handler->count == 0) {
9048                         continue;
9049                     }
9050                     qemu_printf("%02x %02x (%02x %04d) %16s: "
9051                                 "%016" PRIx64 " %" PRId64 "\n",
9052                                 op1, op2, op1, op2, handler->oname,
9053                                 handler->count, handler->count);
9054                 }
9055             }
9056         } else {
9057             if (handler->count == 0) {
9058                 continue;
9059             }
9060             qemu_printf("%02x (%02x ) %16s: %016" PRIx64
9061                         " %" PRId64 "\n",
9062                         op1, op1, handler->oname,
9063                         handler->count, handler->count);
9064         }
9065     }
9066 #endif
9067 }
9069 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
9070 {
9071     opc_handler_t **table, *handler;
9072     uint32_t inval;
9074     ctx->opcode = insn;
9076     LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
9077               insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
9078               ctx->le_mode ? "little" : "big");
9080     table = cpu->opcodes;
9081     handler = table[opc1(insn)];
9082     if (is_indirect_opcode(handler)) {
9083         table = ind_table(handler);
9084         handler = table[opc2(insn)];
9085         if (is_indirect_opcode(handler)) {
9086             table = ind_table(handler);
9087             handler = table[opc3(insn)];
9088             if (is_indirect_opcode(handler)) {
9089                 table = ind_table(handler);
9090                 handler = table[opc4(insn)];
9091             }
9092         }
9093     }
9095     /* Is opcode *REALLY* valid ? */
9096     if (unlikely(handler->handler == &gen_invalid)) {
9097         qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
9098                       "%02x - %02x - %02x - %02x (%08x) "
9099                       TARGET_FMT_lx "\n",
9100                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
9101                       insn, ctx->cia);
9102         return false;
9103     }
9105     if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
9106                  && Rc(insn))) {
9107         inval = handler->inval2;
9108     } else {
9109         inval = handler->inval1;
9110     }
9112     if (unlikely((insn & inval) != 0)) {
9113         qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
9114                       "%02x - %02x - %02x - %02x (%08x) "
9115                       TARGET_FMT_lx "\n", insn & inval,
9116                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
9117                       insn, ctx->cia);
9118         return false;
9119     }
9121     handler->handler(ctx);
9122     return true;
9123 }
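/*
 * Editor's note -- worked example, not part of the upstream file: inval is
 * the per-handler mask of encoding bits that must be zero (the mask
 * argument of the GEN_HANDLER() entries above, e.g. 0x03C00001 for dcbf).
 * Any guest encoding that sets one of those bits makes (insn & inval)
 * non-zero here and is logged and rejected just like an unknown opcode.
 */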
9125 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
9126 {
9127     DisasContext *ctx = container_of(dcbase, DisasContext, base);
9128     CPUPPCState *env = cs->env_ptr;
9129     uint32_t hflags = ctx->base.tb->flags;
9130     int bound;
9132     ctx->exception = POWERPC_EXCP_NONE;
9133     ctx->spr_cb = env->spr_cb;
9134     ctx->pr = (hflags >> HFLAGS_PR) & 1;
9135     ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
9136     ctx->dr = (hflags >> HFLAGS_DR) & 1;
9137     ctx->hv = (hflags >> HFLAGS_HV) & 1;
9138     ctx->insns_flags = env->insns_flags;
9139     ctx->insns_flags2 = env->insns_flags2;
9140     ctx->access_type = -1;
9141     ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
9142     ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
9143     ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
9144     ctx->flags = env->flags;
9145 #if defined(TARGET_PPC64)
9146     ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
9147     ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
9148 #endif
9149     ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
9150         || env->mmu_model == POWERPC_MMU_601
9151         || env->mmu_model & POWERPC_MMU_64;
9153     ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
9154     ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
9155     ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
9156     ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
9157     ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
9158     ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
9160     ctx->singlestep_enabled = 0;
9161     if ((hflags >> HFLAGS_SE) & 1) {
9162         ctx->singlestep_enabled |= CPU_SINGLE_STEP;
9163     }
9164     if ((hflags >> HFLAGS_BE) & 1) {
9165         ctx->singlestep_enabled |= CPU_BRANCH_STEP;
9166     }
9167     if (unlikely(ctx->base.singlestep_enabled)) {
9168         ctx->singlestep_enabled |= GDBSTUB_SINGLE_STEP;
9169     }
9171     bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4;
9172     ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
9173 }
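/*
 * Editor's note -- arithmetic sketch, not part of the upstream file: the
 * bound computed above is the number of 4-byte instruction slots left on
 * the current page. With 4 KiB pages (TARGET_PAGE_MASK == ~0xfff) and
 * pc_first == 0x1000ff8, pc_first | TARGET_PAGE_MASK is -8 as a signed
 * value, so bound == 8 / 4 == 2 and at most two more instructions are
 * translated before the block stops at the page boundary.
 */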
9175 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
9176 {
9177 }
9179 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
9180 {
9181     tcg_gen_insn_start(dcbase->pc_next);
9182 }
9184 static bool ppc_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
9185                                     const CPUBreakpoint *bp)
9186 {
9187     DisasContext *ctx = container_of(dcbase, DisasContext, base);
9189     gen_debug_exception(ctx);
9190     /*
9191      * The address covered by the breakpoint must be included in
9192      * [tb->pc, tb->pc + tb->size) in order for it to be properly
9193      * cleared -- thus we increment the PC here so that the logic
9194      * setting tb->size below does the right thing.
9195      */
9196     ctx->base.pc_next += 4;
9197     return true;
9198 }
9200 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
9201 {
9202     DisasContext *ctx = container_of(dcbase, DisasContext, base);
9203     PowerPCCPU *cpu = POWERPC_CPU(cs);
9204     CPUPPCState *env = cs->env_ptr;
9205     uint32_t insn;
9206     bool ok;
9208     LOG_DISAS("----------------\n");
9209     LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
9210               ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);
9212     ctx->cia = ctx->base.pc_next;
9213     insn = translator_ldl_swap(env, ctx->base.pc_next, need_byteswap(ctx));
9214     ctx->base.pc_next += 4;
9216     ok = decode_legacy(cpu, ctx, insn);
9217     if (!ok) {
9218         gen_invalid(ctx);
9219     }
9221 #if defined(DO_PPC_STATISTICS)
9222     handler->count++;
9223 #endif
9225     /* Check trace mode exceptions */
9226     if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP &&
9227                  (ctx->base.pc_next <= 0x100 || ctx->base.pc_next > 0xF00) &&
9228                  ctx->exception != POWERPC_EXCP_TRAP &&
9229                  ctx->exception != POWERPC_EXCP_BRANCH &&
9230                  ctx->base.is_jmp != DISAS_NORETURN)) {
9231         uint32_t excp = gen_prep_dbgex(ctx);
9232         gen_exception_nip(ctx, excp, ctx->base.pc_next);
9233     }
9235     if (tcg_check_temp_count()) {
9236         qemu_log("Opcode %02x %02x %02x %02x (%08x) leaked "
9237                  "temporaries\n", opc1(ctx->opcode), opc2(ctx->opcode),
9238                  opc3(ctx->opcode), opc4(ctx->opcode), ctx->opcode);
9239     }
9241     if (ctx->base.is_jmp == DISAS_NEXT
9242         && ctx->exception != POWERPC_EXCP_NONE) {
9243         ctx->base.is_jmp = DISAS_TOO_MANY;
9244     }
9245 }
9247 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
9248 {
9249     DisasContext *ctx = container_of(dcbase, DisasContext, base);
9251     if (ctx->base.is_jmp == DISAS_NORETURN) {
9252         return;
9253     }
9255     if (ctx->exception == POWERPC_EXCP_NONE) {
9256         gen_goto_tb(ctx, 0, ctx->base.pc_next);
9257     } else if (ctx->exception != POWERPC_EXCP_BRANCH) {
9258         if (unlikely(ctx->base.singlestep_enabled)) {
9259             gen_debug_exception(ctx);
9260         }
9261         /* Generate the return instruction */
9262         tcg_gen_exit_tb(NULL, 0);
9263     }
9264 }
9266 static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
9267 {
9268     qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
9269     log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
9270 }
9272 static const TranslatorOps ppc_tr_ops = {
9273     .init_disas_context = ppc_tr_init_disas_context,
9274     .tb_start = ppc_tr_tb_start,
9275     .insn_start = ppc_tr_insn_start,
9276     .breakpoint_check = ppc_tr_breakpoint_check,
9277     .translate_insn = ppc_tr_translate_insn,
9278     .tb_stop = ppc_tr_tb_stop,
9279     .disas_log = ppc_tr_disas_log,
9280 };
9282 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns)
9283 {
9284     DisasContext ctx;
9286     translator_loop(&ppc_tr_ops, &ctx.base, cs, tb, max_insns);
9287 }
9289 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
9290                           target_ulong *data)
9291 {
9292     env->nip = data[0];