ppc/ppc405: Restore TCR and STR write handlers
[qemu.git] / target / ppc / translate.c
blobeb45f679d34f1cc7c6a8aaa0514161426020baa6
1 /*
2 * PowerPC emulation for qemu: main translation routines.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 * Copyright (C) 2011 Freescale Semiconductor, Inc.
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2.1 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
#include "qemu/osdep.h"
#include "cpu.h"
#include "internal.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg/tcg-op.h"
#include "tcg/tcg-op-gvec.h"
#include "qemu/host-utils.h"
#include "qemu/log.h"
#include "qemu/main-loop.h"
#include "exec/cpu_ldst.h"
#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "exec/translator.h"
#include "exec/log.h"
#include "qemu/atomic128.h"
#include "spr_tcg.h"
#include "qemu/qemu-print.h"
#include "qapi/error.h"
43 #define CPU_SINGLE_STEP 0x1
44 #define CPU_BRANCH_STEP 0x2
46 /* Include definitions for instructions classes and implementations flags */
47 /* #define PPC_DEBUG_DISAS */
49 #ifdef PPC_DEBUG_DISAS
50 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
51 #else
52 # define LOG_DISAS(...) do { } while (0)
53 #endif
/*****************************************************************************/
/* Code translation helpers */

/* global register indexes */
/* Backing storage for the names passed to tcg_global_mem_new* below. */
static char cpu_reg_names[10 * 3 + 22 * 4 /* GPR */
                          + 10 * 4 + 22 * 5 /* SPE GPRh */
                          + 8 * 5 /* CRF */];
static TCGv cpu_gpr[32];
static TCGv cpu_gprh[32];          /* SPE high halves of the GPRs */
static TCGv_i32 cpu_crf[8];        /* condition register fields */
static TCGv cpu_nip;               /* next instruction pointer */
static TCGv cpu_msr;
static TCGv cpu_ctr;
static TCGv cpu_lr;
#if defined(TARGET_PPC64)
static TCGv cpu_cfar;
#endif
/* XER is kept split: SO/OV/CA (and ISA v3.00 OV32/CA32) live separately. */
static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
static TCGv cpu_reserve;           /* lwarx/stwcx. reservation address */
static TCGv cpu_reserve_val;
static TCGv cpu_fpscr;
static TCGv_i32 cpu_access_type;
77 #include "exec/gen-icount.h"
79 void ppc_translate_init(void)
81 int i;
82 char *p;
83 size_t cpu_reg_names_size;
85 p = cpu_reg_names;
86 cpu_reg_names_size = sizeof(cpu_reg_names);
88 for (i = 0; i < 8; i++) {
89 snprintf(p, cpu_reg_names_size, "crf%d", i);
90 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
91 offsetof(CPUPPCState, crf[i]), p);
92 p += 5;
93 cpu_reg_names_size -= 5;
96 for (i = 0; i < 32; i++) {
97 snprintf(p, cpu_reg_names_size, "r%d", i);
98 cpu_gpr[i] = tcg_global_mem_new(cpu_env,
99 offsetof(CPUPPCState, gpr[i]), p);
100 p += (i < 10) ? 3 : 4;
101 cpu_reg_names_size -= (i < 10) ? 3 : 4;
102 snprintf(p, cpu_reg_names_size, "r%dH", i);
103 cpu_gprh[i] = tcg_global_mem_new(cpu_env,
104 offsetof(CPUPPCState, gprh[i]), p);
105 p += (i < 10) ? 4 : 5;
106 cpu_reg_names_size -= (i < 10) ? 4 : 5;
109 cpu_nip = tcg_global_mem_new(cpu_env,
110 offsetof(CPUPPCState, nip), "nip");
112 cpu_msr = tcg_global_mem_new(cpu_env,
113 offsetof(CPUPPCState, msr), "msr");
115 cpu_ctr = tcg_global_mem_new(cpu_env,
116 offsetof(CPUPPCState, ctr), "ctr");
118 cpu_lr = tcg_global_mem_new(cpu_env,
119 offsetof(CPUPPCState, lr), "lr");
121 #if defined(TARGET_PPC64)
122 cpu_cfar = tcg_global_mem_new(cpu_env,
123 offsetof(CPUPPCState, cfar), "cfar");
124 #endif
126 cpu_xer = tcg_global_mem_new(cpu_env,
127 offsetof(CPUPPCState, xer), "xer");
128 cpu_so = tcg_global_mem_new(cpu_env,
129 offsetof(CPUPPCState, so), "SO");
130 cpu_ov = tcg_global_mem_new(cpu_env,
131 offsetof(CPUPPCState, ov), "OV");
132 cpu_ca = tcg_global_mem_new(cpu_env,
133 offsetof(CPUPPCState, ca), "CA");
134 cpu_ov32 = tcg_global_mem_new(cpu_env,
135 offsetof(CPUPPCState, ov32), "OV32");
136 cpu_ca32 = tcg_global_mem_new(cpu_env,
137 offsetof(CPUPPCState, ca32), "CA32");
139 cpu_reserve = tcg_global_mem_new(cpu_env,
140 offsetof(CPUPPCState, reserve_addr),
141 "reserve_addr");
142 cpu_reserve_val = tcg_global_mem_new(cpu_env,
143 offsetof(CPUPPCState, reserve_val),
144 "reserve_val");
146 cpu_fpscr = tcg_global_mem_new(cpu_env,
147 offsetof(CPUPPCState, fpscr), "fpscr");
149 cpu_access_type = tcg_global_mem_new_i32(cpu_env,
150 offsetof(CPUPPCState, access_type),
151 "access_type");
/* internal defines */
/* Per-translation-block state threaded through the decoder. */
struct DisasContext {
    DisasContextBase base;
    target_ulong cia;               /* current instruction address */
    uint32_t opcode;
    /* Routine used to access memory */
    bool pr, hv, dr, le_mode;       /* MSR state captured at TB start */
    bool lazy_tlb_flush;
    bool need_access_type;
    int mem_idx;
    int access_type;
    /* Translation flags */
    MemOp default_tcg_memop_mask;
#if defined(TARGET_PPC64)
    bool sf_mode;                   /* 64-bit ("sixty-four") mode */
    bool has_cfar;
#endif
    bool fpu_enabled;
    bool altivec_enabled;
    bool vsx_enabled;
    bool spe_enabled;
    bool tm_enabled;
    bool gtse;
    bool hr;
    bool mmcr0_pmcc0;
    bool mmcr0_pmcc1;
    bool pmu_insn_cnt;
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;
    uint32_t flags;
    uint64_t insns_flags;
    uint64_t insns_flags2;
};
188 #define DISAS_EXIT DISAS_TARGET_0 /* exit to main loop, pc updated */
189 #define DISAS_EXIT_UPDATE DISAS_TARGET_1 /* exit to main loop, pc stale */
190 #define DISAS_CHAIN DISAS_TARGET_2 /* lookup next tb, pc updated */
191 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3 /* lookup next tb, pc stale */
/* Return true iff byteswap is needed in a scalar memop */
static inline bool need_byteswap(const DisasContext *ctx)
{
    /* Swap whenever guest access endianness differs from the host's. */
#if defined(TARGET_WORDS_BIGENDIAN)
    return ctx->le_mode;
#else
    return !ctx->le_mode;
#endif
}

/* True when active word size < size of target_long. */
#ifdef TARGET_PPC64
# define NARROW_MODE(C)  (!(C)->sf_mode)
#else
# define NARROW_MODE(C)  0
#endif
/* Per-opcode dispatch entry. */
struct opc_handler_t {
    /* invalid bits for instruction 1 (Rc(opcode) == 0) */
    uint32_t inval1;
    /* invalid bits for instruction 2 (Rc(opcode) == 1) */
    uint32_t inval2;
    /* instruction type */
    uint64_t type;
    /* extended instruction type */
    uint64_t type2;
    /* handler */
    void (*handler)(DisasContext *ctx);
};
/* SPR load/store helpers */
/* Copy env->spr[reg] into TCG value t. */
static inline void gen_load_spr(TCGv t, int reg)
{
    tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

/* Store TCG value t into env->spr[reg]. */
static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}
static inline void gen_set_access_type(DisasContext *ctx, int access_type)
{
    /* Only emit a store when the tracked value actually changes. */
    if (ctx->need_access_type && ctx->access_type != access_type) {
        tcg_gen_movi_i32(cpu_access_type, access_type);
        ctx->access_type = access_type;
    }
}
/* Set the next-instruction pointer, truncating to 32 bits in narrow mode. */
static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_nip, nip);
}
/* Raise exception excp with error code, PC rewound to the faulting insn. */
static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
{
    TCGv_i32 t0, t1;

    /*
     * These are all synchronous exceptions, we set the PC back to the
     * faulting instruction
     */
    gen_update_nip(ctx, ctx->cia);
    t0 = tcg_const_i32(excp);
    t1 = tcg_const_i32(error);
    gen_helper_raise_exception_err(cpu_env, t0, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    ctx->base.is_jmp = DISAS_NORETURN;
}

/* Raise exception excp, PC rewound to the faulting instruction. */
static void gen_exception(DisasContext *ctx, uint32_t excp)
{
    TCGv_i32 t0;

    /*
     * These are all synchronous exceptions, we set the PC back to the
     * faulting instruction
     */
    gen_update_nip(ctx, ctx->cia);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->base.is_jmp = DISAS_NORETURN;
}

/* Raise exception excp with the PC set to an explicit nip. */
static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
                              target_ulong nip)
{
    TCGv_i32 t0;

    gen_update_nip(ctx, nip);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->base.is_jmp = DISAS_NORETURN;
}
/* Mark the start of an icount-sensitive I/O instruction. */
static void gen_icount_io_start(DisasContext *ctx)
{
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_io_start();
        /*
         * An I/O instruction must be last in the TB.
         * Chain to the next TB, and let the code from gen_tb_start
         * decide if we need to return to the main loop.
         * Doing this first also allows this value to be overridden.
         */
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
}
/*
 * Tells the caller what is the appropriate exception to generate and prepares
 * SPR registers for this exception.
 *
 * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
 * POWERPC_EXCP_DEBUG (on BookE).
 */
static uint32_t gen_prep_dbgex(DisasContext *ctx)
{
    if (ctx->flags & POWERPC_FLAG_DE) {
        target_ulong dbsr = 0;
        if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
            dbsr = DBCR0_ICMP;
        } else {
            /* Must have been branch */
            dbsr = DBCR0_BRT;
        }
        /* Record the debug event in DBSR before raising the exception. */
        TCGv t0 = tcg_temp_new();
        gen_load_spr(t0, SPR_BOOKE_DBSR);
        tcg_gen_ori_tl(t0, t0, dbsr);
        gen_store_spr(SPR_BOOKE_DBSR, t0);
        tcg_temp_free(t0);
        return POWERPC_EXCP_DEBUG;
    } else {
        return POWERPC_EXCP_TRACE;
    }
}
/* Raise the debug/trace exception selected by gen_prep_dbgex(). */
static void gen_debug_exception(DisasContext *ctx)
{
    gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx)));
    ctx->base.is_jmp = DISAS_NORETURN;
}

static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
}

static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
}

static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
}
/*****************************************************************************/
/* SPR READ/WRITE CALLBACKS */

/* Callback for inaccessible SPRs: intentionally generates no code. */
void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
{
#if 0
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: try to access SPR %d !\n", sprn);
#endif
}
370 /* #define PPC_DUMP_SPR_ACCESSES */
/*
 * Generic callbacks:
 * do nothing but store/retrieve spr value
 */
static void spr_load_dump_spr(int sprn)
{
#ifdef PPC_DUMP_SPR_ACCESSES
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_load_dump_spr(cpu_env, t0);
    tcg_temp_free_i32(t0);
#endif
}

/* mfspr: copy env->spr[sprn] into the destination GPR. */
void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}

static void spr_store_dump_spr(int sprn)
{
#ifdef PPC_DUMP_SPR_ACCESSES
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_store_dump_spr(cpu_env, t0);
    tcg_temp_free_i32(t0);
#endif
}

/* mtspr: copy the source GPR into env->spr[sprn]. */
void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
{
    gen_store_spr(sprn, cpu_gpr[gprn]);
    spr_store_dump_spr(sprn);
}
void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn)
{
    spr_write_generic(ctx, sprn, gprn);

    /*
     * SPR_CTRL writes must force a new translation block,
     * allowing the PMU to calculate the run latch events with
     * more accuracy.
     */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}
418 #if !defined(CONFIG_USER_ONLY)
/* Generic write of a 32-bit-wide SPR: zero-extend on 64-bit targets. */
void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
{
#ifdef TARGET_PPC64
    TCGv t0 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
    spr_store_dump_spr(sprn);
#else
    spr_write_generic(ctx, sprn, gprn);
#endif
}

void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
{
    /*
     * spr &= -gpr.  NOTE(review): for a conventional write-one-to-clear
     * semantic one would expect ~gpr (andc) rather than the two's
     * complement here — confirm intent against the SPRs wired to this
     * callback before changing it.
     */
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    gen_load_spr(t0, sprn);
    tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
    tcg_gen_and_tl(t0, t0, t1);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

/* Accesses that are architecturally permitted but have no effect. */
void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
{
}
448 #endif
/* SPR common to all PowerPC */
/* XER */
/* Reassemble XER from the separately tracked SO/OV/CA (and OV32/CA32) bits. */
void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
{
    TCGv dst = cpu_gpr[gprn];
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_mov_tl(dst, cpu_xer);
    tcg_gen_shli_tl(t0, cpu_so, XER_SO);
    tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
    tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_or_tl(dst, dst, t2);
    tcg_gen_or_tl(dst, dst, t0);
    if (is_isa300(ctx)) {
        /* ISA v3.00 adds the 32-bit overflow/carry copies. */
        tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
        tcg_gen_or_tl(dst, dst, t0);
        tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
        tcg_gen_or_tl(dst, dst, t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
/* Scatter a GPR value into cpu_xer plus the split SO/OV/CA flag globals. */
void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
{
    TCGv src = cpu_gpr[gprn];
    /* Write all flags, while reading back check for isa300 */
    tcg_gen_andi_tl(cpu_xer, src,
                    ~((1u << XER_SO) |
                      (1u << XER_OV) | (1u << XER_OV32) |
                      (1u << XER_CA) | (1u << XER_CA32)));
    tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
    tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
    tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
    tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
    tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
}
/* LR */
void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
}

void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
}

/* CFAR */
#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
}

void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
}
#endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */

/* CTR */
void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
}

void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
}
/* User read access to SPR */
/* USPRx */
/* UMMCRx */
/* UPMCx */
/* USIA */
/* UDECR */
/* The privileged counterpart lives at SPR number sprn + 0x10. */
void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
}

#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
{
    gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
}
#endif
/* SPR common to all non-embedded PowerPC */
/* DECR */
#if !defined(CONFIG_USER_ONLY)
/* Decrementer accesses are timer-backed, so they are icount I/O. */
void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
}

void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
}
#endif
/* SPR common to all non-embedded PowerPC, except 601 */
/* Time base */
/* Time-base reads/writes go through helpers; TB-backed ones are icount I/O. */
void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
}

void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
}

void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
}

void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
}

void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
}

#if defined(TARGET_PPC64)
void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
}

void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
}

/* HDECR */
void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
}

void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
}

void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
}

void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
}

void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
}
651 #endif
652 #endif
654 #if !defined(CONFIG_USER_ONLY)
/* IBAT0U...IBAT0U */
/* IBAT0L...IBAT7L */
/* sprn parity selects the U/L half; (sprn - base) / 2 selects the BAT pair. */
void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
}

/* "High" variants cover IBAT4..7, offset by 4 in the array. */
void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
}

/* BAT writes go through helpers so MMU state can be updated. */
void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4);
    gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2);
    gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4);
    gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

/* DBAT0U...DBAT7U */
/* DBAT0L...DBAT7L */
void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
}

void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
}

void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2);
    gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4);
    gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2);
    gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4);
    gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}
/* SDR1 */
void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
}

#if defined(TARGET_PPC64)
/* 64 bits PowerPC specific SPRs */
/* PIDR */
void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
}

/* HIOR is backed by the exception prefix field. */
void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
}

void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    /* Mask the value before it becomes the exception prefix. */
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
    tcg_temp_free(t0);
}

void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
}

/* DPDES */
void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
}

void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
}
794 #endif
795 #endif
/* PowerPC 601 specific registers */
/* RTC */
void spr_read_601_rtcl(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_601_rtcl(cpu_gpr[gprn], cpu_env);
}

void spr_read_601_rtcu(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_601_rtcu(cpu_gpr[gprn], cpu_env);
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_601_rtcu(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_601_rtcu(cpu_env, cpu_gpr[gprn]);
}

void spr_write_601_rtcl(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_601_rtcl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_hid0_601(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_hid0_601(cpu_env, cpu_gpr[gprn]);
    /* Must stop the translation as endianness may have changed */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}
826 #endif
/* Unified bats */
#if !defined(CONFIG_USER_ONLY)
void spr_read_601_ubat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
}

void spr_write_601_ubatu(DisasContext *ctx, int sprn, int gprn)
{
    /*
     * NOTE(review): the "ubatu" writer calls gen_helper_store_601_batl
     * while the "ubatl" writer calls gen_helper_store_601_batu — verify
     * this apparent cross-wiring against the helper implementations
     * before relying on (or "fixing") it.
     */
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_601_batl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_601_ubatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_601_batu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}
850 #endif
/* PowerPC 40x specific registers */
#if !defined(CONFIG_USER_ONLY)
void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
}

void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
}

void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_store_spr(sprn, cpu_gpr[gprn]);
    gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
    /* We must stop translation as we may have rebooted */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}

void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
}

/* 40x TCR/TSR writes are routed through dedicated helpers. */
void spr_write_40x_tcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_tcr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_40x_tsr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_tsr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
}
904 #endif
/* PowerPC 403 specific registers */
/* PBL1 / PBU1 / PBL2 / PBU2 */
#if !defined(CONFIG_USER_ONLY)
void spr_read_403_pbr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState, pb[sprn - SPR_403_PBL1]));
}

void spr_write_403_pbr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(sprn - SPR_403_PBL1);
    gen_helper_store_403_pbr(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
{
    /* Only the low 4 bits of PIR are kept on write. */
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
    gen_store_spr(SPR_PIR, t0);
    tcg_temp_free(t0);
}
929 #endif
/* SPE specific registers */
/* SPEFSCR is stored as a 32-bit field (spe_fscr) in the env. */
void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
    tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
    tcg_temp_free_i32(t0);
}

void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
    tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
    tcg_temp_free_i32(t0);
}
#if !defined(CONFIG_USER_ONLY)
/* Callback used to write the exception vector base */
void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    /* Apply ivpr_mask, then mirror the result into excp_prefix and the SPR. */
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
    tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}
960 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
962 int sprn_offs;
964 if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
965 sprn_offs = sprn - SPR_BOOKE_IVOR0;
966 } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
967 sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
968 } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
969 sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
970 } else {
971 printf("Trying to write an unknown exception vector %d %03x\n",
972 sprn, sprn);
973 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
974 return;
977 TCGv t0 = tcg_temp_new();
978 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
979 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
980 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
981 gen_store_spr(sprn, t0);
982 tcg_temp_free(t0);
984 #endif
986 #ifdef TARGET_PPC64
987 #ifndef CONFIG_USER_ONLY
/* AMR write: new bits are inserted only where the (U)AMOR mask allows. */
void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 PR=0 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    if (ctx->pr) {
        gen_load_spr(t1, SPR_UAMOR);
    } else {
        gen_load_spr(t1, SPR_AMOR);
    }

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_AMR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* Or'in new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_AMR, t0);
    spr_store_dump_spr(SPR_AMR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
/* UAMOR write: new bits are inserted only where AMOR allows. */
void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    gen_load_spr(t1, SPR_AMOR);

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_UAMOR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* Or'in new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_UAMOR, t0);
    spr_store_dump_spr(SPR_UAMOR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
/* IAMR write: new bits are inserted only where AMOR allows. */
void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    gen_load_spr(t1, SPR_AMOR);

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_IAMR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* Or'in new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_IAMR, t0);
    spr_store_dump_spr(SPR_IAMR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
1084 #endif
1085 #endif
1087 #ifndef CONFIG_USER_ONLY
/* THRM read: let the helper fix up the register before loading it. */
void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_fixup_thrm(cpu_env);
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}
1094 #endif /* !CONFIG_USER_ONLY */
1096 #if !defined(CONFIG_USER_ONLY)
/* Only the DCE/CPE bits of L1CSR0 are kept on write. */
void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

/* Only the ICE/CPE bits of L1CSR1 are kept on write. */
void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

/* The L2FI/L2FL/L2LFC command bits of L2CSR0 always read back as zero. */
void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn],
                    ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}
/* MMUCSR0 write triggers a TLB flush in the helper. */
void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
}

/* PID writes go through a helper, parameterized by the SPR number. */
void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
}

void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
}
1145 #endif
1147 #if !defined(CONFIG_USER_ONLY)
/* MAS7_3 write: split the 64-bit value into MAS3 (low) and MAS7 (high). */
void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
{
    TCGv val = tcg_temp_new();
    tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
    gen_store_spr(SPR_BOOKE_MAS3, val);
    tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
    gen_store_spr(SPR_BOOKE_MAS7, val);
    tcg_temp_free(val);
}

/* MAS7_3 read: concatenate MAS7 (high 32 bits) with MAS3 (low 32 bits). */
void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
{
    TCGv mas7 = tcg_temp_new();
    TCGv mas3 = tcg_temp_new();
    gen_load_spr(mas7, SPR_BOOKE_MAS7);
    tcg_gen_shli_tl(mas7, mas7, 32);
    gen_load_spr(mas3, SPR_BOOKE_MAS3);
    tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
    tcg_temp_free(mas3);
    tcg_temp_free(mas7);
}
1170 #endif
1172 #ifdef TARGET_PPC64
/*
 * Emit an FSCR facility check; facility_sprn is currently unused here.
 * The helper raises the facility-unavailable interrupt when needed.
 */
static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
                                    int bit, int sprn, int cause)
{
    TCGv_i32 t1 = tcg_const_i32(bit);
    TCGv_i32 t2 = tcg_const_i32(sprn);
    TCGv_i32 t3 = tcg_const_i32(cause);

    gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);

    tcg_temp_free_i32(t3);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t1);
}

/* Same shape as above, but the check is against an MSR bit. */
static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
                                   int bit, int sprn, int cause)
{
    TCGv_i32 t1 = tcg_const_i32(bit);
    TCGv_i32 t2 = tcg_const_i32(sprn);
    TCGv_i32 t3 = tcg_const_i32(cause);

    gen_helper_msr_facility_check(cpu_env, t1, t2, t3);

    tcg_temp_free_i32(t3);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t1);
}
1201 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
1203 TCGv spr_up = tcg_temp_new();
1204 TCGv spr = tcg_temp_new();
1206 gen_load_spr(spr, sprn - 1);
1207 tcg_gen_shri_tl(spr_up, spr, 32);
1208 tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
1210 tcg_temp_free(spr);
1211 tcg_temp_free(spr_up);
1214 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
1216 TCGv spr = tcg_temp_new();
1218 gen_load_spr(spr, sprn - 1);
1219 tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
1220 gen_store_spr(sprn - 1, spr);
1222 tcg_temp_free(spr);
1225 #if !defined(CONFIG_USER_ONLY)
1226 void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
1228 TCGv hmer = tcg_temp_new();
1230 gen_load_spr(hmer, sprn);
1231 tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
1232 gen_store_spr(sprn, hmer);
1233 spr_store_dump_spr(sprn);
1234 tcg_temp_free(hmer);
1237 void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
1239 gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
1241 #endif /* !defined(CONFIG_USER_ONLY) */
1243 void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
1245 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1246 spr_read_generic(ctx, gprn, sprn);
1249 void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
1251 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1252 spr_write_generic(ctx, sprn, gprn);
1255 void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
1257 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1258 spr_read_generic(ctx, gprn, sprn);
1261 void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
1263 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1264 spr_write_generic(ctx, sprn, gprn);
1267 void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
1269 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1270 spr_read_prev_upper32(ctx, gprn, sprn);
1273 void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
1275 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1276 spr_write_prev_upper32(ctx, sprn, gprn);
1279 void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
1281 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1282 spr_read_generic(ctx, gprn, sprn);
1285 void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
1287 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1288 spr_write_generic(ctx, sprn, gprn);
1291 void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
1293 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1294 spr_read_prev_upper32(ctx, gprn, sprn);
1297 void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
1299 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1300 spr_write_prev_upper32(ctx, sprn, gprn);
1302 #endif
/*
 * Convenience wrappers mapping opcode-table entry macros onto the
 * GEN_OPCODE* initializer macros below.  The _E variants carry a second
 * type field, the *2 variants a fourth opcode field, and the HANDLER2
 * variants an explicit opcode-table name.
 */
#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)

#define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
1322 typedef struct opcode_t {
1323 unsigned char opc1, opc2, opc3, opc4;
1324 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
1325 unsigned char pad[4];
1326 #endif
1327 opc_handler_t handler;
1328 const char *oname;
1329 } opcode_t;
/* Helpers for priv. check */
#define GEN_PRIV                                                 \
    do {                                                         \
        gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return;  \
    } while (0)

#if defined(CONFIG_USER_ONLY)
/* User mode: every privileged access traps. */
#define CHK_HV GEN_PRIV
#define CHK_SV GEN_PRIV
#define CHK_HVRM GEN_PRIV
#else
/* Hypervisor-only: problem state or non-HV state is rejected. */
#define CHK_HV                                                   \
    do {                                                         \
        if (unlikely(ctx->pr || !ctx->hv)) {                     \
            GEN_PRIV;                                            \
        }                                                        \
    } while (0)
/* Supervisor-only: problem state is rejected. */
#define CHK_SV                                                   \
    do {                                                         \
        if (unlikely(ctx->pr)) {                                 \
            GEN_PRIV;                                            \
        }                                                        \
    } while (0)
/* Hypervisor real mode only: also requires data translation off. */
#define CHK_HVRM                                                 \
    do {                                                         \
        if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) {          \
            GEN_PRIV;                                            \
        }                                                        \
    } while (0)
#endif

#define CHK_NONE
/*****************************************************************************/
/* PowerPC instructions table */

/* Basic entry: three opcode fields, opc4 unused (0xff). */
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
/* Entry with two invalid-bit masks (dual-form instructions). */
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl1,                                                     \
        .inval2  = invl2,                                                     \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
/* Entry with an explicit table name distinct from the handler name. */
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
/* Entry using all four opcode fields. */
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
/* Four opcode fields plus an explicit table name. */
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
1439 /* Invalid instruction */
1440 static void gen_invalid(DisasContext *ctx)
1442 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
1445 static opc_handler_t invalid_handler = {
1446 .inval1 = 0xFFFFFFFF,
1447 .inval2 = 0xFFFFFFFF,
1448 .type = PPC_NONE,
1449 .type2 = PPC_NONE,
1450 .handler = gen_invalid,
1453 /*** Integer comparison ***/
1455 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
1457 TCGv t0 = tcg_temp_new();
1458 TCGv t1 = tcg_temp_new();
1459 TCGv_i32 t = tcg_temp_new_i32();
1461 tcg_gen_movi_tl(t0, CRF_EQ);
1462 tcg_gen_movi_tl(t1, CRF_LT);
1463 tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
1464 t0, arg0, arg1, t1, t0);
1465 tcg_gen_movi_tl(t1, CRF_GT);
1466 tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
1467 t0, arg0, arg1, t1, t0);
1469 tcg_gen_trunc_tl_i32(t, t0);
1470 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
1471 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);
1473 tcg_temp_free(t0);
1474 tcg_temp_free(t1);
1475 tcg_temp_free_i32(t);
1478 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
1480 TCGv t0 = tcg_const_tl(arg1);
1481 gen_op_cmp(arg0, t0, s, crf);
1482 tcg_temp_free(t0);
1485 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
1487 TCGv t0, t1;
1488 t0 = tcg_temp_new();
1489 t1 = tcg_temp_new();
1490 if (s) {
1491 tcg_gen_ext32s_tl(t0, arg0);
1492 tcg_gen_ext32s_tl(t1, arg1);
1493 } else {
1494 tcg_gen_ext32u_tl(t0, arg0);
1495 tcg_gen_ext32u_tl(t1, arg1);
1497 gen_op_cmp(t0, t1, s, crf);
1498 tcg_temp_free(t1);
1499 tcg_temp_free(t0);
1502 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
1504 TCGv t0 = tcg_const_tl(arg1);
1505 gen_op_cmp32(arg0, t0, s, crf);
1506 tcg_temp_free(t0);
1509 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
1511 if (NARROW_MODE(ctx)) {
1512 gen_op_cmpi32(reg, 0, 1, 0);
1513 } else {
1514 gen_op_cmpi(reg, 0, 1, 0);
1518 /* cmprb - range comparison: isupper, isaplha, islower*/
1519 static void gen_cmprb(DisasContext *ctx)
1521 TCGv_i32 src1 = tcg_temp_new_i32();
1522 TCGv_i32 src2 = tcg_temp_new_i32();
1523 TCGv_i32 src2lo = tcg_temp_new_i32();
1524 TCGv_i32 src2hi = tcg_temp_new_i32();
1525 TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];
1527 tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
1528 tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);
1530 tcg_gen_andi_i32(src1, src1, 0xFF);
1531 tcg_gen_ext8u_i32(src2lo, src2);
1532 tcg_gen_shri_i32(src2, src2, 8);
1533 tcg_gen_ext8u_i32(src2hi, src2);
1535 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1536 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1537 tcg_gen_and_i32(crf, src2lo, src2hi);
1539 if (ctx->opcode & 0x00200000) {
1540 tcg_gen_shri_i32(src2, src2, 8);
1541 tcg_gen_ext8u_i32(src2lo, src2);
1542 tcg_gen_shri_i32(src2, src2, 8);
1543 tcg_gen_ext8u_i32(src2hi, src2);
1544 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1545 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1546 tcg_gen_and_i32(src2lo, src2lo, src2hi);
1547 tcg_gen_or_i32(crf, crf, src2lo);
1549 tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
1550 tcg_temp_free_i32(src1);
1551 tcg_temp_free_i32(src2);
1552 tcg_temp_free_i32(src2lo);
1553 tcg_temp_free_i32(src2hi);
#if defined(TARGET_PPC64)
/* cmpeqb: test whether the low byte of rA matches any byte of rB. */
static void gen_cmpeqb(DisasContext *ctx)
{
    gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
}
#endif
1565 /* isel (PowerPC 2.03 specification) */
1566 static void gen_isel(DisasContext *ctx)
1568 uint32_t bi = rC(ctx->opcode);
1569 uint32_t mask = 0x08 >> (bi & 0x03);
1570 TCGv t0 = tcg_temp_new();
1571 TCGv zr;
1573 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
1574 tcg_gen_andi_tl(t0, t0, mask);
1576 zr = tcg_const_tl(0);
1577 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
1578 rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
1579 cpu_gpr[rB(ctx->opcode)]);
1580 tcg_temp_free(zr);
1581 tcg_temp_free(t0);
1584 /* cmpb: PowerPC 2.05 specification */
1585 static void gen_cmpb(DisasContext *ctx)
1587 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
1588 cpu_gpr[rB(ctx->opcode)]);
1591 /*** Integer arithmetic ***/
1593 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
1594 TCGv arg1, TCGv arg2, int sub)
1596 TCGv t0 = tcg_temp_new();
1598 tcg_gen_xor_tl(cpu_ov, arg0, arg2);
1599 tcg_gen_xor_tl(t0, arg1, arg2);
1600 if (sub) {
1601 tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
1602 } else {
1603 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
1605 tcg_temp_free(t0);
1606 if (NARROW_MODE(ctx)) {
1607 tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
1608 if (is_isa300(ctx)) {
1609 tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1611 } else {
1612 if (is_isa300(ctx)) {
1613 tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
1615 tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
1617 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1620 static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
1621 TCGv res, TCGv arg0, TCGv arg1,
1622 TCGv ca32, int sub)
1624 TCGv t0;
1626 if (!is_isa300(ctx)) {
1627 return;
1630 t0 = tcg_temp_new();
1631 if (sub) {
1632 tcg_gen_eqv_tl(t0, arg0, arg1);
1633 } else {
1634 tcg_gen_xor_tl(t0, arg0, arg1);
1636 tcg_gen_xor_tl(t0, t0, res);
1637 tcg_gen_extract_tl(ca32, t0, 32, 1);
1638 tcg_temp_free(t0);
1641 /* Common add function */
1642 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
1643 TCGv arg2, TCGv ca, TCGv ca32,
1644 bool add_ca, bool compute_ca,
1645 bool compute_ov, bool compute_rc0)
1647 TCGv t0 = ret;
1649 if (compute_ca || compute_ov) {
1650 t0 = tcg_temp_new();
1653 if (compute_ca) {
1654 if (NARROW_MODE(ctx)) {
1656 * Caution: a non-obvious corner case of the spec is that
1657 * we must produce the *entire* 64-bit addition, but
1658 * produce the carry into bit 32.
1660 TCGv t1 = tcg_temp_new();
1661 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */
1662 tcg_gen_add_tl(t0, arg1, arg2);
1663 if (add_ca) {
1664 tcg_gen_add_tl(t0, t0, ca);
1666 tcg_gen_xor_tl(ca, t0, t1); /* bits changed w/ carry */
1667 tcg_temp_free(t1);
1668 tcg_gen_extract_tl(ca, ca, 32, 1);
1669 if (is_isa300(ctx)) {
1670 tcg_gen_mov_tl(ca32, ca);
1672 } else {
1673 TCGv zero = tcg_const_tl(0);
1674 if (add_ca) {
1675 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
1676 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
1677 } else {
1678 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
1680 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
1681 tcg_temp_free(zero);
1683 } else {
1684 tcg_gen_add_tl(t0, arg1, arg2);
1685 if (add_ca) {
1686 tcg_gen_add_tl(t0, t0, ca);
1690 if (compute_ov) {
1691 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1693 if (unlikely(compute_rc0)) {
1694 gen_set_Rc0(ctx, t0);
1697 if (t0 != ret) {
1698 tcg_gen_mov_tl(ret, t0);
1699 tcg_temp_free(t0);
1702 /* Add functions with two operands */
1703 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov) \
1704 static void glue(gen_, name)(DisasContext *ctx) \
1706 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
1707 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1708 ca, glue(ca, 32), \
1709 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1711 /* Add functions with one operand and one immediate */
1712 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca, \
1713 add_ca, compute_ca, compute_ov) \
1714 static void glue(gen_, name)(DisasContext *ctx) \
1716 TCGv t0 = tcg_const_tl(const_val); \
1717 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
1718 cpu_gpr[rA(ctx->opcode)], t0, \
1719 ca, glue(ca, 32), \
1720 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1721 tcg_temp_free(t0); \
1724 /* add add. addo addo. */
1725 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
1726 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
1727 /* addc addc. addco addco. */
1728 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
1729 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
1730 /* adde adde. addeo addeo. */
1731 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
1732 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
1733 /* addme addme. addmeo addmeo. */
1734 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
1735 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
1736 /* addex */
1737 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
1738 /* addze addze. addzeo addzeo.*/
1739 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
1740 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
1741 /* addic addic.*/
1742 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
1744 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
1745 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1746 c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
1747 tcg_temp_free(c);
1750 static void gen_addic(DisasContext *ctx)
1752 gen_op_addic(ctx, 0);
1755 static void gen_addic_(DisasContext *ctx)
1757 gen_op_addic(ctx, 1);
1760 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
1761 TCGv arg2, int sign, int compute_ov)
1763 TCGv_i32 t0 = tcg_temp_new_i32();
1764 TCGv_i32 t1 = tcg_temp_new_i32();
1765 TCGv_i32 t2 = tcg_temp_new_i32();
1766 TCGv_i32 t3 = tcg_temp_new_i32();
1768 tcg_gen_trunc_tl_i32(t0, arg1);
1769 tcg_gen_trunc_tl_i32(t1, arg2);
1770 if (sign) {
1771 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1772 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1773 tcg_gen_and_i32(t2, t2, t3);
1774 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1775 tcg_gen_or_i32(t2, t2, t3);
1776 tcg_gen_movi_i32(t3, 0);
1777 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1778 tcg_gen_div_i32(t3, t0, t1);
1779 tcg_gen_extu_i32_tl(ret, t3);
1780 } else {
1781 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
1782 tcg_gen_movi_i32(t3, 0);
1783 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1784 tcg_gen_divu_i32(t3, t0, t1);
1785 tcg_gen_extu_i32_tl(ret, t3);
1787 if (compute_ov) {
1788 tcg_gen_extu_i32_tl(cpu_ov, t2);
1789 if (is_isa300(ctx)) {
1790 tcg_gen_extu_i32_tl(cpu_ov32, t2);
1792 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1794 tcg_temp_free_i32(t0);
1795 tcg_temp_free_i32(t1);
1796 tcg_temp_free_i32(t2);
1797 tcg_temp_free_i32(t3);
1799 if (unlikely(Rc(ctx->opcode) != 0)) {
1800 gen_set_Rc0(ctx, ret);
1803 /* Div functions */
1804 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
1805 static void glue(gen_, name)(DisasContext *ctx) \
1807 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
1808 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1809 sign, compute_ov); \
1811 /* divwu divwu. divwuo divwuo. */
1812 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
1813 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
1814 /* divw divw. divwo divwo. */
1815 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
1816 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
1818 /* div[wd]eu[o][.] */
1819 #define GEN_DIVE(name, hlpr, compute_ov) \
1820 static void gen_##name(DisasContext *ctx) \
1822 TCGv_i32 t0 = tcg_const_i32(compute_ov); \
1823 gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \
1824 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
1825 tcg_temp_free_i32(t0); \
1826 if (unlikely(Rc(ctx->opcode) != 0)) { \
1827 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
1831 GEN_DIVE(divweu, divweu, 0);
1832 GEN_DIVE(divweuo, divweu, 1);
1833 GEN_DIVE(divwe, divwe, 0);
1834 GEN_DIVE(divweo, divwe, 1);
1836 #if defined(TARGET_PPC64)
1837 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
1838 TCGv arg2, int sign, int compute_ov)
1840 TCGv_i64 t0 = tcg_temp_new_i64();
1841 TCGv_i64 t1 = tcg_temp_new_i64();
1842 TCGv_i64 t2 = tcg_temp_new_i64();
1843 TCGv_i64 t3 = tcg_temp_new_i64();
1845 tcg_gen_mov_i64(t0, arg1);
1846 tcg_gen_mov_i64(t1, arg2);
1847 if (sign) {
1848 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1849 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1850 tcg_gen_and_i64(t2, t2, t3);
1851 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1852 tcg_gen_or_i64(t2, t2, t3);
1853 tcg_gen_movi_i64(t3, 0);
1854 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1855 tcg_gen_div_i64(ret, t0, t1);
1856 } else {
1857 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
1858 tcg_gen_movi_i64(t3, 0);
1859 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1860 tcg_gen_divu_i64(ret, t0, t1);
1862 if (compute_ov) {
1863 tcg_gen_mov_tl(cpu_ov, t2);
1864 if (is_isa300(ctx)) {
1865 tcg_gen_mov_tl(cpu_ov32, t2);
1867 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1869 tcg_temp_free_i64(t0);
1870 tcg_temp_free_i64(t1);
1871 tcg_temp_free_i64(t2);
1872 tcg_temp_free_i64(t3);
1874 if (unlikely(Rc(ctx->opcode) != 0)) {
1875 gen_set_Rc0(ctx, ret);
1879 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
1880 static void glue(gen_, name)(DisasContext *ctx) \
1882 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
1883 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1884 sign, compute_ov); \
1886 /* divdu divdu. divduo divduo. */
1887 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1888 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1889 /* divd divd. divdo divdo. */
1890 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1891 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1893 GEN_DIVE(divdeu, divdeu, 0);
1894 GEN_DIVE(divdeuo, divdeu, 1);
1895 GEN_DIVE(divde, divde, 0);
1896 GEN_DIVE(divdeo, divde, 1);
1897 #endif
1899 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
1900 TCGv arg2, int sign)
1902 TCGv_i32 t0 = tcg_temp_new_i32();
1903 TCGv_i32 t1 = tcg_temp_new_i32();
1905 tcg_gen_trunc_tl_i32(t0, arg1);
1906 tcg_gen_trunc_tl_i32(t1, arg2);
1907 if (sign) {
1908 TCGv_i32 t2 = tcg_temp_new_i32();
1909 TCGv_i32 t3 = tcg_temp_new_i32();
1910 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1911 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1912 tcg_gen_and_i32(t2, t2, t3);
1913 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1914 tcg_gen_or_i32(t2, t2, t3);
1915 tcg_gen_movi_i32(t3, 0);
1916 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1917 tcg_gen_rem_i32(t3, t0, t1);
1918 tcg_gen_ext_i32_tl(ret, t3);
1919 tcg_temp_free_i32(t2);
1920 tcg_temp_free_i32(t3);
1921 } else {
1922 TCGv_i32 t2 = tcg_const_i32(1);
1923 TCGv_i32 t3 = tcg_const_i32(0);
1924 tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
1925 tcg_gen_remu_i32(t3, t0, t1);
1926 tcg_gen_extu_i32_tl(ret, t3);
1927 tcg_temp_free_i32(t2);
1928 tcg_temp_free_i32(t3);
1930 tcg_temp_free_i32(t0);
1931 tcg_temp_free_i32(t1);
1934 #define GEN_INT_ARITH_MODW(name, opc3, sign) \
1935 static void glue(gen_, name)(DisasContext *ctx) \
1937 gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \
1938 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1939 sign); \
1942 GEN_INT_ARITH_MODW(moduw, 0x08, 0);
1943 GEN_INT_ARITH_MODW(modsw, 0x18, 1);
1945 #if defined(TARGET_PPC64)
1946 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
1947 TCGv arg2, int sign)
1949 TCGv_i64 t0 = tcg_temp_new_i64();
1950 TCGv_i64 t1 = tcg_temp_new_i64();
1952 tcg_gen_mov_i64(t0, arg1);
1953 tcg_gen_mov_i64(t1, arg2);
1954 if (sign) {
1955 TCGv_i64 t2 = tcg_temp_new_i64();
1956 TCGv_i64 t3 = tcg_temp_new_i64();
1957 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1958 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1959 tcg_gen_and_i64(t2, t2, t3);
1960 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1961 tcg_gen_or_i64(t2, t2, t3);
1962 tcg_gen_movi_i64(t3, 0);
1963 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1964 tcg_gen_rem_i64(ret, t0, t1);
1965 tcg_temp_free_i64(t2);
1966 tcg_temp_free_i64(t3);
1967 } else {
1968 TCGv_i64 t2 = tcg_const_i64(1);
1969 TCGv_i64 t3 = tcg_const_i64(0);
1970 tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
1971 tcg_gen_remu_i64(ret, t0, t1);
1972 tcg_temp_free_i64(t2);
1973 tcg_temp_free_i64(t3);
1975 tcg_temp_free_i64(t0);
1976 tcg_temp_free_i64(t1);
1979 #define GEN_INT_ARITH_MODD(name, opc3, sign) \
1980 static void glue(gen_, name)(DisasContext *ctx) \
1982 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \
1983 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1984 sign); \
1987 GEN_INT_ARITH_MODD(modud, 0x08, 0);
1988 GEN_INT_ARITH_MODD(modsd, 0x18, 1);
1989 #endif
1991 /* mulhw mulhw. */
1992 static void gen_mulhw(DisasContext *ctx)
1994 TCGv_i32 t0 = tcg_temp_new_i32();
1995 TCGv_i32 t1 = tcg_temp_new_i32();
1997 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1998 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1999 tcg_gen_muls2_i32(t0, t1, t0, t1);
2000 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
2001 tcg_temp_free_i32(t0);
2002 tcg_temp_free_i32(t1);
2003 if (unlikely(Rc(ctx->opcode) != 0)) {
2004 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2008 /* mulhwu mulhwu. */
2009 static void gen_mulhwu(DisasContext *ctx)
2011 TCGv_i32 t0 = tcg_temp_new_i32();
2012 TCGv_i32 t1 = tcg_temp_new_i32();
2014 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2015 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2016 tcg_gen_mulu2_i32(t0, t1, t0, t1);
2017 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
2018 tcg_temp_free_i32(t0);
2019 tcg_temp_free_i32(t1);
2020 if (unlikely(Rc(ctx->opcode) != 0)) {
2021 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2025 /* mullw mullw. */
2026 static void gen_mullw(DisasContext *ctx)
2028 #if defined(TARGET_PPC64)
2029 TCGv_i64 t0, t1;
2030 t0 = tcg_temp_new_i64();
2031 t1 = tcg_temp_new_i64();
2032 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
2033 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
2034 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
2035 tcg_temp_free(t0);
2036 tcg_temp_free(t1);
2037 #else
2038 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2039 cpu_gpr[rB(ctx->opcode)]);
2040 #endif
2041 if (unlikely(Rc(ctx->opcode) != 0)) {
2042 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2046 /* mullwo mullwo. */
2047 static void gen_mullwo(DisasContext *ctx)
2049 TCGv_i32 t0 = tcg_temp_new_i32();
2050 TCGv_i32 t1 = tcg_temp_new_i32();
2052 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2053 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2054 tcg_gen_muls2_i32(t0, t1, t0, t1);
2055 #if defined(TARGET_PPC64)
2056 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
2057 #else
2058 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
2059 #endif
2061 tcg_gen_sari_i32(t0, t0, 31);
2062 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
2063 tcg_gen_extu_i32_tl(cpu_ov, t0);
2064 if (is_isa300(ctx)) {
2065 tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2067 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2069 tcg_temp_free_i32(t0);
2070 tcg_temp_free_i32(t1);
2071 if (unlikely(Rc(ctx->opcode) != 0)) {
2072 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2076 /* mulli */
2077 static void gen_mulli(DisasContext *ctx)
2079 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2080 SIMM(ctx->opcode));
2083 #if defined(TARGET_PPC64)
2084 /* mulhd mulhd. */
2085 static void gen_mulhd(DisasContext *ctx)
2087 TCGv lo = tcg_temp_new();
2088 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2089 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2090 tcg_temp_free(lo);
2091 if (unlikely(Rc(ctx->opcode) != 0)) {
2092 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2096 /* mulhdu mulhdu. */
2097 static void gen_mulhdu(DisasContext *ctx)
2099 TCGv lo = tcg_temp_new();
2100 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2101 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2102 tcg_temp_free(lo);
2103 if (unlikely(Rc(ctx->opcode) != 0)) {
2104 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2108 /* mulld mulld. */
2109 static void gen_mulld(DisasContext *ctx)
2111 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2112 cpu_gpr[rB(ctx->opcode)]);
2113 if (unlikely(Rc(ctx->opcode) != 0)) {
2114 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2118 /* mulldo mulldo. */
2119 static void gen_mulldo(DisasContext *ctx)
2121 TCGv_i64 t0 = tcg_temp_new_i64();
2122 TCGv_i64 t1 = tcg_temp_new_i64();
2124 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
2125 cpu_gpr[rB(ctx->opcode)]);
2126 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);
2128 tcg_gen_sari_i64(t0, t0, 63);
2129 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
2130 if (is_isa300(ctx)) {
2131 tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2133 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2135 tcg_temp_free_i64(t0);
2136 tcg_temp_free_i64(t1);
2138 if (unlikely(Rc(ctx->opcode) != 0)) {
2139 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2142 #endif
2144 /* Common subf function */
2145 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
2146 TCGv arg2, bool add_ca, bool compute_ca,
2147 bool compute_ov, bool compute_rc0)
2149 TCGv t0 = ret;
2151 if (compute_ca || compute_ov) {
2152 t0 = tcg_temp_new();
2155 if (compute_ca) {
2156 /* dest = ~arg1 + arg2 [+ ca]. */
2157 if (NARROW_MODE(ctx)) {
2159 * Caution: a non-obvious corner case of the spec is that
2160 * we must produce the *entire* 64-bit addition, but
2161 * produce the carry into bit 32.
2163 TCGv inv1 = tcg_temp_new();
2164 TCGv t1 = tcg_temp_new();
2165 tcg_gen_not_tl(inv1, arg1);
2166 if (add_ca) {
2167 tcg_gen_add_tl(t0, arg2, cpu_ca);
2168 } else {
2169 tcg_gen_addi_tl(t0, arg2, 1);
2171 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */
2172 tcg_gen_add_tl(t0, t0, inv1);
2173 tcg_temp_free(inv1);
2174 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changes w/ carry */
2175 tcg_temp_free(t1);
2176 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
2177 if (is_isa300(ctx)) {
2178 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2180 } else if (add_ca) {
2181 TCGv zero, inv1 = tcg_temp_new();
2182 tcg_gen_not_tl(inv1, arg1);
2183 zero = tcg_const_tl(0);
2184 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
2185 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
2186 gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
2187 tcg_temp_free(zero);
2188 tcg_temp_free(inv1);
2189 } else {
2190 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
2191 tcg_gen_sub_tl(t0, arg2, arg1);
2192 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
2194 } else if (add_ca) {
2196 * Since we're ignoring carry-out, we can simplify the
2197 * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
2199 tcg_gen_sub_tl(t0, arg2, arg1);
2200 tcg_gen_add_tl(t0, t0, cpu_ca);
2201 tcg_gen_subi_tl(t0, t0, 1);
2202 } else {
2203 tcg_gen_sub_tl(t0, arg2, arg1);
2206 if (compute_ov) {
2207 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
2209 if (unlikely(compute_rc0)) {
2210 gen_set_Rc0(ctx, t0);
2213 if (t0 != ret) {
2214 tcg_gen_mov_tl(ret, t0);
2215 tcg_temp_free(t0);
2218 /* Sub functions with Two operands functions */
2219 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
2220 static void glue(gen_, name)(DisasContext *ctx) \
2222 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
2223 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2224 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
2226 /* Sub functions with one operand and one immediate */
2227 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
2228 add_ca, compute_ca, compute_ov) \
2229 static void glue(gen_, name)(DisasContext *ctx) \
2231 TCGv t0 = tcg_const_tl(const_val); \
2232 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
2233 cpu_gpr[rA(ctx->opcode)], t0, \
2234 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
2235 tcg_temp_free(t0); \
2237 /* subf subf. subfo subfo. */
2238 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
2239 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
2240 /* subfc subfc. subfco subfco. */
2241 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
2242 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
2243 /* subfe subfe. subfeo subfo. */
2244 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
2245 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
2246 /* subfme subfme. subfmeo subfmeo. */
2247 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
2248 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
2249 /* subfze subfze. subfzeo subfzeo.*/
2250 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
2251 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
2253 /* subfic */
2254 static void gen_subfic(DisasContext *ctx)
2256 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
2257 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2258 c, 0, 1, 0, 0);
2259 tcg_temp_free(c);
2262 /* neg neg. nego nego. */
2263 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
2265 TCGv zero = tcg_const_tl(0);
2266 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2267 zero, 0, 0, compute_ov, Rc(ctx->opcode));
2268 tcg_temp_free(zero);
2271 static void gen_neg(DisasContext *ctx)
2273 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2274 if (unlikely(Rc(ctx->opcode))) {
2275 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2279 static void gen_nego(DisasContext *ctx)
2281 gen_op_arith_neg(ctx, 1);
/*** Integer logical ***/
/* Two-source logical op: rA = rS op rB, optional record form. */
#define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

/* One-source logical op: rA = op(rS), optional record form. */
#define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}
2302 /* and & and. */
2303 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
2304 /* andc & andc. */
2305 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
2307 /* andi. */
/* andi.: immediate AND; always records CR0 (the dot form is the only form). */
2308 static void gen_andi_(DisasContext *ctx)
2310     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2311                     UIMM(ctx->opcode));
2312     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2315 /* andis. */
/* andis.: like andi. but the 16-bit immediate is shifted into bits 16..31. */
2316 static void gen_andis_(DisasContext *ctx)
2318     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2319                     UIMM(ctx->opcode) << 16);
2320     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2323 /* cntlzw */
/*
 * Count leading zeros of the low 32 bits of rS; clzi's third argument (32)
 * is the result when the input is zero.
 */
2324 static void gen_cntlzw(DisasContext *ctx)
2326     TCGv_i32 t = tcg_temp_new_i32();
2328     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2329     tcg_gen_clzi_i32(t, t, 32);
2330     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2331     tcg_temp_free_i32(t);
2333     if (unlikely(Rc(ctx->opcode) != 0)) {
2334         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2338 /* cnttzw */
/* Count trailing zeros of the low 32 bits of rS; 32 when the input is zero. */
2339 static void gen_cnttzw(DisasContext *ctx)
2341     TCGv_i32 t = tcg_temp_new_i32();
2343     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2344     tcg_gen_ctzi_i32(t, t, 32);
2345     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2346     tcg_temp_free_i32(t);
2348     if (unlikely(Rc(ctx->opcode) != 0)) {
2349         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* Remaining simple logical ops, expanded from the GEN_LOGICAL* macros above. */
2353 /* eqv & eqv. */
2354 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
2355 /* extsb & extsb. */
2356 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
2357 /* extsh & extsh. */
2358 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
2359 /* nand & nand. */
2360 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
2361 /* nor & nor. */
2362 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
2364 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
/*
 * Write 0 to cpu->halted (reached via the negative offset from env back to
 * the containing CPUState) and exit the TB with EXCP_HLT so other vCPUs get
 * a chance to run.  Used by the SMT priority-hint forms of "or" below.
 */
2365 static void gen_pause(DisasContext *ctx)
2367     TCGv_i32 t0 = tcg_const_i32(0);
2368     tcg_gen_st_i32(t0, cpu_env,
2369                    -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
2370     tcg_temp_free_i32(t0);
2372     /* Stop translation, this gives other CPUs a chance to run */
2373     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
2375 #endif /* defined(TARGET_PPC64) */
2377 /* or & or. */
/*
 * or/or. with two special cases:
 *  - "mr" (rs == rb) becomes a plain move;
 *  - on PPC64, "or rx,rx,rx" with rx != 0 and Rc=0 is a thread-priority
 *    hint: the PPR SPR priority field (bits 52:50) is updated when the
 *    privilege checks pass, then we pause out of TCG so spin loops using
 *    smt_low do not monopolise the host.
 */
2378 static void gen_or(DisasContext *ctx)
2380     int rs, ra, rb;
2382     rs = rS(ctx->opcode);
2383     ra = rA(ctx->opcode);
2384     rb = rB(ctx->opcode);
2385     /* Optimisation for mr. ri case */
2386     if (rs != ra || rs != rb) {
2387         if (rs != rb) {
2388             tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
2389         } else {
2390             tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
2392         if (unlikely(Rc(ctx->opcode) != 0)) {
2393             gen_set_Rc0(ctx, cpu_gpr[ra]);
2395     } else if (unlikely(Rc(ctx->opcode) != 0)) {
2396         gen_set_Rc0(ctx, cpu_gpr[rs]);
2397 #if defined(TARGET_PPC64)
2398     } else if (rs != 0) { /* 0 is nop */
2399         int prio = 0;
2401         switch (rs) {
2402         case 1:
2403             /* Set process priority to low */
2404             prio = 2;
2405             break;
2406         case 6:
2407             /* Set process priority to medium-low */
2408             prio = 3;
2409             break;
2410         case 2:
2411             /* Set process priority to normal */
2412             prio = 4;
2413             break;
2414 #if !defined(CONFIG_USER_ONLY)
2415         case 31:
2416             if (!ctx->pr) {
2417                 /* Set process priority to very low */
2418                 prio = 1;
2420             break;
2421         case 5:
2422             if (!ctx->pr) {
2423                 /* Set process priority to medium-high */
2424                 prio = 5;
2426             break;
2427         case 3:
2428             if (!ctx->pr) {
2429                 /* Set process priority to high */
2430                 prio = 6;
2432             break;
2433         case 7:
2434             if (ctx->hv && !ctx->pr) {
2435                 /* Set process priority to very high */
2436                 prio = 7;
2438             break;
2439 #endif
2440         default:
2441             break;
2443         if (prio) {
2444             TCGv t0 = tcg_temp_new();
2445             gen_load_spr(t0, SPR_PPR);
/* Replace PPR[52:50] (the priority field) with the requested level. */
2446             tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
2447             tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
2448             gen_store_spr(SPR_PPR, t0);
2449             tcg_temp_free(t0);
2451 #if !defined(CONFIG_USER_ONLY)
2453      * Pause out of TCG otherwise spin loops with smt_low eat too
2454      * much CPU and the kernel hangs. This applies to all
2455      * encodings other than no-op, e.g., miso(rs=26), yield(27),
2456      * mdoio(29), mdoom(30), and all currently undefined.
2458             gen_pause(ctx);
2459 #endif
2460 #endif
2463 /* orc & orc. */
2464 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
2466 /* xor & xor. */
/* xor/xor.: "xor rx,ry,ry" is the idiomatic clear, emitted as movi 0. */
2467 static void gen_xor(DisasContext *ctx)
2469     /* Optimisation for "set to zero" case */
2470     if (rS(ctx->opcode) != rB(ctx->opcode)) {
2471         tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2472                        cpu_gpr[rB(ctx->opcode)]);
2473     } else {
2474         tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2476     if (unlikely(Rc(ctx->opcode) != 0)) {
2477         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2481 /* ori */
/*
 * Immediate OR/XOR forms.  Each treats the encoding with rS == rA and a
 * zero immediate as the architected no-op and emits nothing.
 */
2482 static void gen_ori(DisasContext *ctx)
2484     target_ulong uimm = UIMM(ctx->opcode);
2486     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2487         return;
2489     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2492 /* oris */
/* oris: immediate shifted into bits 16..31 before the OR. */
2493 static void gen_oris(DisasContext *ctx)
2495     target_ulong uimm = UIMM(ctx->opcode);
2497     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2498         /* NOP */
2499         return;
2501     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2502                    uimm << 16);
2505 /* xori */
2506 static void gen_xori(DisasContext *ctx)
2508     target_ulong uimm = UIMM(ctx->opcode);
2510     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2511         /* NOP */
2512         return;
2514     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2517 /* xoris */
/* xoris: immediate shifted into bits 16..31 before the XOR. */
2518 static void gen_xoris(DisasContext *ctx)
2520     target_ulong uimm = UIMM(ctx->opcode);
2522     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2523         /* NOP */
2524         return;
2526     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2527                     uimm << 16);
2530 /* popcntb : PowerPC 2.03 specification */
/* Per-byte population count; done in a helper rather than inline TCG. */
2531 static void gen_popcntb(DisasContext *ctx)
2533     gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
/*
 * popcntw: per-32-bit-word popcount.  On 32-bit targets the register is a
 * single word, so TCG's ctpop suffices; 64-bit needs the helper to count
 * each word independently.
 */
2536 static void gen_popcntw(DisasContext *ctx)
2538 #if defined(TARGET_PPC64)
2539     gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2540 #else
2541     tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2542 #endif
2545 #if defined(TARGET_PPC64)
2546 /* popcntd: PowerPC 2.06 specification */
2547 static void gen_popcntd(DisasContext *ctx)
2549     tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2551 #endif
2553 /* prtyw: PowerPC 2.05 specification */
/*
 * Word parity: XOR-fold each 32-bit word down to its low byte's parity bit.
 * The final mask keeps bit 0 of each word (0x1 and, on 64-bit, 0x100000001).
 */
2554 static void gen_prtyw(DisasContext *ctx)
2556     TCGv ra = cpu_gpr[rA(ctx->opcode)];
2557     TCGv rs = cpu_gpr[rS(ctx->opcode)];
2558     TCGv t0 = tcg_temp_new();
2559     tcg_gen_shri_tl(t0, rs, 16);
2560     tcg_gen_xor_tl(ra, rs, t0);
2561     tcg_gen_shri_tl(t0, ra, 8);
2562     tcg_gen_xor_tl(ra, ra, t0);
2563     tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
2564     tcg_temp_free(t0);
2567 #if defined(TARGET_PPC64)
2568 /* prtyd: PowerPC 2.05 specification */
/* Doubleword parity: same XOR-fold, one extra 32-bit step, keep bit 0. */
2569 static void gen_prtyd(DisasContext *ctx)
2571     TCGv ra = cpu_gpr[rA(ctx->opcode)];
2572     TCGv rs = cpu_gpr[rS(ctx->opcode)];
2573     TCGv t0 = tcg_temp_new();
2574     tcg_gen_shri_tl(t0, rs, 32);
2575     tcg_gen_xor_tl(ra, rs, t0);
2576     tcg_gen_shri_tl(t0, ra, 16);
2577     tcg_gen_xor_tl(ra, ra, t0);
2578     tcg_gen_shri_tl(t0, ra, 8);
2579     tcg_gen_xor_tl(ra, ra, t0);
2580     tcg_gen_andi_tl(ra, ra, 1);
2581     tcg_temp_free(t0);
2583 #endif
2585 #if defined(TARGET_PPC64)
2586 /* bpermd */
/* Bit permute doubleword; gather logic lives in the out-of-line helper. */
2587 static void gen_bpermd(DisasContext *ctx)
2589     gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
2590                       cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2592 #endif
2594 #if defined(TARGET_PPC64)
2595 /* extsw & extsw. */
2596 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
2598 /* cntlzd */
/* Count leading zeros of the full doubleword; 64 when the input is zero. */
2599 static void gen_cntlzd(DisasContext *ctx)
2601     tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2602     if (unlikely(Rc(ctx->opcode) != 0)) {
2603         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2607 /* cnttzd */
/* Count trailing zeros of the full doubleword; 64 when the input is zero. */
2608 static void gen_cnttzd(DisasContext *ctx)
2610     tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2611     if (unlikely(Rc(ctx->opcode) != 0)) {
2612         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2616 /* darn */
/*
 * Deliver a random number.  L selects the format; L > 2 is reserved and
 * returns all-ones per the ISA.  The helpers do I/O (entropy source), so
 * icount bookkeeping must start before calling them.
 */
2617 static void gen_darn(DisasContext *ctx)
2619     int l = L(ctx->opcode);
2621     if (l > 2) {
2622         tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
2623     } else {
2624         gen_icount_io_start(ctx);
2625         if (l == 0) {
2626             gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
2627         } else {
2628             /* Return 64-bit random for both CRN and RRN */
2629             gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
2633 #endif
2635 /*** Integer rotate ***/
2637 /* rlwimi & rlwimi. */
/*
 * Rotate left word immediate then mask insert: rA = (rot32(rS, sh) & mask)
 * | (rA & ~mask).  The contiguous-mask case maps onto a single deposit;
 * otherwise the low word of rS is replicated into the high word (64-bit
 * only) so a 64-bit rotate behaves like a 32-bit one, then mask/merge.
 */
2638 static void gen_rlwimi(DisasContext *ctx)
2640     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2641     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2642     uint32_t sh = SH(ctx->opcode);
2643     uint32_t mb = MB(ctx->opcode);
2644     uint32_t me = ME(ctx->opcode);
2646     if (sh == (31 - me) && mb <= me) {
2647         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2648     } else {
2649         target_ulong mask;
2650         bool mask_in_32b = true;
2651         TCGv t1;
2653 #if defined(TARGET_PPC64)
/* MASK() works on 64-bit positions; shift mb/me into the low word. */
2654         mb += 32;
2655         me += 32;
2656 #endif
2657         mask = MASK(mb, me);
2659 #if defined(TARGET_PPC64)
/* A wrapping mask (mb > me) spills into the high word; need 64-bit path. */
2660         if (mask > 0xffffffffu) {
2661             mask_in_32b = false;
2663 #endif
2664         t1 = tcg_temp_new();
2665         if (mask_in_32b) {
2666             TCGv_i32 t0 = tcg_temp_new_i32();
2667             tcg_gen_trunc_tl_i32(t0, t_rs);
2668             tcg_gen_rotli_i32(t0, t0, sh);
2669             tcg_gen_extu_i32_tl(t1, t0);
2670             tcg_temp_free_i32(t0);
2671         } else {
2672 #if defined(TARGET_PPC64)
2673             tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
2674             tcg_gen_rotli_i64(t1, t1, sh);
2675 #else
2676             g_assert_not_reached();
2677 #endif
2680         tcg_gen_andi_tl(t1, t1, mask);
2681         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2682         tcg_gen_or_tl(t_ra, t_ra, t1);
2683         tcg_temp_free(t1);
2685     if (unlikely(Rc(ctx->opcode) != 0)) {
2686         gen_set_Rc0(ctx, t_ra);
2690 /* rlwinm & rlwinm. */
/*
 * Rotate left word immediate then AND with mask.  Shift-left and extract
 * special cases map onto single TCG ops; the general case rotates the
 * (replicated, on 64-bit) word and applies the mask.
 */
2691 static void gen_rlwinm(DisasContext *ctx)
2693     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2694     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2695     int sh = SH(ctx->opcode);
2696     int mb = MB(ctx->opcode);
2697     int me = ME(ctx->opcode);
2698     int len = me - mb + 1;
2699     int rsh = (32 - sh) & 31;
2701     if (sh != 0 && len > 0 && me == (31 - sh)) {
/* Pure left shift with zero fill. */
2702         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2703     } else if (me == 31 && rsh + len <= 32) {
/* Right-justified bit-field extract. */
2704         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2705     } else {
2706         target_ulong mask;
2707         bool mask_in_32b = true;
2708 #if defined(TARGET_PPC64)
2709         mb += 32;
2710         me += 32;
2711 #endif
2712         mask = MASK(mb, me);
2713 #if defined(TARGET_PPC64)
/* Wrapping masks need the 64-bit replicated-word rotate below. */
2714         if (mask > 0xffffffffu) {
2715             mask_in_32b = false;
2717 #endif
2718         if (mask_in_32b) {
2719             if (sh == 0) {
2720                 tcg_gen_andi_tl(t_ra, t_rs, mask);
2721             } else {
2722                 TCGv_i32 t0 = tcg_temp_new_i32();
2723                 tcg_gen_trunc_tl_i32(t0, t_rs);
2724                 tcg_gen_rotli_i32(t0, t0, sh);
2725                 tcg_gen_andi_i32(t0, t0, mask);
2726                 tcg_gen_extu_i32_tl(t_ra, t0);
2727                 tcg_temp_free_i32(t0);
2729         } else {
2730 #if defined(TARGET_PPC64)
2731             tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2732             tcg_gen_rotli_i64(t_ra, t_ra, sh);
2733             tcg_gen_andi_i64(t_ra, t_ra, mask);
2734 #else
2735             g_assert_not_reached();
2736 #endif
2739     if (unlikely(Rc(ctx->opcode) != 0)) {
2740         gen_set_Rc0(ctx, t_ra);
2744 /* rlwnm & rlwnm. */
/*
 * Rotate left word by register (rB & 0x1f) then AND with the immediate
 * mask.  Same wrapped-mask split as rlwinm: 32-bit rotate when the mask
 * fits in a word, replicated 64-bit rotate otherwise.
 */
2745 static void gen_rlwnm(DisasContext *ctx)
2747     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2748     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2749     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2750     uint32_t mb = MB(ctx->opcode);
2751     uint32_t me = ME(ctx->opcode);
2752     target_ulong mask;
2753     bool mask_in_32b = true;
2755 #if defined(TARGET_PPC64)
2756     mb += 32;
2757     me += 32;
2758 #endif
2759     mask = MASK(mb, me);
2761 #if defined(TARGET_PPC64)
2762     if (mask > 0xffffffffu) {
2763         mask_in_32b = false;
2765 #endif
2766     if (mask_in_32b) {
2767         TCGv_i32 t0 = tcg_temp_new_i32();
2768         TCGv_i32 t1 = tcg_temp_new_i32();
2769         tcg_gen_trunc_tl_i32(t0, t_rb);
2770         tcg_gen_trunc_tl_i32(t1, t_rs);
2771         tcg_gen_andi_i32(t0, t0, 0x1f);
2772         tcg_gen_rotl_i32(t1, t1, t0);
2773         tcg_gen_extu_i32_tl(t_ra, t1);
2774         tcg_temp_free_i32(t0);
2775         tcg_temp_free_i32(t1);
2776     } else {
2777 #if defined(TARGET_PPC64)
2778         TCGv_i64 t0 = tcg_temp_new_i64();
2779         tcg_gen_andi_i64(t0, t_rb, 0x1f);
2780         tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2781         tcg_gen_rotl_i64(t_ra, t_ra, t0);
2782         tcg_temp_free_i64(t0);
2783 #else
2784         g_assert_not_reached();
2785 #endif
2788     tcg_gen_andi_tl(t_ra, t_ra, mask);
2790     if (unlikely(Rc(ctx->opcode) != 0)) {
2791         gen_set_Rc0(ctx, t_ra);
2795 #if defined(TARGET_PPC64)
/*
 * MD/MDS-form 64-bit rotates split one logical instruction across 2 or 4
 * opcode-table slots (the extra mb/me/sh bits live in opc2).  These macros
 * stamp out the per-slot trampolines that pass the extension bits along.
 */
2796 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
2797 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2799     gen_##name(ctx, 0);                                                       \
2802 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2804     gen_##name(ctx, 1);                                                       \
2806 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
2807 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2809     gen_##name(ctx, 0, 0);                                                    \
2812 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2814     gen_##name(ctx, 0, 1);                                                    \
2817 static void glue(gen_, name##2)(DisasContext *ctx)                            \
2819     gen_##name(ctx, 1, 0);                                                    \
2822 static void glue(gen_, name##3)(DisasContext *ctx)                            \
2824     gen_##name(ctx, 1, 1);                                                    \
/*
 * Common body for the rldicl/rldicr/rldic immediate rotate-and-mask family:
 * rA = rot64(rS, sh) & MASK(mb, me), with shift-left and extract fast paths.
 */
2827 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
2829     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2830     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2831     int len = me - mb + 1;
2832     int rsh = (64 - sh) & 63;
2834     if (sh != 0 && len > 0 && me == (63 - sh)) {
2835         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2836     } else if (me == 63 && rsh + len <= 64) {
2837         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2838     } else {
2839         tcg_gen_rotli_tl(t_ra, t_rs, sh);
2840         tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2842     if (unlikely(Rc(ctx->opcode) != 0)) {
2843         gen_set_Rc0(ctx, t_ra);
2847 /* rldicl - rldicl. */
/* Clear left: mask runs from mb to 63.  shn/mbn are the MD-form high bits. */
2848 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
2850     uint32_t sh, mb;
2852     sh = SH(ctx->opcode) | (shn << 5);
2853     mb = MB(ctx->opcode) | (mbn << 5);
2854     gen_rldinm(ctx, mb, 63, sh);
2856 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
2858 /* rldicr - rldicr. */
/*
 * Clear right: mask runs from 0 to me.  NOTE: MD-form encodes 'me' in the
 * same bit positions as 'mb', hence the MB() extraction here is intentional.
 */
2859 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
2861     uint32_t sh, me;
2863     sh = SH(ctx->opcode) | (shn << 5);
2864     me = MB(ctx->opcode) | (men << 5);
2865     gen_rldinm(ctx, 0, me, sh);
2867 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2869 /* rldic - rldic. */
/* Clear: mask runs from mb to 63 - sh. */
2870 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
2872     uint32_t sh, mb;
2874     sh = SH(ctx->opcode) | (shn << 5);
2875     mb = MB(ctx->opcode) | (mbn << 5);
2876     gen_rldinm(ctx, mb, 63 - sh, sh);
2878 GEN_PPC64_R4(rldic, 0x1E, 0x04);
/*
 * Common body for rldcl/rldcr: rotate by register (rB & 0x3f), then AND
 * with the immediate mask.
 */
2880 static void gen_rldnm(DisasContext *ctx, int mb, int me)
2882     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2883     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2884     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2885     TCGv t0;
2887     t0 = tcg_temp_new();
2888     tcg_gen_andi_tl(t0, t_rb, 0x3f);
2889     tcg_gen_rotl_tl(t_ra, t_rs, t0);
2890     tcg_temp_free(t0);
2892     tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2893     if (unlikely(Rc(ctx->opcode) != 0)) {
2894         gen_set_Rc0(ctx, t_ra);
2898 /* rldcl - rldcl. */
/* Clear left, register rotate amount. */
2899 static inline void gen_rldcl(DisasContext *ctx, int mbn)
2901     uint32_t mb;
2903     mb = MB(ctx->opcode) | (mbn << 5);
2904     gen_rldnm(ctx, mb, 63);
2906 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2908 /* rldcr - rldcr. */
/* Clear right, register rotate amount; 'me' shares the mb field (MDS-form). */
2909 static inline void gen_rldcr(DisasContext *ctx, int men)
2911     uint32_t me;
2913     me = MB(ctx->opcode) | (men << 5);
2914     gen_rldnm(ctx, 0, me);
2916 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2918 /* rldimi - rldimi. */
/*
 * Rotate left doubleword immediate then mask insert: merge rot64(rS, sh)
 * into rA under MASK(mb, 63 - sh).  Non-wrapping masks use a single
 * deposit; wrapping masks fall back to rotate/and/merge.
 */
2919 static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
2921     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2922     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2923     uint32_t sh = SH(ctx->opcode) | (shn << 5);
2924     uint32_t mb = MB(ctx->opcode) | (mbn << 5);
2925     uint32_t me = 63 - sh;
2927     if (mb <= me) {
2928         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2929     } else {
2930         target_ulong mask = MASK(mb, me);
2931         TCGv t1 = tcg_temp_new();
2933         tcg_gen_rotli_tl(t1, t_rs, sh);
2934         tcg_gen_andi_tl(t1, t1, mask);
2935         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2936         tcg_gen_or_tl(t_ra, t_ra, t1);
2937         tcg_temp_free(t1);
2939     if (unlikely(Rc(ctx->opcode) != 0)) {
2940         gen_set_Rc0(ctx, t_ra);
2943 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
2944 #endif
2946 /*** Integer shift ***/
2948 /* slw & slw. */
/*
 * Shift left word.  Architecturally a shift count >= 32 yields zero, but
 * host shift semantics are undefined there, so rS is first ANDed with a
 * mask computed from rB bit 5 (all-zeros when count >= 0x20), then shifted
 * by the low 5 bits only.  Result is zero-extended to 32 bits.
 */
2949 static void gen_slw(DisasContext *ctx)
2951     TCGv t0, t1;
2953     t0 = tcg_temp_new();
2954     /* AND rS with a mask that is 0 when rB >= 0x20 */
2955 #if defined(TARGET_PPC64)
2956     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2957     tcg_gen_sari_tl(t0, t0, 0x3f);
2958 #else
2959     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2960     tcg_gen_sari_tl(t0, t0, 0x1f);
2961 #endif
2962     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2963     t1 = tcg_temp_new();
2964     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2965     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2966     tcg_temp_free(t1);
2967     tcg_temp_free(t0);
2968     tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2969     if (unlikely(Rc(ctx->opcode) != 0)) {
2970         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2974 /* sraw & sraw. */
/* Shift right algebraic word: carry handling lives in the helper. */
2975 static void gen_sraw(DisasContext *ctx)
2977     gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
2978                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2979     if (unlikely(Rc(ctx->opcode) != 0)) {
2980         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2984 /* srawi & srawi. */
/*
 * Shift right algebraic word immediate.  CA is set iff the source is
 * negative AND any 1-bits are shifted out (computed before the shift, since
 * dst may alias src); ISA 3.00 mirrors CA into CA32.
 */
2985 static void gen_srawi(DisasContext *ctx)
2987     int sh = SH(ctx->opcode);
2988     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2989     TCGv src = cpu_gpr[rS(ctx->opcode)];
2990     if (sh == 0) {
2991         tcg_gen_ext32s_tl(dst, src);
2992         tcg_gen_movi_tl(cpu_ca, 0);
2993         if (is_isa300(ctx)) {
2994             tcg_gen_movi_tl(cpu_ca32, 0);
2996     } else {
2997         TCGv t0;
2998         tcg_gen_ext32s_tl(dst, src);
2999         tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
3000         t0 = tcg_temp_new();
3001         tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
3002         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
3003         tcg_temp_free(t0);
3004         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
3005         if (is_isa300(ctx)) {
3006             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
3008         tcg_gen_sari_tl(dst, dst, sh);
3010     if (unlikely(Rc(ctx->opcode) != 0)) {
3011         gen_set_Rc0(ctx, dst);
3015 /* srw & srw. */
/*
 * Shift right word.  Same >= 32 masking trick as gen_slw; the extra
 * ext32u before the shift guarantees zeros are shifted in from bit 32.
 */
3016 static void gen_srw(DisasContext *ctx)
3018     TCGv t0, t1;
3020     t0 = tcg_temp_new();
3021     /* AND rS with a mask that is 0 when rB >= 0x20 */
3022 #if defined(TARGET_PPC64)
3023     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
3024     tcg_gen_sari_tl(t0, t0, 0x3f);
3025 #else
3026     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
3027     tcg_gen_sari_tl(t0, t0, 0x1f);
3028 #endif
3029     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3030     tcg_gen_ext32u_tl(t0, t0);
3031     t1 = tcg_temp_new();
3032     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
3033     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3034     tcg_temp_free(t1);
3035     tcg_temp_free(t0);
3036     if (unlikely(Rc(ctx->opcode) != 0)) {
3037         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3041 #if defined(TARGET_PPC64)
3042 /* sld & sld. */
/* Shift left doubleword; counts >= 0x40 are masked to produce zero. */
3043 static void gen_sld(DisasContext *ctx)
3045     TCGv t0, t1;
3047     t0 = tcg_temp_new();
3048     /* AND rS with a mask that is 0 when rB >= 0x40 */
3049     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3050     tcg_gen_sari_tl(t0, t0, 0x3f);
3051     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3052     t1 = tcg_temp_new();
3053     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3054     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3055     tcg_temp_free(t1);
3056     tcg_temp_free(t0);
3057     if (unlikely(Rc(ctx->opcode) != 0)) {
3058         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3062 /* srad & srad. */
/* Shift right algebraic doubleword: carry handling lives in the helper. */
3063 static void gen_srad(DisasContext *ctx)
3065     gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
3066                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3067     if (unlikely(Rc(ctx->opcode) != 0)) {
3068         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3071 /* sradi & sradi. */
/*
 * Shift right algebraic doubleword immediate.  XS-form: the 6th shift bit
 * 'n' arrives via the opcode-table split (sradi0/sradi1).  CA is set iff
 * the source is negative and 1-bits are shifted out, same scheme as srawi.
 */
3072 static inline void gen_sradi(DisasContext *ctx, int n)
3074     int sh = SH(ctx->opcode) + (n << 5);
3075     TCGv dst = cpu_gpr[rA(ctx->opcode)];
3076     TCGv src = cpu_gpr[rS(ctx->opcode)];
3077     if (sh == 0) {
3078         tcg_gen_mov_tl(dst, src);
3079         tcg_gen_movi_tl(cpu_ca, 0);
3080         if (is_isa300(ctx)) {
3081             tcg_gen_movi_tl(cpu_ca32, 0);
3083     } else {
3084         TCGv t0;
3085         tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
3086         t0 = tcg_temp_new();
3087         tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
3088         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
3089         tcg_temp_free(t0);
3090         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
3091         if (is_isa300(ctx)) {
3092             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
3094         tcg_gen_sari_tl(dst, src, sh);
3096     if (unlikely(Rc(ctx->opcode) != 0)) {
3097         gen_set_Rc0(ctx, dst);
3101 static void gen_sradi0(DisasContext *ctx)
3103     gen_sradi(ctx, 0);
3106 static void gen_sradi1(DisasContext *ctx)
3108     gen_sradi(ctx, 1);
3111 /* extswsli & extswsli. */
/* Sign-extend word then shift left immediate (ISA 3.0, XS-form split). */
3112 static inline void gen_extswsli(DisasContext *ctx, int n)
3114     int sh = SH(ctx->opcode) + (n << 5);
3115     TCGv dst = cpu_gpr[rA(ctx->opcode)];
3116     TCGv src = cpu_gpr[rS(ctx->opcode)];
3118     tcg_gen_ext32s_tl(dst, src);
3119     tcg_gen_shli_tl(dst, dst, sh);
3120     if (unlikely(Rc(ctx->opcode) != 0)) {
3121         gen_set_Rc0(ctx, dst);
3125 static void gen_extswsli0(DisasContext *ctx)
3127     gen_extswsli(ctx, 0);
3130 static void gen_extswsli1(DisasContext *ctx)
3132     gen_extswsli(ctx, 1);
3135 /* srd & srd. */
/* Shift right doubleword; counts >= 0x40 are masked to produce zero. */
3136 static void gen_srd(DisasContext *ctx)
3138     TCGv t0, t1;
3140     t0 = tcg_temp_new();
3141     /* AND rS with a mask that is 0 when rB >= 0x40 */
3142     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3143     tcg_gen_sari_tl(t0, t0, 0x3f);
3144     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3145     t1 = tcg_temp_new();
3146     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3147     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3148     tcg_temp_free(t1);
3149     tcg_temp_free(t0);
3150     if (unlikely(Rc(ctx->opcode) != 0)) {
3151         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3154 #endif
3156 /*** Addressing modes ***/
3157 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
/*
 * Effective-address helpers.  NARROW_MODE means the CPU runs with 32-bit
 * addressing, so every computed EA is truncated with ext32u.  maskl clears
 * low displacement bits for DS/DQ-form instructions.
 */
3158 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
3159                                       target_long maskl)
3161     target_long simm = SIMM(ctx->opcode);
3163     simm &= ~maskl;
3164     if (rA(ctx->opcode) == 0) {
3165         if (NARROW_MODE(ctx)) {
3166             simm = (uint32_t)simm;
3168         tcg_gen_movi_tl(EA, simm);
3169     } else if (likely(simm != 0)) {
3170         tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
3171         if (NARROW_MODE(ctx)) {
3172             tcg_gen_ext32u_tl(EA, EA);
3174     } else {
3175         if (NARROW_MODE(ctx)) {
3176             tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3177         } else {
3178             tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
/* Register indirect with index: EA = (rA|0) + rB. */
3183 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
3185     if (rA(ctx->opcode) == 0) {
3186         if (NARROW_MODE(ctx)) {
3187             tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3188         } else {
3189             tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3191     } else {
3192         tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3193         if (NARROW_MODE(ctx)) {
3194             tcg_gen_ext32u_tl(EA, EA);
/* Register indirect: EA = (rA|0). */
3199 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
3201     if (rA(ctx->opcode) == 0) {
3202         tcg_gen_movi_tl(EA, 0);
3203     } else if (NARROW_MODE(ctx)) {
3204         tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3205     } else {
3206         tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
/* ret = arg1 + val, truncated to 32 bits in narrow mode. */
3210 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
3211                                 target_long val)
3213     tcg_gen_addi_tl(ret, arg1, val);
3214     if (NARROW_MODE(ctx)) {
3215         tcg_gen_ext32u_tl(ret, ret);
/* Raise an alignment interrupt for instructions invalid in LE mode. */
3219 static inline void gen_align_no_le(DisasContext *ctx)
3221     gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
3222                       (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
/* EA = (ra ? GPR[ra] : 0) + displ; caller frees the returned temp. */
3225 static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ)
3227     TCGv ea = tcg_temp_new();
3228     if (ra) {
3229         tcg_gen_add_tl(ea, cpu_gpr[ra], displ);
3230     } else {
3231         tcg_gen_mov_tl(ea, displ);
3233     if (NARROW_MODE(ctx)) {
3234         tcg_gen_ext32u_tl(ea, ea);
3236     return ea;
3239 /*** Integer load ***/
/*
 * Memory-op modifiers: DEF_MEMOP applies the CPU's current default byte
 * order; BSWAP_MEMOP flips it (used by the byte-reverse instructions).
 * The macros below stamp out thin gen_qemu_ld*/st* wrappers over the TCG
 * guest-memory access ops, in target-long and 64-bit flavours.
 */
3240 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
3241 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
3243 #define GEN_QEMU_LOAD_TL(ldop, op)                                      \
3244 static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
3245                                   TCGv val,                             \
3246                                   TCGv addr)                            \
3248     tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
3251 GEN_QEMU_LOAD_TL(ld8u,  DEF_MEMOP(MO_UB))
3252 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
3253 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
3254 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
3255 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
3257 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
3258 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
3260 #define GEN_QEMU_LOAD_64(ldop, op)                                  \
3261 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,    \
3262                                              TCGv_i64 val,          \
3263                                              TCGv addr)             \
3265     tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);               \
3268 GEN_QEMU_LOAD_64(ld8u,  DEF_MEMOP(MO_UB))
3269 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
3270 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
3271 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
3272 GEN_QEMU_LOAD_64(ld64,  DEF_MEMOP(MO_Q))
3274 #if defined(TARGET_PPC64)
3275 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q))
3276 #endif
3278 #define GEN_QEMU_STORE_TL(stop, op)                                     \
3279 static void glue(gen_qemu_, stop)(DisasContext *ctx,                    \
3280                                   TCGv val,                             \
3281                                   TCGv addr)                            \
3283     tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                    \
3286 #if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY)
3287 GEN_QEMU_STORE_TL(st8,  DEF_MEMOP(MO_UB))
3288 #endif
3289 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
3290 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
3292 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
3293 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
3295 #define GEN_QEMU_STORE_64(stop, op)                               \
3296 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,  \
3297                                               TCGv_i64 val,       \
3298                                               TCGv addr)          \
3300     tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);             \
3303 GEN_QEMU_STORE_64(st8,  DEF_MEMOP(MO_UB))
3304 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
3305 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
3306 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q))
3308 #if defined(TARGET_PPC64)
3309 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q))
3310 #endif
/*
 * X-form indexed loads: EA = (rA|0) + rB, result in rD.  'chk' injects a
 * privilege check (CHK_NONE / CHK_HVRM).  GEN_LDEPX generates the
 * external-PID variants, which use the dedicated EPID mmu index and
 * require supervisor mode.
 */
3312 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
3313 static void glue(gen_, name##x)(DisasContext *ctx)                            \
3315     TCGv EA;                                                                  \
3316     chk;                                                                      \
3317     gen_set_access_type(ctx, ACCESS_INT);                                     \
3318     EA = tcg_temp_new();                                                      \
3319     gen_addr_reg_index(ctx, EA);                                              \
3320     gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
3321     tcg_temp_free(EA);                                                        \
3324 #define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
3325     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3327 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                            \
3328     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3330 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
3331 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
3333     TCGv EA;                                                                  \
3334     CHK_SV;                                                                   \
3335     gen_set_access_type(ctx, ACCESS_INT);                                     \
3336     EA = tcg_temp_new();                                                      \
3337     gen_addr_reg_index(ctx, EA);                                              \
3338     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
3339     tcg_temp_free(EA);                                                        \
3342 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
3343 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
3344 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
3345 #if defined(TARGET_PPC64)
3346 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00)
3347 #endif
3349 #if defined(TARGET_PPC64)
3350 /* CI load/store variants */
3351 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
3352 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
3353 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
3354 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
3355 #endif
3357 /*** Integer store ***/
/*
 * X-form indexed stores, mirroring the load macros above: EA = (rA|0) + rB,
 * data from rS.  GEN_STEPX produces the supervisor-only external-PID
 * variants using the EPID store mmu index.
 */
3358 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
3359 static void glue(gen_, name##x)(DisasContext *ctx)                            \
3361     TCGv EA;                                                                  \
3362     chk;                                                                      \
3363     gen_set_access_type(ctx, ACCESS_INT);                                     \
3364     EA = tcg_temp_new();                                                      \
3365     gen_addr_reg_index(ctx, EA);                                              \
3366     gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
3367     tcg_temp_free(EA);                                                        \
3369 #define GEN_STX(name, stop, opc2, opc3, type)                                 \
3370     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3372 #define GEN_STX_HVRM(name, stop, opc2, opc3, type)                            \
3373     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3375 #define GEN_STEPX(name, stop, opc2, opc3)                                     \
3376 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
3378     TCGv EA;                                                                  \
3379     CHK_SV;                                                                   \
3380     gen_set_access_type(ctx, ACCESS_INT);                                     \
3381     EA = tcg_temp_new();                                                      \
3382     gen_addr_reg_index(ctx, EA);                                              \
3383     tcg_gen_qemu_st_tl(                                                       \
3384         cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop);              \
3385     tcg_temp_free(EA);                                                        \
3388 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
3389 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
3390 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
3391 #if defined(TARGET_PPC64)
3392 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1d, 0x04)
3393 #endif
3395 #if defined(TARGET_PPC64)
3396 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
3397 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
3398 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
3399 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
3400 #endif
3401 /*** Integer load and store with byte reverse ***/
/* These use the BSWAP_MEMOP gen_qemu_* variants defined above. */
3403 /* lhbrx */
3404 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
3406 /* lwbrx */
3407 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
3409 #if defined(TARGET_PPC64)
3410 /* ldbrx */
3411 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
3412 /* stdbrx */
3413 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
3414 #endif  /* TARGET_PPC64 */
3416 /* sthbrx */
3417 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
3418 /* stwbrx */
3419 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3421 /*** Integer load and store multiple ***/
3423 /* lmw */
/*
 * Load multiple word: GPRs rD..31 from consecutive words at EA.  Invalid
 * in little-endian mode (alignment interrupt); the register loop lives in
 * the lmw helper.
 */
3424 static void gen_lmw(DisasContext *ctx)
3426     TCGv t0;
3427     TCGv_i32 t1;
3429     if (ctx->le_mode) {
3430         gen_align_no_le(ctx);
3431         return;
3433     gen_set_access_type(ctx, ACCESS_INT);
3434     t0 = tcg_temp_new();
3435     t1 = tcg_const_i32(rD(ctx->opcode));
3436     gen_addr_imm_index(ctx, t0, 0);
3437     gen_helper_lmw(cpu_env, t0, t1);
3438     tcg_temp_free(t0);
3439     tcg_temp_free_i32(t1);
3442 /* stmw */
/* Store multiple word: GPRs rS..31 to consecutive words at EA; no-LE. */
3443 static void gen_stmw(DisasContext *ctx)
3445     TCGv t0;
3446     TCGv_i32 t1;
3448     if (ctx->le_mode) {
3449         gen_align_no_le(ctx);
3450         return;
3452     gen_set_access_type(ctx, ACCESS_INT);
3453     t0 = tcg_temp_new();
3454     t1 = tcg_const_i32(rS(ctx->opcode));
3455     gen_addr_imm_index(ctx, t0, 0);
3456     gen_helper_stmw(cpu_env, t0, t1);
3457     tcg_temp_free(t0);
3458     tcg_temp_free_i32(t1);
3461 /*** Integer load and store strings ***/
3463 /* lswi */
3465  * PowerPC32 specification says we must generate an exception if rA is
3466  * in the range of registers to be loaded. In an other hand, IBM says
3467  * this is valid, but rA won't be loaded. For now, I'll follow the
3468  * spec...
/*
 * Load string word immediate: nb bytes (0 encodes 32) into nr = ceil(nb/4)
 * registers starting at rD, wrapping at r31.  Raises the program interrupt
 * when rA falls inside the destination range (see note above); no-LE.
 */
3470 static void gen_lswi(DisasContext *ctx)
3472     TCGv t0;
3473     TCGv_i32 t1, t2;
3474     int nb = NB(ctx->opcode);
3475     int start = rD(ctx->opcode);
3476     int ra = rA(ctx->opcode);
3477     int nr;
3479     if (ctx->le_mode) {
3480         gen_align_no_le(ctx);
3481         return;
3483     if (nb == 0) {
3484         nb = 32;
3486     nr = DIV_ROUND_UP(nb, 4);
3487     if (unlikely(lsw_reg_in_range(start, nr, ra))) {
3488         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
3489         return;
3491     gen_set_access_type(ctx, ACCESS_INT);
3492     t0 = tcg_temp_new();
3493     gen_addr_register(ctx, t0);
3494     t1 = tcg_const_i32(nb);
3495     t2 = tcg_const_i32(start);
3496     gen_helper_lsw(cpu_env, t0, t1, t2);
3497     tcg_temp_free(t0);
3498     tcg_temp_free_i32(t1);
3499     tcg_temp_free_i32(t2);
3502 /* lswx */
/*
 * Load string word indexed: byte count comes from XER at run time, so the
 * range/validity checking is done inside the lswx helper; no-LE.
 */
3503 static void gen_lswx(DisasContext *ctx)
3505     TCGv t0;
3506     TCGv_i32 t1, t2, t3;
3508     if (ctx->le_mode) {
3509         gen_align_no_le(ctx);
3510         return;
3512     gen_set_access_type(ctx, ACCESS_INT);
3513     t0 = tcg_temp_new();
3514     gen_addr_reg_index(ctx, t0);
3515     t1 = tcg_const_i32(rD(ctx->opcode));
3516     t2 = tcg_const_i32(rA(ctx->opcode));
3517     t3 = tcg_const_i32(rB(ctx->opcode));
3518     gen_helper_lswx(cpu_env, t0, t1, t2, t3);
3519     tcg_temp_free(t0);
3520     tcg_temp_free_i32(t1);
3521     tcg_temp_free_i32(t2);
3522     tcg_temp_free_i32(t3);
/* stswi: Store String Word Immediate — store 'nb' bytes from GPRs at rS on. */
static void gen_stswi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    int nb = NB(ctx->opcode);

    /* String operations are big-endian only. */
    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_register(ctx, t0);
    /* An NB field of 0 encodes a 32-byte transfer. */
    if (nb == 0) {
        nb = 32;
    }
    t1 = tcg_const_i32(nb);
    t2 = tcg_const_i32(rS(ctx->opcode));
    gen_helper_stsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}
/* stswx: Store String Word Indexed — byte count taken from XER[25:31]. */
static void gen_stswx(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;

    /* String operations are big-endian only. */
    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    t1 = tcg_temp_new_i32();
    /* Byte count is the low 7 bits of XER. */
    tcg_gen_trunc_tl_i32(t1, cpu_xer);
    tcg_gen_andi_i32(t1, t1, 0x7F);
    t2 = tcg_const_i32(rS(ctx->opcode));
    gen_helper_stsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}
/*** Memory synchronisation ***/
/* eieio: Enforce In-order Execution of I/O — emit a TCG memory barrier. */
static void gen_eieio(DisasContext *ctx)
{
    /* Default eieio semantics: order loads vs stores. */
    TCGBar bar = TCG_MO_LD_ST;

    /*
     * POWER9 has a eieio instruction variant using bit 6 as a hint to
     * tell the CPU it is a store-forwarding barrier.
     */
    if (ctx->opcode & 0x2000000) {
        /*
         * ISA says that "Reserved fields in instructions are ignored
         * by the processor". So ignore the bit 6 on non-POWER9 CPU but
         * as this is not an instruction software should be using,
         * complain to the user.
         */
        if (!(ctx->insns_flags2 & PPC2_ISA300)) {
            qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
                          TARGET_FMT_lx "\n", ctx->cia);
        } else {
            bar = TCG_MO_ST_LD;
        }
    }

    tcg_gen_mb(bar | TCG_BAR_SC);
}
#if !defined(CONFIG_USER_ONLY)
/*
 * Emit code that checks env->tlb_need_flush at run time and calls the
 * local or global flush helper if a flush was deferred ("lazy" flush).
 * Used by isync/sync/ptesync which are flush synchronisation points.
 */
static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
{
    TCGv_i32 t;
    TCGLabel *l;

    /* Nothing to do for CPU models without lazy TLB flushing. */
    if (!ctx->lazy_tlb_flush) {
        return;
    }
    l = gen_new_label();
    t = tcg_temp_new_i32();
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
    /* Skip the helper call when no flush is pending. */
    tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
    if (global) {
        gen_helper_check_tlb_flush_global(cpu_env);
    } else {
        gen_helper_check_tlb_flush_local(cpu_env);
    }
    gen_set_label(l);
    tcg_temp_free_i32(t);
}
#else
/* User mode has no TLB to flush. */
static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
#endif
/* isync: instruction synchronise — full barrier plus TB exit so that
 * any pending interrupts/flag changes are re-evaluated. */
static void gen_isync(DisasContext *ctx)
{
    /*
     * We need to check for a pending TLB flush. This can only happen in
     * kernel mode however so check MSR_PR
     */
    if (!ctx->pr) {
        gen_check_tlb_flush(ctx, false);
    }
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}
/* Access size in bytes encoded in a MemOp. */
#define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE))

/*
 * Common body for the load-and-reserve (larx) family: load the value,
 * then record the reservation address and loaded value for the matching
 * store-conditional.
 */
static void gen_load_locked(DisasContext *ctx, MemOp memop)
{
    TCGv gpr = cpu_gpr[rD(ctx->opcode)];
    TCGv t0 = tcg_temp_new();

    gen_set_access_type(ctx, ACCESS_RES);
    gen_addr_reg_index(ctx, t0);
    tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN);
    /* Remember address and value for the later stcx. comparison. */
    tcg_gen_mov_tl(cpu_reserve, t0);
    tcg_gen_mov_tl(cpu_reserve_val, gpr);
    /* Acquire barrier, as required by larx semantics. */
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
    tcg_temp_free(t0);
}

#define LARX(name, memop)                  \
static void gen_##name(DisasContext *ctx)  \
{                                          \
    gen_load_locked(ctx, memop);           \
}

/* lwarx */
LARX(lbarx, DEF_MEMOP(MO_UB))
LARX(lharx, DEF_MEMOP(MO_UW))
LARX(lwarx, DEF_MEMOP(MO_UL))
/*
 * Non-parallel implementation of the "fetch and increment/decrement
 * bounded/equal" lwat/ldat functions: load mem[EA] and mem[EA+size],
 * conditionally store mem[EA]+addend back, and set RT to the loaded
 * value on the taken path or to the minimum signed value otherwise.
 */
static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
                                      TCGv EA, TCGCond cond, int addend)
{
    TCGv t = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    TCGv u = tcg_temp_new();

    tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
    tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop));
    tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
    tcg_gen_addi_tl(u, t, addend);

    /* E.g. for fetch and increment bounded... */
    /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
    tcg_gen_movcond_tl(cond, u, t, t2, u, t);
    tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);

    /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
    tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1));
    tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u);

    tcg_temp_free(t);
    tcg_temp_free(t2);
    tcg_temp_free(u);
}
/*
 * ISA 3.0 lwat/ldat (Load Atomic): the FC field selects one of several
 * atomic read-modify-write operations. Most map directly onto TCG
 * atomic ops; the compare-and-swap-not-equal and increment/decrement
 * variants have no TCG equivalent and are emitted inline when not
 * running in parallel, otherwise the TB restarts under the
 * exclusive lock (gen_helper_exit_atomic).
 */
static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
{
    uint32_t gpr_FC = FC(ctx->opcode);
    TCGv EA = tcg_temp_new();
    int rt = rD(ctx->opcode);
    bool need_serial;
    TCGv src, dst;

    gen_addr_register(ctx, EA);
    dst = cpu_gpr[rt];
    /* Operand register is RT+1 (wrapping), per the lwat/ldat encoding. */
    src = cpu_gpr[(rt + 1) & 31];

    need_serial = false;
    memop |= MO_ALIGN;
    switch (gpr_FC) {
    case 0: /* Fetch and add */
        tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 1: /* Fetch and xor */
        tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 2: /* Fetch and or */
        tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 3: /* Fetch and 'and' */
        tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 4: /* Fetch and max unsigned */
        tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 5: /* Fetch and max signed */
        tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 6: /* Fetch and min unsigned */
        tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 7: /* Fetch and min signed */
        tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 8: /* Swap */
        tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
        break;

    case 16: /* Compare and swap not equal */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();

            tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
            /* For 32-bit ops on a 64-bit CPU, compare only the low half. */
            if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
                tcg_gen_mov_tl(t1, src);
            } else {
                tcg_gen_ext32u_tl(t1, src);
            }
            tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
                               cpu_gpr[(rt + 2) & 31], t0);
            tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
            tcg_gen_mov_tl(dst, t0);

            tcg_temp_free(t0);
            tcg_temp_free(t1);
        }
        break;

    case 24: /* Fetch and increment bounded */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
        }
        break;
    case 25: /* Fetch and increment equal */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
        }
        break;
    case 28: /* Fetch and decrement bounded */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
        }
        break;

    default:
        /* invoke data storage error handler */
        gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
    }
    tcg_temp_free(EA);

    if (need_serial) {
        /* Restart with exclusive lock. */
        gen_helper_exit_atomic(cpu_env);
        ctx->base.is_jmp = DISAS_NORETURN;
    }
}

/* lwat */
static void gen_lwat(DisasContext *ctx)
{
    gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
}

#ifdef TARGET_PPC64
/* ldat */
static void gen_ldat(DisasContext *ctx)
{
    gen_ld_atomic(ctx, DEF_MEMOP(MO_Q));
}
#endif
/*
 * ISA 3.0 stwat/stdat (Store Atomic): FC selects the atomic update.
 * The fetched-back value is discarded since these are store forms.
 * "Store twin" has no TCG primitive: it is emitted inline when not
 * running in parallel, otherwise the TB restarts under the exclusive lock.
 */
static void gen_st_atomic(DisasContext *ctx, MemOp memop)
{
    uint32_t gpr_FC = FC(ctx->opcode);
    TCGv EA = tcg_temp_new();
    TCGv src, discard;

    gen_addr_register(ctx, EA);
    src = cpu_gpr[rD(ctx->opcode)];
    /* TCG atomic ops always produce a result; we throw it away. */
    discard = tcg_temp_new();

    memop |= MO_ALIGN;
    switch (gpr_FC) {
    case 0: /* add and Store */
        tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 1: /* xor and Store */
        tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 2: /* Or and Store */
        tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 3: /* 'and' and Store */
        tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 4: /* Store max unsigned */
        tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 5: /* Store max signed */
        tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 6: /* Store min unsigned */
        tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 7: /* Store min signed */
        tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 24: /* Store twin */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            /* Restart with exclusive lock. */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        } else {
            TCGv t = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            TCGv s = tcg_temp_new();
            TCGv s2 = tcg_temp_new();
            TCGv ea_plus_s = tcg_temp_new();

            /* Store src to both words only if they currently compare equal. */
            tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
            tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
            tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
            tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
            tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
            tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
            tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);

            tcg_temp_free(ea_plus_s);
            tcg_temp_free(s2);
            tcg_temp_free(s);
            tcg_temp_free(t2);
            tcg_temp_free(t);
        }
        break;
    default:
        /* invoke data storage error handler */
        gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
    }
    tcg_temp_free(discard);
    tcg_temp_free(EA);
}

/* stwat */
static void gen_stwat(DisasContext *ctx)
{
    gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
}

#ifdef TARGET_PPC64
/* stdat */
static void gen_stdat(DisasContext *ctx)
{
    gen_st_atomic(ctx, DEF_MEMOP(MO_Q));
}
#endif
/*
 * Common body for the store-conditional (stcx.) family: succeed only if
 * the effective address matches the reservation and the memory still
 * holds the reserved value; CR0 reports the outcome (EQ on success,
 * plus the SO bit). The reservation is always cleared afterwards.
 */
static void gen_conditional_store(DisasContext *ctx, MemOp memop)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    int reg = rS(ctx->opcode);

    gen_set_access_type(ctx, ACCESS_RES);
    gen_addr_reg_index(ctx, t0);
    /* Wrong address => fail without touching memory. */
    tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
    tcg_temp_free(t0);

    t0 = tcg_temp_new();
    tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
                              cpu_gpr[reg], ctx->mem_idx,
                              DEF_MEMOP(memop) | MO_ALIGN);
    /* CR0 = (old == reserved value) << EQ | SO */
    tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
    tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
    tcg_gen_or_tl(t0, t0, cpu_so);
    tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
    tcg_temp_free(t0);
    tcg_gen_br(l2);

    gen_set_label(l1);

    /*
     * Address mismatch implies failure. But we still need to provide
     * the memory barrier semantics of the instruction.
     */
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
    tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

    gen_set_label(l2);
    /* Invalidate the reservation in either case. */
    tcg_gen_movi_tl(cpu_reserve, -1);
}

#define STCX(name, memop)                  \
static void gen_##name(DisasContext *ctx)  \
{                                          \
    gen_conditional_store(ctx, memop);     \
}

STCX(stbcx_, DEF_MEMOP(MO_UB))
STCX(sthcx_, DEF_MEMOP(MO_UW))
STCX(stwcx_, DEF_MEMOP(MO_UL))
#if defined(TARGET_PPC64)
/* ldarx */
LARX(ldarx, DEF_MEMOP(MO_Q))
/* stdcx. */
STCX(stdcx_, DEF_MEMOP(MO_Q))

/*
 * lqarx: quadword load-and-reserve into the even/odd GPR pair RD/RD+1.
 * Parallel mode uses the 128-bit atomic helpers when available,
 * otherwise the TB restarts under the exclusive lock; serial mode
 * emits two 64-bit loads.
 */
static void gen_lqarx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv EA, hi, lo;

    /* RD must be even and must not overlap RA/RB. */
    if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
                 (rd == rB(ctx->opcode)))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);

    /* Note that the low part is always in RD+1, even in LE mode. */
    lo = cpu_gpr[rd + 1];
    hi = cpu_gpr[rd];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_ATOMIC128) {
            TCGv_i32 oi = tcg_temp_new_i32();
            if (ctx->le_mode) {
                tcg_gen_movi_i32(oi, make_memop_idx(MO_LE | MO_128 | MO_ALIGN,
                                                    ctx->mem_idx));
                gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
            } else {
                tcg_gen_movi_i32(oi, make_memop_idx(MO_BE | MO_128 | MO_ALIGN,
                                                    ctx->mem_idx));
                gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
            }
            tcg_temp_free_i32(oi);
            /* Helper returns the high half via env->retxh. */
            tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
        } else {
            /* Restart with exclusive lock. */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
            tcg_temp_free(EA);
            return;
        }
    } else if (ctx->le_mode) {
        tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16);
        tcg_gen_mov_tl(cpu_reserve, EA);
        gen_addr_add(ctx, EA, EA, 8);
        tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ);
    } else {
        tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16);
        tcg_gen_mov_tl(cpu_reserve, EA);
        gen_addr_add(ctx, EA, EA, 8);
        tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);

    /* Record both halves for the stqcx. comparison. */
    tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
    tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
}

/* stqcx.: quadword store-conditional from the even/odd GPR pair RS/RS+1. */
static void gen_stqcx_(DisasContext *ctx)
{
    int rs = rS(ctx->opcode);
    TCGv EA, hi, lo;

    /* RS must be even. */
    if (unlikely(rs & 1)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);

    /* Note that the low part is always in RS+1, even in LE mode. */
    lo = cpu_gpr[rs + 1];
    hi = cpu_gpr[rs];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_CMPXCHG128) {
            TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_128) | MO_ALIGN);
            if (ctx->le_mode) {
                gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            } else {
                gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            }
            tcg_temp_free_i32(oi);
        } else {
            /* Restart with exclusive lock. */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        }
        tcg_temp_free(EA);
    } else {
        TCGLabel *lab_fail = gen_new_label();
        TCGLabel *lab_over = gen_new_label();
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
        tcg_temp_free(EA);

        /* Compare both halves of memory against the reserved values. */
        gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val2)
                                     : offsetof(CPUPPCState, reserve_val)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_ld64_i64(ctx, t0, t0);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val)
                                     : offsetof(CPUPPCState, reserve_val2)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        /* Success */
        gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);

        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
        tcg_gen_br(lab_over);

        gen_set_label(lab_fail);
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

        gen_set_label(lab_over);
        tcg_gen_movi_tl(cpu_reserve, -1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
#endif /* defined(TARGET_PPC64) */
/* sync: heavyweight memory barrier; also a TLB-flush sync point. */
static void gen_sync(DisasContext *ctx)
{
    /* L field distinguishes sync (0), lwsync (1), ptesync (2). */
    uint32_t l = (ctx->opcode >> 21) & 3;

    /*
     * We may need to check for a pending TLB flush.
     *
     * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32.
     *
     * Additionally, this can only happen in kernel mode however so
     * check MSR_PR as well.
     */
    if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
        gen_check_tlb_flush(ctx, true);
    }
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
}
/* wait: halt the CPU until the next interrupt. */
static void gen_wait(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(1);
    /* Set cpu->halted; the env offset is relative to the CPUState parent. */
    tcg_gen_st_i32(t0, cpu_env,
                   -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
    tcg_temp_free_i32(t0);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
}
#if defined(TARGET_PPC64)
/*
 * Power-management instructions (doze/nap/stop/sleep/rvwinkle):
 * all are hypervisor-privileged and funnel into the pminsn helper
 * with the requested power-saving level, then halt translation.
 */
static void gen_doze(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_DOZE);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_nap(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_NAP);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_stop(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_STOP);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_sleep(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_SLEEP);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_rvwinkle(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_RVWINKLE);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
#endif /* defined(CONFIG_USER_ONLY) */
}
#endif /* #if defined(TARGET_PPC64) */
4189 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
4191 #if defined(TARGET_PPC64)
4192 if (ctx->has_cfar) {
4193 tcg_gen_movi_tl(cpu_cfar, nip);
4195 #endif
#if defined(TARGET_PPC64)
/* Add the instructions of the finishing TB to the PMU insn counters. */
static void pmu_count_insns(DisasContext *ctx)
{
    /*
     * Do not bother calling the helper if the PMU isn't counting
     * instructions.
     */
    if (!ctx->pmu_insn_cnt) {
        return;
    }

#if !defined(CONFIG_USER_ONLY)
    /*
     * The PMU insns_inc() helper stops the internal PMU timer if a
     * counter overflows happens. In that case, if the guest is
     * running with icount and we do not handle it beforehand,
     * the helper can trigger a 'bad icount read'.
     */
    gen_icount_io_start(ctx);

    gen_helper_insns_inc(cpu_env, tcg_constant_i32(ctx->base.num_insns));
#else
    /*
     * User mode can read (but not write) PMC5 and start/stop
     * the PMU via MMCR0_FC. In this case just increment
     * PMC5 with base.num_insns.
     */
    TCGv t0 = tcg_temp_new();

    gen_load_spr(t0, SPR_POWER_PMC5);
    tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
    gen_store_spr(SPR_POWER_PMC5, t0);

    tcg_temp_free(t0);
#endif /* #if !defined(CONFIG_USER_ONLY) */
}
#else
/* No PMU instruction counting on 32-bit CPUs. */
static void pmu_count_insns(DisasContext *ctx)
{
    return;
}
#endif /* #if defined(TARGET_PPC64) */
/* True if a direct goto_tb chain to 'dest' is permitted for this TB. */
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
{
    return translator_use_goto_tb(&ctx->base, dest);
}
4246 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
4248 if (unlikely(ctx->singlestep_enabled)) {
4249 gen_debug_exception(ctx);
4250 } else {
4252 * tcg_gen_lookup_and_goto_ptr will exit the TB if
4253 * CF_NO_GOTO_PTR is set. Count insns now.
4255 if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
4256 pmu_count_insns(ctx);
4259 tcg_gen_lookup_and_goto_ptr();
/*** Branch ***/
/* Jump to 'dest', chaining TBs directly when allowed. */
static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    /* In 32-bit mode only the low 32 bits of the target are used. */
    if (NARROW_MODE(ctx)) {
        dest = (uint32_t) dest;
    }
    if (use_goto_tb(ctx, dest)) {
        pmu_count_insns(ctx);
        tcg_gen_goto_tb(n);
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        tcg_gen_exit_tb(ctx->base.tb, n);
    } else {
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        gen_lookup_and_goto_ptr(ctx);
    }
}
4280 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
4282 if (NARROW_MODE(ctx)) {
4283 nip = (uint32_t)nip;
4285 tcg_gen_movi_tl(cpu_lr, nip);
/* b ba bl bla: unconditional branch, optionally absolute and/or linking. */
static void gen_b(DisasContext *ctx)
{
    target_ulong li, target;

    /* sign extend LI */
    li = LI(ctx->opcode);
    li = (li ^ 0x02000000) - 0x02000000;
    /* AA selects absolute vs. CIA-relative addressing. */
    if (likely(AA(ctx->opcode) == 0)) {
        target = ctx->cia + li;
    } else {
        target = li;
    }
    /* LK selects the linking forms (bl/bla). */
    if (LK(ctx->opcode)) {
        gen_setlr(ctx, ctx->base.pc_next);
    }
    gen_update_cfar(ctx, ctx->cia);
    gen_goto_tb(ctx, 0, target);
    ctx->base.is_jmp = DISAS_NORETURN;
}
/* Conditional-branch target kinds. */
#define BCOND_IM  0   /* immediate displacement (bc)   */
#define BCOND_LR  1   /* link register (bclr)          */
#define BCOND_CTR 2   /* count register (bcctr)        */
#define BCOND_TAR 3   /* target address register       */

/*
 * Common body for all conditional branches: optionally decrement and
 * test CTR, optionally test a CR bit, then branch to an immediate
 * displacement or a register target, with a fallthrough path when the
 * branch is not unconditional (BO != 1x1xx).
 */
static void gen_bcond(DisasContext *ctx, int type)
{
    uint32_t bo = BO(ctx->opcode);
    TCGLabel *l1;
    TCGv target;

    if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
        /* Snapshot the target register before LK may clobber LR. */
        target = tcg_temp_local_new();
        if (type == BCOND_CTR) {
            tcg_gen_mov_tl(target, cpu_ctr);
        } else if (type == BCOND_TAR) {
            gen_load_spr(target, SPR_TAR);
        } else {
            tcg_gen_mov_tl(target, cpu_lr);
        }
    } else {
        target = NULL;
    }
    if (LK(ctx->opcode)) {
        gen_setlr(ctx, ctx->base.pc_next);
    }
    l1 = gen_new_label();
    if ((bo & 0x4) == 0) {
        /* Decrement and test CTR */
        TCGv temp = tcg_temp_new();

        if (type == BCOND_CTR) {
            /*
             * All ISAs up to v3 describe this form of bcctr as invalid but
             * some processors, ie. 64-bit server processors compliant with
             * arch 2.x, do implement a "test and decrement" logic instead,
             * as described in their respective UMs. This logic involves CTR
             * to act as both the branch target and a counter, which makes
             * it basically useless and thus never used in real code.
             *
             * This form was hence chosen to trigger extra micro-architectural
             * side-effect on real HW needed for the Spectre v2 workaround.
             * It is up to guests that implement such workaround, ie. linux, to
             * use this form in a way it just triggers the side-effect without
             * doing anything else harmful.
             */
            if (unlikely(!is_book3s_arch2x(ctx))) {
                gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
                tcg_temp_free(temp);
                tcg_temp_free(target);
                return;
            }

            /* Test first, then decrement ("test and decrement" form). */
            if (NARROW_MODE(ctx)) {
                tcg_gen_ext32u_tl(temp, cpu_ctr);
            } else {
                tcg_gen_mov_tl(temp, cpu_ctr);
            }
            if (bo & 0x2) {
                tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
            } else {
                tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
            }
            tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
        } else {
            /* Normal form: decrement CTR first, then test. */
            tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
            if (NARROW_MODE(ctx)) {
                tcg_gen_ext32u_tl(temp, cpu_ctr);
            } else {
                tcg_gen_mov_tl(temp, cpu_ctr);
            }
            if (bo & 0x2) {
                tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
            } else {
                tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
            }
        }
        tcg_temp_free(temp);
    }
    if ((bo & 0x10) == 0) {
        /* Test CR */
        uint32_t bi = BI(ctx->opcode);
        uint32_t mask = 0x08 >> (bi & 0x03);
        TCGv_i32 temp = tcg_temp_new_i32();

        if (bo & 0x8) {
            /* Branch if the CR bit is set. */
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
            tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
        } else {
            /* Branch if the CR bit is clear. */
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
            tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
        }
        tcg_temp_free_i32(temp);
    }
    gen_update_cfar(ctx, ctx->cia);
    if (type == BCOND_IM) {
        /* Sign-extended 16-bit displacement, relative unless AA is set. */
        target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
        if (likely(AA(ctx->opcode) == 0)) {
            gen_goto_tb(ctx, 0, ctx->cia + li);
        } else {
            gen_goto_tb(ctx, 0, li);
        }
    } else {
        /* Register target: low two bits are always cleared. */
        if (NARROW_MODE(ctx)) {
            tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
        } else {
            tcg_gen_andi_tl(cpu_nip, target, ~3);
        }
        gen_lookup_and_goto_ptr(ctx);
        tcg_temp_free(target);
    }
    if ((bo & 0x14) != 0x14) {
        /* fallthrough case */
        gen_set_label(l1);
        gen_goto_tb(ctx, 1, ctx->base.pc_next);
    }
    ctx->base.is_jmp = DISAS_NORETURN;
}
/* bc: branch conditional to immediate displacement. */
static void gen_bc(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_IM);
}

/* bcctr: branch conditional to CTR. */
static void gen_bcctr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_CTR);
}

/* bclr: branch conditional to LR. */
static void gen_bclr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_LR);
}

/* bctar: branch conditional to TAR. */
static void gen_bctar(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_TAR);
}
/*** Condition register logical ***/
/*
 * Generate a CR-bit logical op: align bits crbA and crbB with the
 * destination bit position crbD, apply tcg_op, then merge the single
 * result bit back into the destination CR field.
 */
#define GEN_CRLOGIC(name, tcg_op, opc)                                        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    uint8_t bitmask;                                                          \
    int sh;                                                                   \
    TCGv_i32 t0, t1;                                                          \
    sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
    t0 = tcg_temp_new_i32();                                                  \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
    t1 = tcg_temp_new_i32();                                                  \
    sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
    tcg_op(t0, t0, t1);                                                       \
    bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
    tcg_gen_andi_i32(t0, t0, bitmask);                                        \
    tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
    tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}

/* crand */
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
/* crandc */
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
/* creqv */
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
/* crnand */
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
/* crnor */
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
/* cror */
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
/* crorc */
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
/* crxor */
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
/* mcrf: copy one CR field to another. */
static void gen_mcrf(DisasContext *ctx)
{
    tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
}
/*** System linkage ***/

/* rfi (supervisor only) */
static void gen_rfi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /*
     * This instruction doesn't exist anymore on 64-bit server
     * processors compliant with arch 2.x
     */
    if (is_book3s_arch2x(ctx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    /* Restore CPU state */
    CHK_SV;
    gen_icount_io_start(ctx);
    gen_update_cfar(ctx, ctx->cia);
    gen_helper_rfi(cpu_env);
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}
#if defined(TARGET_PPC64)
/* rfid: return from interrupt, 64-bit form (supervisor only). */
static void gen_rfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Restore CPU state */
    CHK_SV;
    gen_icount_io_start(ctx);
    gen_update_cfar(ctx, ctx->cia);
    gen_helper_rfid(cpu_env);
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}

#if !defined(CONFIG_USER_ONLY)
/* rfscv: return from system-call-vectored (supervisor only). */
static void gen_rfscv(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Restore CPU state */
    CHK_SV;
    gen_icount_io_start(ctx);
    gen_update_cfar(ctx, ctx->cia);
    gen_helper_rfscv(cpu_env);
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}
#endif

/* hrfid: return from hypervisor interrupt (hypervisor only). */
static void gen_hrfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Restore CPU state */
    CHK_HV;
    gen_helper_hrfid(cpu_env);
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}
#endif
/* sc */
#if defined(CONFIG_USER_ONLY)
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
#else
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
#define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED
#endif
/* sc: system call — raise the syscall exception with the LEV field. */
static void gen_sc(DisasContext *ctx)
{
    uint32_t lev;

    lev = (ctx->opcode >> 5) & 0x7F;
    gen_exception_err(ctx, POWERPC_SYSCALL, lev);
}

#if defined(TARGET_PPC64)
#if !defined(CONFIG_USER_ONLY)
/* scv: system call vectored — dispatched via a helper. */
static void gen_scv(DisasContext *ctx)
{
    uint32_t lev = (ctx->opcode >> 5) & 0x7F;

    /* Set the PC back to the faulting instruction. */
    gen_update_nip(ctx, ctx->cia);
    gen_helper_scv(cpu_env, tcg_constant_i32(lev));

    ctx->base.is_jmp = DISAS_NORETURN;
}
#endif
#endif
4602 /*** Trap ***/
4604 /* Check for unconditional traps (always or never) */
4605 static bool check_unconditional_trap(DisasContext *ctx)
4607 /* Trap never */
4608 if (TO(ctx->opcode) == 0) {
4609 return true;
4611 /* Trap always */
4612 if (TO(ctx->opcode) == 31) {
4613 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
4614 return true;
4616 return false;
/* tw: trap word if the TO-selected comparison of rA and rB holds. */
static void gen_tw(DisasContext *ctx)
{
    TCGv_i32 t0;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                  t0);
    tcg_temp_free_i32(t0);
}

/* twi: trap word immediate — compare rA against a sign-extended immediate. */
static void gen_twi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_tl(SIMM(ctx->opcode));
    t1 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
#if defined(TARGET_PPC64)
/* td: trap doubleword if the TO-selected comparison of rA and rB holds. */
static void gen_td(DisasContext *ctx)
{
    TCGv_i32 t0;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                  t0);
    tcg_temp_free_i32(t0);
}

/* tdi: trap doubleword immediate. */
static void gen_tdi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_tl(SIMM(ctx->opcode));
    t1 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
#endif
/*** Processor control ***/

/* mcrxr: move SO/OV/CA into a CR field and clear them in XER. */
static void gen_mcrxr(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    /* Pack SO (bit 3), OV (bit 2), CA (bit 1) into the CR field. */
    tcg_gen_trunc_tl_i32(t0, cpu_so);
    tcg_gen_trunc_tl_i32(t1, cpu_ov);
    tcg_gen_trunc_tl_i32(dst, cpu_ca);
    tcg_gen_shli_i32(t0, t0, 3);
    tcg_gen_shli_i32(t1, t1, 2);
    tcg_gen_shli_i32(dst, dst, 1);
    tcg_gen_or_i32(dst, dst, t0);
    tcg_gen_or_i32(dst, dst, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);

    /* The transferred XER bits are cleared. */
    tcg_gen_movi_tl(cpu_so, 0);
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_movi_tl(cpu_ca, 0);
}
#ifdef TARGET_PPC64
/* mcrxrx: move OV/OV32/CA/CA32 into a CR field (XER bits not cleared). */
static void gen_mcrxrx(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    /* copy OV and OV32 */
    tcg_gen_shli_tl(t0, cpu_ov, 1);
    tcg_gen_or_tl(t0, t0, cpu_ov32);
    tcg_gen_shli_tl(t0, t0, 2);
    /* copy CA and CA32 */
    tcg_gen_shli_tl(t1, cpu_ca, 1);
    tcg_gen_or_tl(t1, t1, cpu_ca32);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_trunc_tl_i32(dst, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
#endif
4728 /* mfcr mfocrf */
/*
 * mfcr / mfocrf: read the condition register into rD.
 * With opcode bit 0x00100000 set (mfocrf) and a one-hot CRM mask, only the
 * selected 4-bit CR field is read, shifted into its architectural position.
 * Otherwise (mfcr, or a non-one-hot mask) all eight 4-bit cpu_crf[] fields
 * are concatenated, crf[0] in the most-significant nibble.
 */
4729 static void gen_mfcr(DisasContext *ctx)
4731     uint32_t crm, crn;
4733     if (likely(ctx->opcode & 0x00100000)) {
4734         crm = CRM(ctx->opcode);
/* crm && !(crm & (crm - 1)): exactly one bit set in the field mask */
4735         if (likely(crm && ((crm & (crm - 1)) == 0))) {
4736             crn = ctz32(crm);
/* CRM bit n selects CR field 7-n; shift it to nibble position crn */
4737             tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
4738             tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
4739                             cpu_gpr[rD(ctx->opcode)], crn * 4);
4741     } else {
4742         TCGv_i32 t0 = tcg_temp_new_i32();
4743         tcg_gen_mov_i32(t0, cpu_crf[0]);
4744         tcg_gen_shli_i32(t0, t0, 4);
4745         tcg_gen_or_i32(t0, t0, cpu_crf[1]);
4746         tcg_gen_shli_i32(t0, t0, 4);
4747         tcg_gen_or_i32(t0, t0, cpu_crf[2]);
4748         tcg_gen_shli_i32(t0, t0, 4);
4749         tcg_gen_or_i32(t0, t0, cpu_crf[3]);
4750         tcg_gen_shli_i32(t0, t0, 4);
4751         tcg_gen_or_i32(t0, t0, cpu_crf[4]);
4752         tcg_gen_shli_i32(t0, t0, 4);
4753         tcg_gen_or_i32(t0, t0, cpu_crf[5]);
4754         tcg_gen_shli_i32(t0, t0, 4);
4755         tcg_gen_or_i32(t0, t0, cpu_crf[6]);
4756         tcg_gen_shli_i32(t0, t0, 4);
4757         tcg_gen_or_i32(t0, t0, cpu_crf[7]);
4758         tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4759         tcg_temp_free_i32(t0);
4763 /* mfmsr */
/* mfmsr: supervisor-only read of the MSR into rD (CHK_SV raises a
 * privilege exception and returns if the context is user mode). */
4764 static void gen_mfmsr(DisasContext *ctx)
4766     CHK_SV;
4767     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
4770 /* mfspr */
/*
 * Common implementation for mfspr and mftb: dispatch through the
 * per-SPR read callback table, selecting the user/hypervisor/supervisor
 * variant from the current privilege state.  A NOACCESS callback means
 * the SPR exists but is privileged; a NULL callback means it is not
 * defined at all, in which case the ISA-mandated fallback behaviour
 * (no-op, privilege exception, or hv-emulation exception) is applied.
 */
4771 static inline void gen_op_mfspr(DisasContext *ctx)
4773     void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
4774     uint32_t sprn = SPR(ctx->opcode);
4776 #if defined(CONFIG_USER_ONLY)
4777     read_cb = ctx->spr_cb[sprn].uea_read;
4778 #else
4779     if (ctx->pr) {
4780         read_cb = ctx->spr_cb[sprn].uea_read;
4781     } else if (ctx->hv) {
4782         read_cb = ctx->spr_cb[sprn].hea_read;
4783     } else {
4784         read_cb = ctx->spr_cb[sprn].oea_read;
4786 #endif
4787     if (likely(read_cb != NULL)) {
4788         if (likely(read_cb != SPR_NOACCESS)) {
4789             (*read_cb)(ctx, rD(ctx->opcode), sprn);
4790         } else {
4791             /* Privilege exception */
4793              * This is a hack to avoid warnings when running Linux:
4794              * this OS breaks the PowerPC virtualisation model,
4795              * allowing userland application to read the PVR
4797             if (sprn != SPR_PVR) {
4798                 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
4799                               "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4800                               ctx->cia);
4802             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4804     } else {
4805         /* ISA 2.07 defines these as no-ops */
4806         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4807             (sprn >= 808 && sprn <= 811)) {
4808             /* This is a nop */
4809             return;
4811         /* Not defined */
4812         qemu_log_mask(LOG_GUEST_ERROR,
4813                       "Trying to read invalid spr %d (0x%03x) at "
4814                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4817          * The behaviour depends on MSR:PR and SPR# bit 0x10, it can
4818          * generate a priv, a hv emu or a no-op
4820         if (sprn & 0x10) {
4821             if (ctx->pr) {
4822                 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
4824         } else {
/* SPRs 0, 4, 5 and 6 get the hv-emulation assist treatment even in
 * supervisor mode */
4825             if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
4826                 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
/* mfspr: thin wrapper around the shared SPR-read dispatcher above. */
4832 static void gen_mfspr(DisasContext *ctx)
4834     gen_op_mfspr(ctx);
4837 /* mftb */
/* mftb: time-base reads go through the same SPR dispatch path. */
4838 static void gen_mftb(DisasContext *ctx)
4840     gen_op_mfspr(ctx);
4843 /* mtcrf mtocrf*/
/*
 * mtcrf / mtocrf: write rS into the condition register under the CRM
 * field mask.  mtocrf (opcode bit 0x00100000) with a one-hot mask writes
 * exactly one 4-bit CR field; otherwise each CRM bit n updates CR field
 * 7-n from the corresponding nibble of rS.
 */
4844 static void gen_mtcrf(DisasContext *ctx)
4846     uint32_t crm, crn;
4848     crm = CRM(ctx->opcode);
4849     if (likely((ctx->opcode & 0x00100000))) {
4850         if (crm && ((crm & (crm - 1)) == 0)) {
4851             TCGv_i32 temp = tcg_temp_new_i32();
4852             crn = ctz32(crm);
4853             tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4854             tcg_gen_shri_i32(temp, temp, crn * 4);
4855             tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
4856             tcg_temp_free_i32(temp);
4858     } else {
4859         TCGv_i32 temp = tcg_temp_new_i32();
4860         tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4861         for (crn = 0 ; crn < 8 ; crn++) {
4862             if (crm & (1 << crn)) {
4863                 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
4864                 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
4867         tcg_temp_free_i32(temp);
4871 /* mtmsr */
4872 #if defined(TARGET_PPC64)
/*
 * mtmsrd (Book3S 2.x only): write rS to the MSR.  The L=1 form touches
 * only EE and RI; the L=0 form writes everything except HV, S, ME and LE.
 * Goes through helper_store_msr so MSR side effects (including entering
 * power-saving state) are applied, then ends the TB since translation
 * state may have changed.
 */
4873 static void gen_mtmsrd(DisasContext *ctx)
4875     if (unlikely(!is_book3s_arch2x(ctx))) {
4876         gen_invalid(ctx);
4877         return;
4880     CHK_SV;
4882 #if !defined(CONFIG_USER_ONLY)
4883     TCGv t0, t1;
4884     target_ulong mask;
4886     t0 = tcg_temp_new();
4887     t1 = tcg_temp_new();
/* MSR writes can affect timers/interrupts: account for icount I/O */
4889     gen_icount_io_start(ctx);
4891     if (ctx->opcode & 0x00010000) {
4892         /* L=1 form only updates EE and RI */
4893         mask = (1ULL << MSR_RI) | (1ULL << MSR_EE);
4894     } else {
4895         /* mtmsrd does not alter HV, S, ME, or LE */
4896         mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) |
4897                  (1ULL << MSR_HV));
4899          * XXX: we need to update nip before the store if we enter
4900          * power saving mode, we will exit the loop directly from
4901          * ppc_store_msr
4903         gen_update_nip(ctx, ctx->base.pc_next);
/* new MSR = (rS & mask) | (old MSR & ~mask) */
4906     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
4907     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
4908     tcg_gen_or_tl(t0, t0, t1);
4910     gen_helper_store_msr(cpu_env, t0);
4912     /* Must stop the translation as machine state (may have) changed */
4913     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4915     tcg_temp_free(t0);
4916     tcg_temp_free(t1);
4917 #endif /* !defined(CONFIG_USER_ONLY) */
4919 #endif /* defined(TARGET_PPC64) */
/*
 * mtmsr: 32-bit MSR write, mirroring gen_mtmsrd above but restricted to
 * the low 32 MSR bits.  L=1 updates only EE and RI; otherwise S, ME and
 * LE are preserved.  Ends the TB after helper_store_msr.
 */
4921 static void gen_mtmsr(DisasContext *ctx)
4923     CHK_SV;
4925 #if !defined(CONFIG_USER_ONLY)
4926     TCGv t0, t1;
/* only the low 32 bits of the MSR are writable via mtmsr */
4927     target_ulong mask = 0xFFFFFFFF;
4929     t0 = tcg_temp_new();
4930     t1 = tcg_temp_new();
4932     gen_icount_io_start(ctx);
4933     if (ctx->opcode & 0x00010000) {
4934         /* L=1 form only updates EE and RI */
4935         mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE);
4936     } else {
4937         /* mtmsr does not alter S, ME, or LE */
4938         mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S));
4941          * XXX: we need to update nip before the store if we enter
4942          * power saving mode, we will exit the loop directly from
4943          * ppc_store_msr
4945         gen_update_nip(ctx, ctx->base.pc_next);
/* new MSR = (rS & mask) | (old MSR & ~mask) */
4948     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
4949     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
4950     tcg_gen_or_tl(t0, t0, t1);
4952     gen_helper_store_msr(cpu_env, t0);
4954     /* Must stop the translation as machine state (may have) changed */
4955     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4957     tcg_temp_free(t0);
4958     tcg_temp_free(t1);
4959 #endif
4962 /* mtspr */
/*
 * mtspr: write rS to an SPR via the per-SPR write callback table,
 * selected by privilege level — the mirror image of gen_op_mfspr above.
 * NOACCESS -> privilege exception; NULL (undefined SPR) -> the same
 * ISA-2.07 no-op range / priv / hv-emulation fallback logic as reads.
 */
4963 static void gen_mtspr(DisasContext *ctx)
4965     void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
4966     uint32_t sprn = SPR(ctx->opcode);
4968 #if defined(CONFIG_USER_ONLY)
4969     write_cb = ctx->spr_cb[sprn].uea_write;
4970 #else
4971     if (ctx->pr) {
4972         write_cb = ctx->spr_cb[sprn].uea_write;
4973     } else if (ctx->hv) {
4974         write_cb = ctx->spr_cb[sprn].hea_write;
4975     } else {
4976         write_cb = ctx->spr_cb[sprn].oea_write;
4978 #endif
4979     if (likely(write_cb != NULL)) {
4980         if (likely(write_cb != SPR_NOACCESS)) {
4981             (*write_cb)(ctx, sprn, rS(ctx->opcode));
4982         } else {
4983             /* Privilege exception */
4984             qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
4985                           "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4986                           ctx->cia);
4987             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4989     } else {
4990         /* ISA 2.07 defines these as no-ops */
4991         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4992             (sprn >= 808 && sprn <= 811)) {
4993             /* This is a nop */
4994             return;
4997         /* Not defined */
4998         qemu_log_mask(LOG_GUEST_ERROR,
4999                       "Trying to write invalid spr %d (0x%03x) at "
5000                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
5004          * The behaviour depends on MSR:PR and SPR# bit 0x10, it can
5005          * generate a priv, a hv emu or a no-op
5007         if (sprn & 0x10) {
5008             if (ctx->pr) {
5009                 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5011         } else {
/* NB: only SPR 0 is special-cased here, vs 0/4/5/6 on the read side */
5012             if (ctx->pr || sprn == 0) {
5013                 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5019 #if defined(TARGET_PPC64)
5020 /* setb */
/*
 * setb (ISA 3.0): rD = -1 if CR field crfS has the LT bit set (value >= 8),
 * 1 if the GT bit is set (value >= 4), else 0.  Implemented branch-free:
 * setcond produces the 0/1 case, movcond overrides it with -1 for LT.
 */
5021 static void gen_setb(DisasContext *ctx)
5023     TCGv_i32 t0 = tcg_temp_new_i32();
5024     TCGv_i32 t8 = tcg_constant_i32(8);
5025     TCGv_i32 tm1 = tcg_constant_i32(-1);
5026     int crf = crfS(ctx->opcode);
5028     tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
5029     tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
5030     tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
5032     tcg_temp_free_i32(t0);
5034 #endif
5036 /*** Cache management ***/
5038 /* dcbf */
/*
 * dcbf: data cache block flush.  QEMU does not model data caches, so the
 * only architecturally visible effect to emulate is the MMU access: a
 * dummy byte load is issued so TLB faults are raised as on hardware.
 */
5039 static void gen_dcbf(DisasContext *ctx)
5041     /* XXX: specification says this is treated as a load by the MMU */
5042     TCGv t0;
5043     gen_set_access_type(ctx, ACCESS_CACHE);
5044     t0 = tcg_temp_new();
5045     gen_addr_reg_index(ctx, t0);
5046     gen_qemu_ld8u(ctx, t0, t0);
5047     tcg_temp_free(t0);
5050 /* dcbfep (external PID dcbf) */
/* Same dummy-load scheme as dcbf, but through the external-PID (EPID)
 * load translation context; supervisor only. */
5051 static void gen_dcbfep(DisasContext *ctx)
5053     /* XXX: specification says this is treated as a load by the MMU */
5054     TCGv t0;
5055     CHK_SV;
5056     gen_set_access_type(ctx, ACCESS_CACHE);
5057     t0 = tcg_temp_new();
5058     gen_addr_reg_index(ctx, t0);
5059     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5060     tcg_temp_free(t0);
5063 /* dcbi (Supervisor only) */
/* dcbi: data cache block invalidate.  Emulated as a load+store of one
 * byte so both read and write MMU permissions are exercised. */
5064 static void gen_dcbi(DisasContext *ctx)
5066 #if defined(CONFIG_USER_ONLY)
5067     GEN_PRIV;
5068 #else
5069     TCGv EA, val;
5071     CHK_SV;
5072     EA = tcg_temp_new();
5073     gen_set_access_type(ctx, ACCESS_CACHE);
5074     gen_addr_reg_index(ctx, EA);
5075     val = tcg_temp_new();
5076     /* XXX: specification says this should be treated as a store by the MMU */
5077     gen_qemu_ld8u(ctx, val, EA);
5078     gen_qemu_st8(ctx, val, EA);
5079     tcg_temp_free(val);
5080     tcg_temp_free(EA);
5081 #endif /* defined(CONFIG_USER_ONLY) */
5084 /* dcdst */
/* dcbst: data cache block store — dummy load for MMU effects only. */
5085 static void gen_dcbst(DisasContext *ctx)
5087     /* XXX: specification say this is treated as a load by the MMU */
5088     TCGv t0;
5089     gen_set_access_type(ctx, ACCESS_CACHE);
5090     t0 = tcg_temp_new();
5091     gen_addr_reg_index(ctx, t0);
5092     gen_qemu_ld8u(ctx, t0, t0);
5093     tcg_temp_free(t0);
5096 /* dcbstep (dcbstep External PID version) */
/* dcbstep: external-PID variant of dcbst. */
5097 static void gen_dcbstep(DisasContext *ctx)
5099     /* XXX: specification say this is treated as a load by the MMU */
5100     TCGv t0;
5101     gen_set_access_type(ctx, ACCESS_CACHE);
5102     t0 = tcg_temp_new();
5103     gen_addr_reg_index(ctx, t0);
5104     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5105     tcg_temp_free(t0);
5108 /* dcbt */
/* Cache-touch hints: with no cache model they generate no code at all
 * (the spec says they must not even fault on bad addresses). */
5109 static void gen_dcbt(DisasContext *ctx)
5112      * interpreted as no-op
5113      * XXX: specification say this is treated as a load by the MMU but
5114      * does not generate any exception
5118 /* dcbtep */
5119 static void gen_dcbtep(DisasContext *ctx)
5122      * interpreted as no-op
5123      * XXX: specification say this is treated as a load by the MMU but
5124      * does not generate any exception
5128 /* dcbtst */
5129 static void gen_dcbtst(DisasContext *ctx)
5132      * interpreted as no-op
5133      * XXX: specification say this is treated as a load by the MMU but
5134      * does not generate any exception
5138 /* dcbtstep */
5139 static void gen_dcbtstep(DisasContext *ctx)
5142      * interpreted as no-op
5143      * XXX: specification say this is treated as a load by the MMU but
5144      * does not generate any exception
5148 /* dcbtls */
/* dcbtls (e500): cache-line locking is not emulated, so report failure
 * by setting the "cache unable to lock" bit in L1CSR0. */
5149 static void gen_dcbtls(DisasContext *ctx)
5151     /* Always fails locking the cache */
5152     TCGv t0 = tcg_temp_new();
5153     gen_load_spr(t0, SPR_Exxx_L1CSR0);
5154     tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
5155     gen_store_spr(SPR_Exxx_L1CSR0, t0);
5156     tcg_temp_free(t0);
5159 /* dcbz */
/* dcbz: zero a whole cache block.  The helper needs opcode bits to
 * distinguish dcbz variants/block sizes. */
5160 static void gen_dcbz(DisasContext *ctx)
5162     TCGv tcgv_addr;
5163     TCGv_i32 tcgv_op;
5165     gen_set_access_type(ctx, ACCESS_CACHE);
5166     tcgv_addr = tcg_temp_new();
5167     tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5168     gen_addr_reg_index(ctx, tcgv_addr);
5169     gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
5170     tcg_temp_free(tcgv_addr);
5171     tcg_temp_free_i32(tcgv_op);
5174 /* dcbzep */
/* dcbzep: external-PID variant of dcbz, via its own helper. */
5175 static void gen_dcbzep(DisasContext *ctx)
5177     TCGv tcgv_addr;
5178     TCGv_i32 tcgv_op;
5180     gen_set_access_type(ctx, ACCESS_CACHE);
5181     tcgv_addr = tcg_temp_new();
5182     tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5183     gen_addr_reg_index(ctx, tcgv_addr);
5184     gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
5185     tcg_temp_free(tcgv_addr);
5186     tcg_temp_free_i32(tcgv_op);
5189 /* dst / dstt */
/* Altivec data-stream touch: a hint, valid only with rA != 0; otherwise
 * it is an invalid form and raises the corresponding exception. */
5190 static void gen_dst(DisasContext *ctx)
5192     if (rA(ctx->opcode) == 0) {
5193         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5194     } else {
5195         /* interpreted as no-op */
5199 /* dstst /dststt */
/* Same rA != 0 validity rule as dst. */
5200 static void gen_dstst(DisasContext *ctx)
5202     if (rA(ctx->opcode) == 0) {
5203         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5204     } else {
5205         /* interpreted as no-op */
5210 /* dss / dssall */
/* Data-stream stop: nothing to stop in QEMU. */
5211 static void gen_dss(DisasContext *ctx)
5213     /* interpreted as no-op */
5216 /* icbi */
/* icbi: instruction cache block invalidate.  Must go through a helper
 * so any translated code overlapping the block can be discarded. */
5217 static void gen_icbi(DisasContext *ctx)
5219     TCGv t0;
5220     gen_set_access_type(ctx, ACCESS_CACHE);
5221     t0 = tcg_temp_new();
5222     gen_addr_reg_index(ctx, t0);
5223     gen_helper_icbi(cpu_env, t0);
5224     tcg_temp_free(t0);
5227 /* icbiep */
/* icbiep: external-PID variant of icbi. */
5228 static void gen_icbiep(DisasContext *ctx)
5230     TCGv t0;
5231     gen_set_access_type(ctx, ACCESS_CACHE);
5232     t0 = tcg_temp_new();
5233     gen_addr_reg_index(ctx, t0);
5234     gen_helper_icbiep(cpu_env, t0);
5235     tcg_temp_free(t0);
5238 /* Optional: */
5239 /* dcba */
/* dcba: allocate-hint, no-op without a cache model. */
5240 static void gen_dcba(DisasContext *ctx)
5243      * interpreted as no-op
5244      * XXX: specification say this is treated as a store by the MMU
5245      * but does not generate any exception
5249 /*** Segment register manipulation ***/
5250 /* Supervisor only: */
5252 /* mfsr */
/* mfsr: read segment register SR (from the opcode field) into rD.
 * Supervisor only; compiled out to a privilege trap in user-only mode. */
5253 static void gen_mfsr(DisasContext *ctx)
5255 #if defined(CONFIG_USER_ONLY)
5256     GEN_PRIV;
5257 #else
5258     TCGv t0;
5260     CHK_SV;
5261     t0 = tcg_const_tl(SR(ctx->opcode));
5262     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5263     tcg_temp_free(t0);
5264 #endif /* defined(CONFIG_USER_ONLY) */
5267 /* mfsrin */
/* mfsrin: like mfsr, but the SR number comes from bits 28..31 of rB. */
5268 static void gen_mfsrin(DisasContext *ctx)
5270 #if defined(CONFIG_USER_ONLY)
5271     GEN_PRIV;
5272 #else
5273     TCGv t0;
5275     CHK_SV;
5276     t0 = tcg_temp_new();
5277     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5278     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5279     tcg_temp_free(t0);
5280 #endif /* defined(CONFIG_USER_ONLY) */
5283 /* mtsr */
/* mtsr: write rS into segment register SR (opcode field). */
5284 static void gen_mtsr(DisasContext *ctx)
5286 #if defined(CONFIG_USER_ONLY)
5287     GEN_PRIV;
5288 #else
5289     TCGv t0;
5291     CHK_SV;
5292     t0 = tcg_const_tl(SR(ctx->opcode));
5293     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5294     tcg_temp_free(t0);
5295 #endif /* defined(CONFIG_USER_ONLY) */
5298 /* mtsrin */
/* mtsrin: SR number taken from rB bits 28..31.  NOTE(review): the source
 * register is read via rD() here while gen_mtsrin_64b below uses rS();
 * both decode the same bits for this form, so behaviour is presumably
 * identical — confirm against the instruction encoding before changing. */
5299 static void gen_mtsrin(DisasContext *ctx)
5301 #if defined(CONFIG_USER_ONLY)
5302     GEN_PRIV;
5303 #else
5304     TCGv t0;
5305     CHK_SV;
5307     t0 = tcg_temp_new();
5308     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5309     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
5310     tcg_temp_free(t0);
5311 #endif /* defined(CONFIG_USER_ONLY) */
5314 #if defined(TARGET_PPC64)
5315 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
5317 /* mfsr */
/* 64-bit bridge mode: segment registers are emulated on top of the SLB,
 * but the translate-time shape is identical to the 32-bit versions —
 * the difference lives in the load_sr/store_sr helpers. */
5318 static void gen_mfsr_64b(DisasContext *ctx)
5320 #if defined(CONFIG_USER_ONLY)
5321     GEN_PRIV;
5322 #else
5323     TCGv t0;
5325     CHK_SV;
5326     t0 = tcg_const_tl(SR(ctx->opcode));
5327     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5328     tcg_temp_free(t0);
5329 #endif /* defined(CONFIG_USER_ONLY) */
5332 /* mfsrin */
5333 static void gen_mfsrin_64b(DisasContext *ctx)
5335 #if defined(CONFIG_USER_ONLY)
5336     GEN_PRIV;
5337 #else
5338     TCGv t0;
5340     CHK_SV;
5341     t0 = tcg_temp_new();
5342     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5343     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5344     tcg_temp_free(t0);
5345 #endif /* defined(CONFIG_USER_ONLY) */
5348 /* mtsr */
5349 static void gen_mtsr_64b(DisasContext *ctx)
5351 #if defined(CONFIG_USER_ONLY)
5352     GEN_PRIV;
5353 #else
5354     TCGv t0;
5356     CHK_SV;
5357     t0 = tcg_const_tl(SR(ctx->opcode));
5358     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5359     tcg_temp_free(t0);
5360 #endif /* defined(CONFIG_USER_ONLY) */
5363 /* mtsrin */
5364 static void gen_mtsrin_64b(DisasContext *ctx)
5366 #if defined(CONFIG_USER_ONLY)
5367     GEN_PRIV;
5368 #else
5369     TCGv t0;
5371     CHK_SV;
5372     t0 = tcg_temp_new();
5373     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5374     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5375     tcg_temp_free(t0);
5376 #endif /* defined(CONFIG_USER_ONLY) */
5379 /* slbmte */
/* slbmte: write an SLB entry — rB selects the entry (ESID side),
 * rS supplies the VSID data.  Supervisor only. */
5380 static void gen_slbmte(DisasContext *ctx)
5382 #if defined(CONFIG_USER_ONLY)
5383     GEN_PRIV;
5384 #else
5385     CHK_SV;
5387     gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
5388                          cpu_gpr[rS(ctx->opcode)]);
5389 #endif /* defined(CONFIG_USER_ONLY) */
/* slbmfee: read the ESID half of the SLB entry selected by rB into rS. */
5392 static void gen_slbmfee(DisasContext *ctx)
5394 #if defined(CONFIG_USER_ONLY)
5395     GEN_PRIV;
5396 #else
5397     CHK_SV;
5399     gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5400                              cpu_gpr[rB(ctx->opcode)]);
5401 #endif /* defined(CONFIG_USER_ONLY) */
/* slbmfev: read the VSID half of the SLB entry selected by rB into rS. */
5404 static void gen_slbmfev(DisasContext *ctx)
5406 #if defined(CONFIG_USER_ONLY)
5407     GEN_PRIV;
5408 #else
5409     CHK_SV;
5411     gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5412                              cpu_gpr[rB(ctx->opcode)]);
5413 #endif /* defined(CONFIG_USER_ONLY) */
/*
 * slbfee.: search the SLB for the ESID in rB.  The helper returns the
 * VSID or -1 if not found; CR0 is set from XER.SO plus EQ on a hit,
 * and rS is cleared to 0 on a miss.
 */
5416 static void gen_slbfee_(DisasContext *ctx)
5418 #if defined(CONFIG_USER_ONLY)
5419     gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5420 #else
5421     TCGLabel *l1, *l2;
5423     if (unlikely(ctx->pr)) {
5424         gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5425         return;
5427     gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5428                              cpu_gpr[rB(ctx->opcode)]);
5429     l1 = gen_new_label();
5430     l2 = gen_new_label();
5431     tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5432     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1);
5433     tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
5434     tcg_gen_br(l2);
5435     gen_set_label(l1);
5436     tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0);
5437     gen_set_label(l2);
5438 #endif
5440 #endif /* defined(TARGET_PPC64) */
5442 /*** Lookaside buffer management ***/
5443 /* Optional & supervisor only: */
5445 /* tlbia */
/* tlbia: invalidate the whole TLB.  Hypervisor privileged. */
5446 static void gen_tlbia(DisasContext *ctx)
5448 #if defined(CONFIG_USER_ONLY)
5449     GEN_PRIV;
5450 #else
5451     CHK_HV;
5453     gen_helper_tlbia(cpu_env);
5454 #endif /* defined(CONFIG_USER_ONLY) */
5457 /* tlbiel */
/*
 * tlbiel: local TLB invalidate by EA in rB.  Normally supervisor
 * privileged, but when PSR=0 and the thread runs with HR=1 (radix guest)
 * it becomes hypervisor privileged — hence the extra ctx->hr/psr check.
 */
5458 static void gen_tlbiel(DisasContext *ctx)
5460 #if defined(CONFIG_USER_ONLY)
5461     GEN_PRIV;
5462 #else
5463     bool psr = (ctx->opcode >> 17) & 0x1;
5465     if (ctx->pr || (!ctx->hv && !psr && ctx->hr)) {
5467          * tlbiel is privileged except when PSR=0 and HR=1, making it
5468          * hypervisor privileged.
5470         GEN_PRIV;
5473     gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5474 #endif /* defined(CONFIG_USER_ONLY) */
5477 /* tlbie */
/*
 * tlbie: global TLB invalidate by EA in rB.  Privilege rules mirror
 * tlbiel plus the GTSE (guest translation shootdown enable) gate.  After
 * the invalidate, tlb_need_flush is flagged so the next synchronisation
 * point performs the global flush.
 */
5478 static void gen_tlbie(DisasContext *ctx)
5480 #if defined(CONFIG_USER_ONLY)
5481     GEN_PRIV;
5482 #else
5483     bool psr = (ctx->opcode >> 17) & 0x1;
5484     TCGv_i32 t1;
5486     if (ctx->pr) {
5487         /* tlbie is privileged... */
5488         GEN_PRIV;
5489     } else if (!ctx->hv) {
5490         if (!ctx->gtse || (!psr && ctx->hr)) {
5492              * ... except when GTSE=0 or when PSR=0 and HR=1, making it
5493              * hypervisor privileged.
5495             GEN_PRIV;
/* in 32-bit (narrow) mode only the low 32 bits of rB form the EA */
5499     if (NARROW_MODE(ctx)) {
5500         TCGv t0 = tcg_temp_new();
5501         tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
5502         gen_helper_tlbie(cpu_env, t0);
5503         tcg_temp_free(t0);
5504     } else {
5505         gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5507     t1 = tcg_temp_new_i32();
5508     tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
5509     tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH);
5510     tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
5511     tcg_temp_free_i32(t1);
5512 #endif /* defined(CONFIG_USER_ONLY) */
5515 /* tlbsync */
/* tlbsync: complete outstanding TLB invalidations.  Privilege level
 * depends on GTSE; the deferred flush is only forced here for BookE,
 * since BookS folds it into ptesync instead. */
5516 static void gen_tlbsync(DisasContext *ctx)
5518 #if defined(CONFIG_USER_ONLY)
5519     GEN_PRIV;
5520 #else
5522     if (ctx->gtse) {
5523         CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */
5524     } else {
5525         CHK_HV; /* Else hypervisor privileged */
5528     /* BookS does both ptesync and tlbsync make tlbsync a nop for server */
5529     if (ctx->insns_flags & PPC_BOOKE) {
5530         gen_check_tlb_flush(ctx, true);
5532 #endif /* defined(CONFIG_USER_ONLY) */
5535 #if defined(TARGET_PPC64)
5536 /* slbia */
/* slbia: invalidate all SLB entries; the IH field (opcode bits 21..23)
 * selects which classes of entries/lookaside state to clear. */
5537 static void gen_slbia(DisasContext *ctx)
5539 #if defined(CONFIG_USER_ONLY)
5540     GEN_PRIV;
5541 #else
5542     uint32_t ih = (ctx->opcode >> 21) & 0x7;
5543     TCGv_i32 t0 = tcg_const_i32(ih);
5545     CHK_SV;
5547     gen_helper_slbia(cpu_env, t0);
5548     tcg_temp_free_i32(t0);
5549 #endif /* defined(CONFIG_USER_ONLY) */
5552 /* slbie */
/* slbie: invalidate the SLB entry matching the ESID in rB. */
5553 static void gen_slbie(DisasContext *ctx)
5555 #if defined(CONFIG_USER_ONLY)
5556     GEN_PRIV;
5557 #else
5558     CHK_SV;
5560     gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5561 #endif /* defined(CONFIG_USER_ONLY) */
5564 /* slbieg */
/* slbieg: global form of slbie, via its own helper. */
5565 static void gen_slbieg(DisasContext *ctx)
5567 #if defined(CONFIG_USER_ONLY)
5568     GEN_PRIV;
5569 #else
5570     CHK_SV;
5572     gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5573 #endif /* defined(CONFIG_USER_ONLY) */
5576 /* slbsync */
/* slbsync: complete outstanding SLB invalidations (forces the pending
 * TLB flush check, global). */
5577 static void gen_slbsync(DisasContext *ctx)
5579 #if defined(CONFIG_USER_ONLY)
5580     GEN_PRIV;
5581 #else
5582     CHK_SV;
5583     gen_check_tlb_flush(ctx, true);
5584 #endif /* defined(CONFIG_USER_ONLY) */
5587 #endif /* defined(TARGET_PPC64) */
5589 /*** External control ***/
5590 /* Optional: */
5592 /* eciwx */
/* eciwx: external control in word indexed — an aligned 32-bit load with
 * ACCESS_EXT access type.  EAR[E] gating is not modelled (see XXX). */
5593 static void gen_eciwx(DisasContext *ctx)
5595     TCGv t0;
5596     /* Should check EAR[E] ! */
5597     gen_set_access_type(ctx, ACCESS_EXT);
5598     t0 = tcg_temp_new();
5599     gen_addr_reg_index(ctx, t0);
5600     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5601                        DEF_MEMOP(MO_UL | MO_ALIGN));
5602     tcg_temp_free(t0);
5605 /* ecowx */
/* ecowx: store counterpart of eciwx — aligned 32-bit store of rD. */
5606 static void gen_ecowx(DisasContext *ctx)
5608     TCGv t0;
5609     /* Should check EAR[E] ! */
5610     gen_set_access_type(ctx, ACCESS_EXT);
5611     t0 = tcg_temp_new();
5612     gen_addr_reg_index(ctx, t0);
5613     tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5614                        DEF_MEMOP(MO_UL | MO_ALIGN));
5615     tcg_temp_free(t0);
5618 /* PowerPC 601 specific instructions */
5620 /* abs - abs. */
/* abs: rD = |rA|; Rc form also sets CR0 from the result. */
5621 static void gen_abs(DisasContext *ctx)
5623     TCGv d = cpu_gpr[rD(ctx->opcode)];
5624     TCGv a = cpu_gpr[rA(ctx->opcode)];
5626     tcg_gen_abs_tl(d, a);
5627     if (unlikely(Rc(ctx->opcode) != 0)) {
5628         gen_set_Rc0(ctx, d);
5632 /* abso - abso. */
/* abso: abs with overflow — OV is set exactly when rA == INT32_MIN
 * (0x80000000), the only value whose absolute value overflows. */
5633 static void gen_abso(DisasContext *ctx)
5635     TCGv d = cpu_gpr[rD(ctx->opcode)];
5636     TCGv a = cpu_gpr[rA(ctx->opcode)];
5638     tcg_gen_setcondi_tl(TCG_COND_EQ, cpu_ov, a, 0x80000000);
5639     tcg_gen_abs_tl(d, a);
5640     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
5641     if (unlikely(Rc(ctx->opcode) != 0)) {
5642         gen_set_Rc0(ctx, d);
5646 /* clcs */
/* clcs: 601 cache-line-size query; computed in a helper from rA. */
5647 static void gen_clcs(DisasContext *ctx)
5649     TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
5650     gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5651     tcg_temp_free_i32(t0);
5652     /* Rc=1 sets CR0 to an undefined state */
5655 /* div - div. */
/* 601 division family: all four forms are implemented in helpers because
 * they involve the MQ register; only the Rc/OE handling differs here. */
5656 static void gen_div(DisasContext *ctx)
5658     gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
5659                    cpu_gpr[rB(ctx->opcode)]);
5660     if (unlikely(Rc(ctx->opcode) != 0)) {
5661         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
5665 /* divo - divo. */
5666 static void gen_divo(DisasContext *ctx)
5668     gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
5669                     cpu_gpr[rB(ctx->opcode)]);
5670     if (unlikely(Rc(ctx->opcode) != 0)) {
5671         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
5675 /* divs - divs. */
5676 static void gen_divs(DisasContext *ctx)
5678     gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
5679                     cpu_gpr[rB(ctx->opcode)]);
5680     if (unlikely(Rc(ctx->opcode) != 0)) {
5681         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
5685 /* divso - divso. */
5686 static void gen_divso(DisasContext *ctx)
5688     gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env,
5689                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
5690     if (unlikely(Rc(ctx->opcode) != 0)) {
5691         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
5695 /* doz - doz. */
/* doz: difference-or-zero — rD = (rB > rA) ? rB - rA : 0. */
5696 static void gen_doz(DisasContext *ctx)
5698     TCGLabel *l1 = gen_new_label();
5699     TCGLabel *l2 = gen_new_label();
5700     tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)],
5701                       cpu_gpr[rA(ctx->opcode)], l1)
5702     tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
5703                    cpu_gpr[rA(ctx->opcode)]);
5704     tcg_gen_br(l2);
5705     gen_set_label(l1);
5706     tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
5707     gen_set_label(l2);
5708     if (unlikely(Rc(ctx->opcode) != 0)) {
5709         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
5713 /* dozo - dozo. */
/* dozo: doz with signed-overflow detection on the subtraction, using
 * the usual xor/andc sign trick; OV and SO are raised on overflow. */
5714 static void gen_dozo(DisasContext *ctx)
5716     TCGLabel *l1 = gen_new_label();
5717     TCGLabel *l2 = gen_new_label();
5718     TCGv t0 = tcg_temp_new();
5719     TCGv t1 = tcg_temp_new();
5720     TCGv t2 = tcg_temp_new();
5721     /* Start with XER OV disabled, the most likely case */
5722     tcg_gen_movi_tl(cpu_ov, 0);
5723     tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)],
5724                       cpu_gpr[rA(ctx->opcode)], l1);
5725     tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5726     tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5727     tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
5728     tcg_gen_andc_tl(t1, t1, t2);
5729     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
5730     tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
5731     tcg_gen_movi_tl(cpu_ov, 1);
5732     tcg_gen_movi_tl(cpu_so, 1);
5733     tcg_gen_br(l2);
5734     gen_set_label(l1);
5735     tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
5736     gen_set_label(l2);
5737     tcg_temp_free(t0);
5738     tcg_temp_free(t1);
5739     tcg_temp_free(t2);
5740     if (unlikely(Rc(ctx->opcode) != 0)) {
5741         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
5745 /* dozi */
/* dozi: immediate form — rD = (SIMM > rA) ? SIMM - rA : 0. */
5746 static void gen_dozi(DisasContext *ctx)
5748     target_long simm = SIMM(ctx->opcode);
5749     TCGLabel *l1 = gen_new_label();
5750     TCGLabel *l2 = gen_new_label();
5751     tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
5752     tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
5753     tcg_gen_br(l2);
5754     gen_set_label(l1);
5755     tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
5756     gen_set_label(l2);
5757     if (unlikely(Rc(ctx->opcode) != 0)) {
5758         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
5762 /* lscbx - lscbx. */
/*
 * lscbx: 601 load string and compare byte indexed.  The heavy lifting
 * (byte-wise load, compare against XER-specified match byte, register
 * wraparound) is in helper_lscbx, which returns the residual count that
 * is merged into XER bits 0..6.
 */
5763 static void gen_lscbx(DisasContext *ctx)
5765     TCGv t0 = tcg_temp_new();
5766     TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
5767     TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
5768     TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
5770     gen_addr_reg_index(ctx, t0);
5771     gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3);
5772     tcg_temp_free_i32(t1);
5773     tcg_temp_free_i32(t2);
5774     tcg_temp_free_i32(t3);
/* replace the low 7 bits of XER (byte count) with the helper result */
5775     tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
5776     tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
5777     if (unlikely(Rc(ctx->opcode) != 0)) {
5778         gen_set_Rc0(ctx, t0);
5780     tcg_temp_free(t0);
5783 /* maskg - maskg. */
/*
 * maskg: 601 mask generate — rA gets a mask of ones running from bit
 * position rS to rB (5-bit fields).  Built from two shifted all-ones
 * words XOR-ed together, with a wrap-around correction (the neg) when
 * the start index exceeds the end index.
 */
5784 static void gen_maskg(DisasContext *ctx)
5786     TCGLabel *l1 = gen_new_label();
5787     TCGv t0 = tcg_temp_new();
5788     TCGv t1 = tcg_temp_new();
5789     TCGv t2 = tcg_temp_new();
5790     TCGv t3 = tcg_temp_new();
5791     tcg_gen_movi_tl(t3, 0xFFFFFFFF);
5792     tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
5793     tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
5794     tcg_gen_addi_tl(t2, t0, 1);
5795     tcg_gen_shr_tl(t2, t3, t2);
5796     tcg_gen_shr_tl(t3, t3, t1);
5797     tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
5798     tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
5799     tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5800     gen_set_label(l1);
5801     tcg_temp_free(t0);
5802     tcg_temp_free(t1);
5803     tcg_temp_free(t2);
5804     tcg_temp_free(t3);
5805     if (unlikely(Rc(ctx->opcode) != 0)) {
5806         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5810 /* maskir - maskir. */
/* maskir: masked insert — rA = (rS & rB) | (rA & ~rB); rB is the mask. */
5811 static void gen_maskir(DisasContext *ctx)
5813     TCGv t0 = tcg_temp_new();
5814     TCGv t1 = tcg_temp_new();
5815     tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
5816     tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
5817     tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5818     tcg_temp_free(t0);
5819     tcg_temp_free(t1);
5820     if (unlikely(Rc(ctx->opcode) != 0)) {
5821         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5825 /* mul - mul. */
/*
 * mul: 601 multiply — the full 64-bit product of rA*rB is split with the
 * low 32 bits stored into SPR MQ and the high 32 bits into rD.
 */
5826 static void gen_mul(DisasContext *ctx)
5828     TCGv_i64 t0 = tcg_temp_new_i64();
5829     TCGv_i64 t1 = tcg_temp_new_i64();
5830     TCGv t2 = tcg_temp_new();
5831     tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
5832     tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
5833     tcg_gen_mul_i64(t0, t0, t1);
5834     tcg_gen_trunc_i64_tl(t2, t0);
5835     gen_store_spr(SPR_MQ, t2);
5836     tcg_gen_shri_i64(t1, t0, 32);
5837     tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
5838     tcg_temp_free_i64(t0);
5839     tcg_temp_free_i64(t1);
5840     tcg_temp_free(t2);
5841     if (unlikely(Rc(ctx->opcode) != 0)) {
5842         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
5846 /* mulo - mulo. */
/*
 * mulo: mul with overflow — OV/SO are set when the 64-bit product does
 * not fit in a signed 32-bit value (compared against its own sign
 * extension from bit 31).
 */
5847 static void gen_mulo(DisasContext *ctx)
5849     TCGLabel *l1 = gen_new_label();
5850     TCGv_i64 t0 = tcg_temp_new_i64();
5851     TCGv_i64 t1 = tcg_temp_new_i64();
5852     TCGv t2 = tcg_temp_new();
5853     /* Start with XER OV disabled, the most likely case */
5854     tcg_gen_movi_tl(cpu_ov, 0);
5855     tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
5856     tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
5857     tcg_gen_mul_i64(t0, t0, t1);
5858     tcg_gen_trunc_i64_tl(t2, t0);
5859     gen_store_spr(SPR_MQ, t2);
5860     tcg_gen_shri_i64(t1, t0, 32);
5861     tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
5862     tcg_gen_ext32s_i64(t1, t0);
5863     tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
5864     tcg_gen_movi_tl(cpu_ov, 1);
5865     tcg_gen_movi_tl(cpu_so, 1);
5866     gen_set_label(l1);
5867     tcg_temp_free_i64(t0);
5868     tcg_temp_free_i64(t1);
5869     tcg_temp_free(t2);
5870     if (unlikely(Rc(ctx->opcode) != 0)) {
5871         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
5875 /* nabs - nabs. */
/* nabs: rD = -|rA| (always non-positive). */
5876 static void gen_nabs(DisasContext *ctx)
5878     TCGv d = cpu_gpr[rD(ctx->opcode)];
5879     TCGv a = cpu_gpr[rA(ctx->opcode)];
5881     tcg_gen_abs_tl(d, a);
5882     tcg_gen_neg_tl(d, d);
5883     if (unlikely(Rc(ctx->opcode) != 0)) {
5884         gen_set_Rc0(ctx, d);
5888 /* nabso - nabso. */
/* nabso: same as nabs; OV is unconditionally cleared because negative
 * absolute value can never overflow. */
5889 static void gen_nabso(DisasContext *ctx)
5891     TCGv d = cpu_gpr[rD(ctx->opcode)];
5892     TCGv a = cpu_gpr[rA(ctx->opcode)];
5894     tcg_gen_abs_tl(d, a);
5895     tcg_gen_neg_tl(d, d);
5896     /* nabs never overflows */
5897     tcg_gen_movi_tl(cpu_ov, 0);
5898     if (unlikely(Rc(ctx->opcode) != 0)) {
5899         gen_set_Rc0(ctx, d);
5903 /* rlmi - rlmi. */
/* rlmi: rotate left by rB (mod 32) then insert under the MB..ME mask
 * into rA, preserving rA bits outside the mask. */
5904 static void gen_rlmi(DisasContext *ctx)
5906     uint32_t mb = MB(ctx->opcode);
5907     uint32_t me = ME(ctx->opcode);
5908     TCGv t0 = tcg_temp_new();
5909     tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
5910     tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
5911     tcg_gen_andi_tl(t0, t0, MASK(mb, me));
5912     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
5913                     ~MASK(mb, me));
5914     tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
5915     tcg_temp_free(t0);
5916     if (unlikely(Rc(ctx->opcode) != 0)) {
5917         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5921 /* rrib - rrib. */
/* rrib: rotate right and insert bit — insert rS bit 0 (after rotate by
 * rB) at position rB of rA, leaving the other bits of rA intact. */
5922 static void gen_rrib(DisasContext *ctx)
5924     TCGv t0 = tcg_temp_new();
5925     TCGv t1 = tcg_temp_new();
5926     tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
5927     tcg_gen_movi_tl(t1, 0x80000000);
5928     tcg_gen_shr_tl(t1, t1, t0);
5929     tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
5930     tcg_gen_and_tl(t0, t0, t1);
5931     tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
5932     tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5933     tcg_temp_free(t0);
5934     tcg_temp_free(t1);
5935     if (unlikely(Rc(ctx->opcode) != 0)) {
5936         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5940 /* sle - sle. */
/*
 * 601 MQ shift family.  These shifts keep a rotated copy of the source
 * in SPR MQ so a following instruction can recombine the bits; rA gets
 * the shifted value.
 */
/* sle: shift left, MQ = rS rotated left by the same amount. */
5941 static void gen_sle(DisasContext *ctx)
5943     TCGv t0 = tcg_temp_new();
5944     TCGv t1 = tcg_temp_new();
5945     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5946     tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5947     tcg_gen_subfi_tl(t1, 32, t1);
5948     tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5949     tcg_gen_or_tl(t1, t0, t1);
5950     tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5951     gen_store_spr(SPR_MQ, t1);
5952     tcg_temp_free(t0);
5953     tcg_temp_free(t1);
5954     if (unlikely(Rc(ctx->opcode) != 0)) {
5955         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5959 /* sleq - sleq. */
/* sleq: shift left with MQ merge — rA combines the rotated rS (under the
 * shift mask) with the previous MQ contents (outside the mask); MQ is
 * updated to the rotated rS. */
5960 static void gen_sleq(DisasContext *ctx)
5962     TCGv t0 = tcg_temp_new();
5963     TCGv t1 = tcg_temp_new();
5964     TCGv t2 = tcg_temp_new();
5965     tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
5966     tcg_gen_movi_tl(t2, 0xFFFFFFFF);
5967     tcg_gen_shl_tl(t2, t2, t0);
5968     tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
5969     gen_load_spr(t1, SPR_MQ);
5970     gen_store_spr(SPR_MQ, t0);
5971     tcg_gen_and_tl(t0, t0, t2);
5972     tcg_gen_andc_tl(t1, t1, t2);
5973     tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5974     tcg_temp_free(t0);
5975     tcg_temp_free(t1);
5976     tcg_temp_free(t2);
5977     if (unlikely(Rc(ctx->opcode) != 0)) {
5978         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5982 /* sliq - sliq. */
/* sliq: immediate-count version of sle (sh from the opcode SH field). */
5983 static void gen_sliq(DisasContext *ctx)
5985     int sh = SH(ctx->opcode);
5986     TCGv t0 = tcg_temp_new();
5987     TCGv t1 = tcg_temp_new();
5988     tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5989     tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
5990     tcg_gen_or_tl(t1, t0, t1);
5991     tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5992     gen_store_spr(SPR_MQ, t1);
5993     tcg_temp_free(t0);
5994     tcg_temp_free(t1);
5995     if (unlikely(Rc(ctx->opcode) != 0)) {
5996         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6000 /* slliq - slliq. */
/* slliq: immediate-count version of sleq — merge rotated rS with old MQ
 * under the (0xFFFFFFFF << sh) mask, update MQ to the rotated value. */
6001 static void gen_slliq(DisasContext *ctx)
6003     int sh = SH(ctx->opcode);
6004     TCGv t0 = tcg_temp_new();
6005     TCGv t1 = tcg_temp_new();
6006     tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6007     gen_load_spr(t1, SPR_MQ);
6008     gen_store_spr(SPR_MQ, t0);
6009     tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
6010     tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
6011     tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6012     tcg_temp_free(t0);
6013     tcg_temp_free(t1);
6014     if (unlikely(Rc(ctx->opcode) != 0)) {
6015         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6019 /* sllq - sllq. */
6020 static void gen_sllq(DisasContext *ctx)
6022 TCGLabel *l1 = gen_new_label();
6023 TCGLabel *l2 = gen_new_label();
6024 TCGv t0 = tcg_temp_local_new();
6025 TCGv t1 = tcg_temp_local_new();
6026 TCGv t2 = tcg_temp_local_new();
6027 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
6028 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
6029 tcg_gen_shl_tl(t1, t1, t2);
6030 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
6031 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6032 gen_load_spr(t0, SPR_MQ);
6033 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6034 tcg_gen_br(l2);
6035 gen_set_label(l1);
6036 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
6037 gen_load_spr(t2, SPR_MQ);
6038 tcg_gen_andc_tl(t1, t2, t1);
6039 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6040 gen_set_label(l2);
6041 tcg_temp_free(t0);
6042 tcg_temp_free(t1);
6043 tcg_temp_free(t2);
6044 if (unlikely(Rc(ctx->opcode) != 0)) {
6045 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6049 /* slq - slq. */
6050 static void gen_slq(DisasContext *ctx)
6052 TCGLabel *l1 = gen_new_label();
6053 TCGv t0 = tcg_temp_new();
6054 TCGv t1 = tcg_temp_new();
6055 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6056 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6057 tcg_gen_subfi_tl(t1, 32, t1);
6058 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6059 tcg_gen_or_tl(t1, t0, t1);
6060 gen_store_spr(SPR_MQ, t1);
6061 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
6062 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6063 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
6064 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
6065 gen_set_label(l1);
6066 tcg_temp_free(t0);
6067 tcg_temp_free(t1);
6068 if (unlikely(Rc(ctx->opcode) != 0)) {
6069 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6073 /* sraiq - sraiq. */
6074 static void gen_sraiq(DisasContext *ctx)
6076 int sh = SH(ctx->opcode);
6077 TCGLabel *l1 = gen_new_label();
6078 TCGv t0 = tcg_temp_new();
6079 TCGv t1 = tcg_temp_new();
6080 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6081 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
6082 tcg_gen_or_tl(t0, t0, t1);
6083 gen_store_spr(SPR_MQ, t0);
6084 tcg_gen_movi_tl(cpu_ca, 0);
6085 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
6086 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
6087 tcg_gen_movi_tl(cpu_ca, 1);
6088 gen_set_label(l1);
6089 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
6090 tcg_temp_free(t0);
6091 tcg_temp_free(t1);
6092 if (unlikely(Rc(ctx->opcode) != 0)) {
6093 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6097 /* sraq - sraq. */
6098 static void gen_sraq(DisasContext *ctx)
6100 TCGLabel *l1 = gen_new_label();
6101 TCGLabel *l2 = gen_new_label();
6102 TCGv t0 = tcg_temp_new();
6103 TCGv t1 = tcg_temp_local_new();
6104 TCGv t2 = tcg_temp_local_new();
6105 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
6106 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
6107 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
6108 tcg_gen_subfi_tl(t2, 32, t2);
6109 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
6110 tcg_gen_or_tl(t0, t0, t2);
6111 gen_store_spr(SPR_MQ, t0);
6112 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
6113 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1);
6114 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
6115 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
6116 gen_set_label(l1);
6117 tcg_temp_free(t0);
6118 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
6119 tcg_gen_movi_tl(cpu_ca, 0);
6120 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
6121 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
6122 tcg_gen_movi_tl(cpu_ca, 1);
6123 gen_set_label(l2);
6124 tcg_temp_free(t1);
6125 tcg_temp_free(t2);
6126 if (unlikely(Rc(ctx->opcode) != 0)) {
6127 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sre - sre. : shift right extended; full rotation kept in MQ */
static void gen_sre(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    /* t1 = rS rotated right by n, built as (rS >> n) | (rS << (32 - n)) */
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* srea - srea. : shift right extended algebraic */
static void gen_srea(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    /* MQ gets the rotation; rA gets the arithmetic shift */
    tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* sreq : shift right extended with MQ */
static void gen_sreq(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    /* t1 = mask of the bits coming from the rotated rS */
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shr_tl(t1, t1, t0);
    tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    /* High bits come from the previous MQ; new MQ is the rotation */
    gen_load_spr(t2, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_and_tl(t0, t0, t1);
    tcg_gen_andc_tl(t2, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* sriq : shift right immediate with MQ */
static void gen_sriq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    /* t1 = rS rotated right by sh, stored to MQ */
    tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* srliq : shift right long immediate with MQ */
static void gen_srliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    /* Result mixes the rotation (low part) with old MQ (high part) */
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
    tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* srlq : shift right long with MQ; count is 6 bits */
static void gen_srlq(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();

    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shr_tl(t2, t1, t2);
    /* Bit 5 of the count selects the >= 32 case */
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    /* count >= 32: result comes entirely from MQ under the mask */
    gen_load_spr(t0, SPR_MQ);
    tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* count < 32: shifted rS merged with MQ outside the mask */
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_and_tl(t0, t0, t2);
    gen_load_spr(t1, SPR_MQ);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}
6257 /* srq */
6258 static void gen_srq(DisasContext *ctx)
6260 TCGLabel *l1 = gen_new_label();
6261 TCGv t0 = tcg_temp_new();
6262 TCGv t1 = tcg_temp_new();
6263 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6264 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6265 tcg_gen_subfi_tl(t1, 32, t1);
6266 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6267 tcg_gen_or_tl(t1, t0, t1);
6268 gen_store_spr(SPR_MQ, t1);
6269 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
6270 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6271 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6272 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
6273 gen_set_label(l1);
6274 tcg_temp_free(t0);
6275 tcg_temp_free(t1);
6276 if (unlikely(Rc(ctx->opcode) != 0)) {
6277 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* PowerPC 602 specific instructions */

/* dsa : disable protection domains */
static void gen_dsa(DisasContext *ctx)
{
    /* XXX: TODO - not modelled, raise an invalid-instruction exception */
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

/* esa : enable protection domains */
static void gen_esa(DisasContext *ctx)
{
    /* XXX: TODO - not modelled, raise an invalid-instruction exception */
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

/* mfrom : move from ROM (602 power-management table); supervisor only */
static void gen_mfrom(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* 602 - 603 - G2 TLB management */

/* tlbld : load data TLB entry for the EA in rB; supervisor only */
static void gen_tlbld_6xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbli : load instruction TLB entry for the EA in rB; supervisor only */
static void gen_tlbli_6xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* POWER instructions not in PowerPC 601 */

/* clf */
static void gen_clf(DisasContext *ctx)
{
    /* Cache line flush: implemented as no-op */
}

/* cli */
static void gen_cli(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Cache line invalidate: privileged and treated as no-op */
    CHK_SV;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* dclst */
static void gen_dclst(DisasContext *ctx)
{
    /* Data cache line store: treated as no-op */
}
/* mfsri : move from segment register indirect (POWER); supervisor only */
static void gen_mfsri(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* Segment register number = EA bits 0..3 (top nibble of the address) */
    tcg_gen_extract_tl(t0, t0, 28, 4);
    gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
    tcg_temp_free(t0);
    /* POWER also copies the result to rA when rA != 0 and rA != rD */
    if (ra != 0 && ra != rd) {
        tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* rac : real address compute (POWER); supervisor only */
static void gen_rac(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* rfsvc : return from supervisor call (POWER); supervisor only */
static void gen_rfsvc(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_rfsvc(cpu_env);
    /* MSR changed: end this translation block */
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* svc is not implemented for now */

/* BookE specific instructions */

/* XXX: not implemented on 440 ? */
static void gen_mfapidi(DisasContext *ctx)
{
    /* XXX: TODO - not modelled, raise an invalid-instruction exception */
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}
6416 /* XXX: not implemented on 440 ? */
6417 static void gen_tlbiva(DisasContext *ctx)
6419 #if defined(CONFIG_USER_ONLY)
6420 GEN_PRIV;
6421 #else
6422 TCGv t0;
6424 CHK_SV;
6425 t0 = tcg_temp_new();
6426 gen_addr_reg_index(ctx, t0);
6427 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6428 tcg_temp_free(t0);
6429 #endif /* defined(CONFIG_USER_ONLY) */
6432 /* All 405 MAC instructions are translated here */
6433 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
6434 int ra, int rb, int rt, int Rc)
6436 TCGv t0, t1;
6438 t0 = tcg_temp_local_new();
6439 t1 = tcg_temp_local_new();
6441 switch (opc3 & 0x0D) {
6442 case 0x05:
6443 /* macchw - macchw. - macchwo - macchwo. */
6444 /* macchws - macchws. - macchwso - macchwso. */
6445 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
6446 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
6447 /* mulchw - mulchw. */
6448 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
6449 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
6450 tcg_gen_ext16s_tl(t1, t1);
6451 break;
6452 case 0x04:
6453 /* macchwu - macchwu. - macchwuo - macchwuo. */
6454 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
6455 /* mulchwu - mulchwu. */
6456 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
6457 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
6458 tcg_gen_ext16u_tl(t1, t1);
6459 break;
6460 case 0x01:
6461 /* machhw - machhw. - machhwo - machhwo. */
6462 /* machhws - machhws. - machhwso - machhwso. */
6463 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
6464 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
6465 /* mulhhw - mulhhw. */
6466 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
6467 tcg_gen_ext16s_tl(t0, t0);
6468 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
6469 tcg_gen_ext16s_tl(t1, t1);
6470 break;
6471 case 0x00:
6472 /* machhwu - machhwu. - machhwuo - machhwuo. */
6473 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
6474 /* mulhhwu - mulhhwu. */
6475 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
6476 tcg_gen_ext16u_tl(t0, t0);
6477 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
6478 tcg_gen_ext16u_tl(t1, t1);
6479 break;
6480 case 0x0D:
6481 /* maclhw - maclhw. - maclhwo - maclhwo. */
6482 /* maclhws - maclhws. - maclhwso - maclhwso. */
6483 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
6484 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
6485 /* mullhw - mullhw. */
6486 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
6487 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
6488 break;
6489 case 0x0C:
6490 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
6491 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
6492 /* mullhwu - mullhwu. */
6493 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
6494 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
6495 break;
6497 if (opc2 & 0x04) {
6498 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
6499 tcg_gen_mul_tl(t1, t0, t1);
6500 if (opc2 & 0x02) {
6501 /* nmultiply-and-accumulate (0x0E) */
6502 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
6503 } else {
6504 /* multiply-and-accumulate (0x0C) */
6505 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
6508 if (opc3 & 0x12) {
6509 /* Check overflow and/or saturate */
6510 TCGLabel *l1 = gen_new_label();
6512 if (opc3 & 0x10) {
6513 /* Start with XER OV disabled, the most likely case */
6514 tcg_gen_movi_tl(cpu_ov, 0);
6516 if (opc3 & 0x01) {
6517 /* Signed */
6518 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
6519 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
6520 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
6521 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
6522 if (opc3 & 0x02) {
6523 /* Saturate */
6524 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
6525 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
6527 } else {
6528 /* Unsigned */
6529 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
6530 if (opc3 & 0x02) {
6531 /* Saturate */
6532 tcg_gen_movi_tl(t0, UINT32_MAX);
6535 if (opc3 & 0x10) {
6536 /* Check overflow */
6537 tcg_gen_movi_tl(cpu_ov, 1);
6538 tcg_gen_movi_tl(cpu_so, 1);
6540 gen_set_label(l1);
6541 tcg_gen_mov_tl(cpu_gpr[rt], t0);
6543 } else {
6544 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
6546 tcg_temp_free(t0);
6547 tcg_temp_free(t1);
6548 if (unlikely(Rc) != 0) {
6549 /* Update Rc0 */
6550 gen_set_Rc0(ctx, cpu_gpr[rt]);
/* Expand one decoder entry point per 405 MAC/multiply mnemonic */
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
                         rD(ctx->opcode), Rc(ctx->opcode));                   \
}

/* macchw - macchw. */
GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
/* macchwo - macchwo. */
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
/* macchws - macchws. */
GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
/* macchwso - macchwso. */
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
/* macchwsu - macchwsu. */
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
/* macchwsuo - macchwsuo. */
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
/* macchwu - macchwu. */
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
/* macchwuo - macchwuo. */
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
/* machhw - machhw. */
GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
/* machhwo - machhwo. */
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
/* machhws - machhws. */
GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
/* machhwso - machhwso. */
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
/* machhwsu - machhwsu. */
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
/* machhwsuo - machhwsuo. */
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
/* machhwu - machhwu. */
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
/* machhwuo - machhwuo. */
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
/* maclhw - maclhw. */
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
/* maclhwo - maclhwo. */
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
/* maclhws - maclhws. */
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
/* maclhwso - maclhwso. */
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
/* maclhwu - maclhwu. */
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
/* maclhwuo - maclhwuo. */
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
/* maclhwsu - maclhwsu. */
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
/* maclhwsuo - maclhwsuo. */
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
/* nmacchw - nmacchw. */
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
/* nmacchwo - nmacchwo. */
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
/* nmacchws - nmacchws. */
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
/* nmacchwso - nmacchwso. */
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
/* nmachhw - nmachhw. */
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
/* nmachhwo - nmachhwo. */
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
/* nmachhws - nmachhws. */
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
/* nmachhwso - nmachhwso. */
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
/* nmaclhw - nmaclhw. */
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
/* nmaclhwo - nmaclhwo. */
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
/* nmaclhws - nmaclhws. */
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
/* nmaclhwso - nmaclhwso. */
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);

/* mulchw - mulchw. */
GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
/* mulchwu - mulchwu. */
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
/* mulhhw - mulhhw. */
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
/* mulhhwu - mulhhwu. */
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
/* mullhw - mullhw. */
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
/* mullhwu - mullhwu. */
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
/* mfdcr : move from device control register (number in the SPR field) */
static void gen_mfdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv dcrn;

    CHK_SV;
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcr : move to device control register (number in the SPR field) */
static void gen_mtdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv dcrn;

    CHK_SV;
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrx : move from DCR, register number taken from rA */
/* XXX: not implemented on 440 ? */
static void gen_mfdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcrx : move to DCR, register number taken from rA */
/* XXX: not implemented on 440 ? */
static void gen_mtdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrux (PPC 460) : user-mode access to DCR */
static void gen_mfdcrux(DisasContext *ctx)
{
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* mtdcrux (PPC 460) : user-mode access to DCR */
static void gen_mtdcrux(DisasContext *ctx)
{
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* dccci : data cache congruence-class invalidate; privileged */
static void gen_dccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}
/* dcread : data cache read; privileged */
static void gen_dcread(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv EA, val;

    CHK_SV;
    gen_set_access_type(ctx, ACCESS_CACHE);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    /*
     * Perform the word load only for its MMU side effects; the cache
     * itself is not modelled, so rD gets the EA as an approximation.
     */
    val = tcg_temp_new();
    gen_qemu_ld32u(ctx, val, EA);
    tcg_temp_free(val);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* icbt */
static void gen_icbt_40x(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification say this is treated as a load by the MMU but
     *      does not generate any exception
     */
}

/* iccci : instruction cache congruence-class invalidate; privileged */
static void gen_iccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* icread : instruction cache read; privileged */
static void gen_icread(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* rfci (supervisor only) : return from critical interrupt (40x flavour) */
static void gen_rfci_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_40x_rfci(cpu_env);
    /* MSR/PC changed: end this translation block */
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}
/* rfci : return from critical interrupt (BookE); supervisor only */
static void gen_rfci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfci(cpu_env);
    /* MSR/PC changed: end this translation block */
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* BookE specific */

/* rfdi : return from debug interrupt */
/* XXX: not implemented on 440 ? */
static void gen_rfdi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfdi(cpu_env);
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* rfmci : return from machine-check interrupt */
/* XXX: not implemented on 440 ? */
static void gen_rfmci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfmci(cpu_env);
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}
/* TLB management - PowerPC 405 implementation */

/* tlbre : read TLB entry; rB selects the high (0) or low (1) word */
static void gen_tlbre_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx. : search TLB for the computed EA */
static void gen_tlbsx_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    /* With Rc set, CR0[EQ] reports whether an entry was found (rD != -1) */
    if (Rc(ctx->opcode)) {
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe : write TLB entry; rB selects the high (0) or low (1) word */
static void gen_tlbwe_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
/* TLB management - PowerPC 440 implementation */

/* tlbre : read TLB entry word 0/1/2 (selected by rB) of entry rA */
static void gen_tlbre_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                 t0, cpu_gpr[rA(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx. : search TLB for the computed EA */
static void gen_tlbsx_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    /* With Rc set, CR0[EQ] reports whether an entry was found (rD != -1) */
    if (Rc(ctx->opcode)) {
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe : write TLB entry word 0/1/2 (selected by rB) of entry rA */
static void gen_tlbwe_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
                                 cpu_gpr[rS(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
/* TLB management - PowerPC BookE 2.06 implementation */

/* tlbre : read the TLB entry selected by MAS registers */
static void gen_tlbre_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_booke206_tlbre(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx. : search TLB for EA = (rA|0) + rB */
static void gen_tlbsx_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    /* NOTE(review): base comes from rD, not rA, when rA != 0 - as upstream */
    if (rA(ctx->opcode)) {
        t0 = tcg_temp_new();
        tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
    } else {
        t0 = tcg_const_tl(0);
    }

    tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
    gen_helper_booke206_tlbsx(cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe : write the TLB entry selected by MAS registers */
static void gen_tlbwe_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_booke206_tlbwe(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbivax : invalidate TLB entries matching the computed EA */
static void gen_tlbivax_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_booke206_tlbivax(cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbilx : local TLB invalidate; the T field selects the flavour */
static void gen_tlbilx_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);

    switch ((ctx->opcode >> 21) & 0x3) {
    case 0:
        gen_helper_booke206_tlbilx0(cpu_env, t0);
        break;
    case 1:
        gen_helper_booke206_tlbilx1(cpu_env, t0);
        break;
    case 3:
        gen_helper_booke206_tlbilx3(cpu_env, t0);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }

    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}
/* wrtee : write MSR[EE] from bit in rD */
static void gen_wrtee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    /* Copy only the EE bit from rD into MSR */
    tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
    tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
    tcg_temp_free(t0);
    /*
     * Stop translation to have a chance to raise an exception if we
     * just set msr_ee to 1
     */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* wrteei : write MSR[EE] from immediate bit 16 of the opcode */
static void gen_wrteei(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    if (ctx->opcode & 0x00008000) {
        tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
        /* Stop translation to have a chance to raise an exception */
        ctx->base.is_jmp = DISAS_EXIT_UPDATE;
    } else {
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
/* PowerPC 440 specific instructions */

/* dlmzb : determine leftmost zero byte; helper also updates CR0 if Rc */
static void gen_dlmzb(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
    gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
}

/* mbar replaces eieio on 440 */
static void gen_mbar(DisasContext *ctx)
{
    /* interpreted as no-op */
}

/* msync replaces sync on 440 */
static void gen_msync_4xx(DisasContext *ctx)
{
    /* Only e500 seems to treat reserved bits as invalid */
    if ((ctx->insns_flags2 & PPC2_BOOKE206) &&
        (ctx->opcode & 0x03FFF801)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
    }

    /* otherwise interpreted as no-op */
}

/* icbt */
static void gen_icbt_440(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification say this is treated as a load by the MMU but
     *      does not generate any exception
     */
}
7142 /* Embedded.Processor Control */
7144 static void gen_msgclr(DisasContext *ctx)
7146 #if defined(CONFIG_USER_ONLY)
7147 GEN_PRIV;
7148 #else
7149 CHK_HV;
7150 if (is_book3s_arch2x(ctx)) {
7151 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7152 } else {
7153 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7155 #endif /* defined(CONFIG_USER_ONLY) */
7158 static void gen_msgsnd(DisasContext *ctx)
7160 #if defined(CONFIG_USER_ONLY)
7161 GEN_PRIV;
7162 #else
7163 CHK_HV;
7164 if (is_book3s_arch2x(ctx)) {
7165 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]);
7166 } else {
7167 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
7169 #endif /* defined(CONFIG_USER_ONLY) */
7172 #if defined(TARGET_PPC64)
7173 static void gen_msgclrp(DisasContext *ctx)
7175 #if defined(CONFIG_USER_ONLY)
7176 GEN_PRIV;
7177 #else
7178 CHK_SV;
7179 gen_helper_book3s_msgclrp(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7180 #endif /* defined(CONFIG_USER_ONLY) */
7183 static void gen_msgsndp(DisasContext *ctx)
7185 #if defined(CONFIG_USER_ONLY)
7186 GEN_PRIV;
7187 #else
7188 CHK_SV;
7189 gen_helper_book3s_msgsndp(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7190 #endif /* defined(CONFIG_USER_ONLY) */
7192 #endif
7194 static void gen_msgsync(DisasContext *ctx)
7196 #if defined(CONFIG_USER_ONLY)
7197 GEN_PRIV;
7198 #else
7199 CHK_HV;
7200 #endif /* defined(CONFIG_USER_ONLY) */
7201 /* interpreted as no-op */
7204 #if defined(TARGET_PPC64)
7205 static void gen_maddld(DisasContext *ctx)
7207 TCGv_i64 t1 = tcg_temp_new_i64();
7209 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7210 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
7211 tcg_temp_free_i64(t1);
7214 /* maddhd maddhdu */
7215 static void gen_maddhd_maddhdu(DisasContext *ctx)
7217 TCGv_i64 lo = tcg_temp_new_i64();
7218 TCGv_i64 hi = tcg_temp_new_i64();
7219 TCGv_i64 t1 = tcg_temp_new_i64();
7221 if (Rc(ctx->opcode)) {
7222 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
7223 cpu_gpr[rB(ctx->opcode)]);
7224 tcg_gen_movi_i64(t1, 0);
7225 } else {
7226 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
7227 cpu_gpr[rB(ctx->opcode)]);
7228 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
7230 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
7231 cpu_gpr[rC(ctx->opcode)], t1);
7232 tcg_temp_free_i64(lo);
7233 tcg_temp_free_i64(hi);
7234 tcg_temp_free_i64(t1);
7236 #endif /* defined(TARGET_PPC64) */
7238 static void gen_tbegin(DisasContext *ctx)
7240 if (unlikely(!ctx->tm_enabled)) {
7241 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
7242 return;
7244 gen_helper_tbegin(cpu_env);
7247 #define GEN_TM_NOOP(name) \
7248 static inline void gen_##name(DisasContext *ctx) \
7250 if (unlikely(!ctx->tm_enabled)) { \
7251 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \
7252 return; \
7254 /* \
7255 * Because tbegin always fails in QEMU, these user \
7256 * space instructions all have a simple implementation: \
7258 * CR[0] = 0b0 || MSR[TS] || 0b0 \
7259 * = 0b0 || 0b00 || 0b0 \
7260 */ \
7261 tcg_gen_movi_i32(cpu_crf[0], 0); \
7264 GEN_TM_NOOP(tend);
7265 GEN_TM_NOOP(tabort);
7266 GEN_TM_NOOP(tabortwc);
7267 GEN_TM_NOOP(tabortwci);
7268 GEN_TM_NOOP(tabortdc);
7269 GEN_TM_NOOP(tabortdci);
7270 GEN_TM_NOOP(tsr);
7272 static inline void gen_cp_abort(DisasContext *ctx)
7274 /* Do Nothing */
7277 #define GEN_CP_PASTE_NOOP(name) \
7278 static inline void gen_##name(DisasContext *ctx) \
7280 /* \
7281 * Generate invalid exception until we have an \
7282 * implementation of the copy paste facility \
7283 */ \
7284 gen_invalid(ctx); \
7287 GEN_CP_PASTE_NOOP(copy)
7288 GEN_CP_PASTE_NOOP(paste)
7290 static void gen_tcheck(DisasContext *ctx)
7292 if (unlikely(!ctx->tm_enabled)) {
7293 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
7294 return;
7297 * Because tbegin always fails, the tcheck implementation is
7298 * simple:
7300 * CR[CRF] = TDOOMED || MSR[TS] || 0b0
7301 * = 0b1 || 0b00 || 0b0
7303 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
7306 #if defined(CONFIG_USER_ONLY)
7307 #define GEN_TM_PRIV_NOOP(name) \
7308 static inline void gen_##name(DisasContext *ctx) \
7310 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \
7313 #else
7315 #define GEN_TM_PRIV_NOOP(name) \
7316 static inline void gen_##name(DisasContext *ctx) \
7318 CHK_SV; \
7319 if (unlikely(!ctx->tm_enabled)) { \
7320 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \
7321 return; \
7323 /* \
7324 * Because tbegin always fails, the implementation is \
7325 * simple: \
7327 * CR[0] = 0b0 || MSR[TS] || 0b0 \
7328 * = 0b0 || 0b00 | 0b0 \
7329 */ \
7330 tcg_gen_movi_i32(cpu_crf[0], 0); \
7333 #endif
7335 GEN_TM_PRIV_NOOP(treclaim);
7336 GEN_TM_PRIV_NOOP(trechkpt);
7338 static inline void get_fpr(TCGv_i64 dst, int regno)
7340 tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno));
7343 static inline void set_fpr(int regno, TCGv_i64 src)
7345 tcg_gen_st_i64(src, cpu_env, fpr_offset(regno));
7348 static inline void get_avr64(TCGv_i64 dst, int regno, bool high)
7350 tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high));
7353 static inline void set_avr64(int regno, TCGv_i64 src, bool high)
7355 tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high));
7359 * Helpers for decodetree used by !function for decoding arguments.
7361 static int times_2(DisasContext *ctx, int x)
7363 return x * 2;
7366 static int times_4(DisasContext *ctx, int x)
7368 return x * 4;
7371 static int times_16(DisasContext *ctx, int x)
7373 return x * 16;
/*
 * Helpers for trans_* functions to check for specific insns flags.
 * Use token pasting to ensure that we use the proper flag with the
 * proper variable.
 */
#define REQUIRE_INSNS_FLAGS(CTX, NAME)                    \
    do {                                                  \
        if (((CTX)->insns_flags & PPC_##NAME) == 0) {     \
            return false;                                 \
        }                                                 \
    } while (0)

#define REQUIRE_INSNS_FLAGS2(CTX, NAME)                   \
    do {                                                  \
        if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) {   \
            return false;                                 \
        }                                                 \
    } while (0)

/* Then special-case the check for 64-bit so that we elide code for ppc32. */
#if TARGET_LONG_BITS == 32
# define REQUIRE_64BIT(CTX)  return false
#else
# define REQUIRE_64BIT(CTX)  REQUIRE_INSNS_FLAGS(CTX, 64B)
#endif

#define REQUIRE_VECTOR(CTX)                               \
    do {                                                  \
        if (unlikely(!(CTX)->altivec_enabled)) {          \
            gen_exception((CTX), POWERPC_EXCP_VPU);       \
            return true;                                  \
        }                                                 \
    } while (0)

#define REQUIRE_VSX(CTX)                                  \
    do {                                                  \
        if (unlikely(!(CTX)->vsx_enabled)) {              \
            gen_exception((CTX), POWERPC_EXCP_VSXU);      \
            return true;                                  \
        }                                                 \
    } while (0)

#define REQUIRE_FPU(ctx)                                  \
    do {                                                  \
        if (unlikely(!(ctx)->fpu_enabled)) {              \
            gen_exception((ctx), POWERPC_EXCP_FPU);       \
            return true;                                  \
        }                                                 \
    } while (0)
/*
 * Helpers for implementing sets of trans_* functions.
 * Defer the implementation of NAME to FUNC, with optional extra arguments.
 */
#define TRANS(NAME, FUNC, ...)                                     \
    static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a)     \
    { return FUNC(ctx, a, __VA_ARGS__); }

#define TRANS64(NAME, FUNC, ...)                                   \
    static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a)     \
    { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); }

/* TODO: More TRANS* helpers for extra insn_flags checks. */
7441 #include "decode-insn32.c.inc"
7442 #include "decode-insn64.c.inc"
7443 #include "power8-pmu-regs.c.inc"
7446 * Incorporate CIA into the constant when R=1.
7447 * Validate that when R=1, RA=0.
7449 static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a)
7451 d->rt = a->rt;
7452 d->ra = a->ra;
7453 d->si = a->si;
7454 if (a->r) {
7455 if (unlikely(a->ra != 0)) {
7456 gen_invalid(ctx);
7457 return false;
7459 d->si += ctx->cia;
7461 return true;
7464 #include "translate/fixedpoint-impl.c.inc"
7466 #include "translate/fp-impl.c.inc"
7468 #include "translate/vmx-impl.c.inc"
7470 #include "translate/vsx-impl.c.inc"
7472 #include "translate/dfp-impl.c.inc"
7474 #include "translate/spe-impl.c.inc"
7476 #include "translate/branch-impl.c.inc"
7478 /* Handles lfdp, lxsd, lxssp */
7479 static void gen_dform39(DisasContext *ctx)
7481 switch (ctx->opcode & 0x3) {
7482 case 0: /* lfdp */
7483 if (ctx->insns_flags2 & PPC2_ISA205) {
7484 return gen_lfdp(ctx);
7486 break;
7487 case 2: /* lxsd */
7488 if (ctx->insns_flags2 & PPC2_ISA300) {
7489 return gen_lxsd(ctx);
7491 break;
7492 case 3: /* lxssp */
7493 if (ctx->insns_flags2 & PPC2_ISA300) {
7494 return gen_lxssp(ctx);
7496 break;
7498 return gen_invalid(ctx);
7501 /* handles stfdp, lxv, stxsd, stxssp lxvx */
7502 static void gen_dform3D(DisasContext *ctx)
7504 if ((ctx->opcode & 3) != 1) { /* DS-FORM */
7505 switch (ctx->opcode & 0x3) {
7506 case 0: /* stfdp */
7507 if (ctx->insns_flags2 & PPC2_ISA205) {
7508 return gen_stfdp(ctx);
7510 break;
7511 case 2: /* stxsd */
7512 if (ctx->insns_flags2 & PPC2_ISA300) {
7513 return gen_stxsd(ctx);
7515 break;
7516 case 3: /* stxssp */
7517 if (ctx->insns_flags2 & PPC2_ISA300) {
7518 return gen_stxssp(ctx);
7520 break;
7523 return gen_invalid(ctx);
7526 #if defined(TARGET_PPC64)
7527 /* brd */
7528 static void gen_brd(DisasContext *ctx)
7530 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
7533 /* brw */
7534 static void gen_brw(DisasContext *ctx)
7536 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
7537 tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
7541 /* brh */
7542 static void gen_brh(DisasContext *ctx)
7544 TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull);
7545 TCGv_i64 t1 = tcg_temp_new_i64();
7546 TCGv_i64 t2 = tcg_temp_new_i64();
7548 tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
7549 tcg_gen_and_i64(t2, t1, mask);
7550 tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask);
7551 tcg_gen_shli_i64(t1, t1, 8);
7552 tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
7554 tcg_temp_free_i64(t1);
7555 tcg_temp_free_i64(t2);
7557 #endif
7559 static opcode_t opcodes[] = {
7560 #if defined(TARGET_PPC64)
7561 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
7562 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
7563 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
7564 #endif
7565 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
7566 #if defined(TARGET_PPC64)
7567 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
7568 #endif
7569 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
7570 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
7571 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
7572 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7573 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7574 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
7575 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
7576 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
7577 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
7578 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7579 #if defined(TARGET_PPC64)
7580 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
7581 #endif
7582 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
7583 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
7584 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7585 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7586 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7587 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
7588 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
7589 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
7590 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
7591 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
7592 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
7593 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
7594 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7595 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7596 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7597 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7598 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
7599 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
7600 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
7601 #if defined(TARGET_PPC64)
7602 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
7603 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
7604 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
7605 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
7606 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
7607 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
7608 #endif
7609 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7610 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7611 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7612 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
7613 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
7614 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
7615 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
7616 #if defined(TARGET_PPC64)
7617 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
7618 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
7619 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
7620 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
7621 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
7622 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
7623 PPC_NONE, PPC2_ISA300),
7624 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
7625 PPC_NONE, PPC2_ISA300),
7626 #endif
7627 /* handles lfdp, lxsd, lxssp */
7628 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
7629 /* handles stfdp, stxsd, stxssp */
7630 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
7631 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7632 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7633 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
7634 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
7635 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
7636 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
7637 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO),
7638 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
7639 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
7640 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
7641 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
7642 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
7643 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
7644 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
7645 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
7646 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
7647 #if defined(TARGET_PPC64)
7648 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
7649 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
7650 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
7651 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
7652 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
7653 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
7654 #endif
7655 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
7656 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
7657 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300),
7658 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
7659 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
7660 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
7661 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
7662 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
7663 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
7664 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
7665 #if defined(TARGET_PPC64)
7666 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
7667 #if !defined(CONFIG_USER_ONLY)
7668 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
7669 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
7670 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
7671 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
7672 #endif
7673 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
7674 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
7675 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
7676 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
7677 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
7678 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
7679 #endif
7680 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
7681 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
7682 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
7683 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
7684 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
7685 #if defined(TARGET_PPC64)
7686 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
7687 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
7688 #endif
7689 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
7690 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
7691 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
7692 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
7693 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
7694 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
7695 #if defined(TARGET_PPC64)
7696 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
7697 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
7698 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
7699 #endif
7700 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
7701 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
7702 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
7703 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
7704 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
7705 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
7706 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
7707 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
7708 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
7709 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
7710 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
7711 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
7712 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
7713 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
7714 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
7715 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
7716 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
7717 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
7718 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
7719 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
7720 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
7721 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
7722 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
7723 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
7724 #if defined(TARGET_PPC64)
7725 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
7726 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
7727 PPC_SEGMENT_64B),
7728 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
7729 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
7730 PPC_SEGMENT_64B),
7731 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
7732 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
7733 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
7734 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B),
7735 #endif
7736 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
7738 * XXX Those instructions will need to be handled differently for
7739 * different ISA versions
7741 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE),
7742 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE),
7743 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300),
7744 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300),
7745 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
7746 #if defined(TARGET_PPC64)
7747 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI),
7748 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
7749 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300),
7750 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
7751 #endif
7752 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
7753 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
7754 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
7755 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
7756 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
7757 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
7758 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
7759 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
7760 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
7761 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
7762 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
7763 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
7764 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
7765 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
7766 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
7767 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
7768 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
7769 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
7770 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
7771 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
7772 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
7773 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
7774 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
7775 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
7776 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
7777 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
7778 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
7779 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
7780 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
7781 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
7782 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
7783 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
7784 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
7785 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
7786 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
7787 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
7788 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
7789 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
7790 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
7791 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
7792 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
7793 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
7794 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
7795 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
7796 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
7797 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
7798 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
7799 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
7800 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
7801 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
7802 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
7803 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
7804 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
7805 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
7806 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
7807 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
7808 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
7809 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
7810 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
7811 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
7812 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
7813 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
7814 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
7815 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
7816 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
7817 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
7818 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
7819 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
7820 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
7821 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
7822 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
7823 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
7824 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
7825 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
7826 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
7827 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
7828 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
7829 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
7830 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
7831 PPC_NONE, PPC2_BOOKE206),
7832 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
7833 PPC_NONE, PPC2_BOOKE206),
7834 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
7835 PPC_NONE, PPC2_BOOKE206),
7836 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
7837 PPC_NONE, PPC2_BOOKE206),
7838 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
7839 PPC_NONE, PPC2_BOOKE206),
7840 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001,
7841 PPC_NONE, PPC2_PRCNTL),
7842 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001,
7843 PPC_NONE, PPC2_PRCNTL),
7844 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000,
7845 PPC_NONE, PPC2_PRCNTL),
7846 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
7847 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
7848 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
7849 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
7850 PPC_BOOKE, PPC2_BOOKE206),
7851 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE),
7852 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
7853 PPC_BOOKE, PPC2_BOOKE206),
7854 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
7855 PPC_440_SPEC),
7856 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
7857 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
7858 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
7859 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
7860 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
7861 #if defined(TARGET_PPC64)
7862 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
7863 PPC2_ISA300),
7864 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
7865 GEN_HANDLER2_E(msgsndp, "msgsndp", 0x1F, 0x0E, 0x04, 0x03ff0001,
7866 PPC_NONE, PPC2_ISA207S),
7867 GEN_HANDLER2_E(msgclrp, "msgclrp", 0x1F, 0x0E, 0x05, 0x03ff0001,
7868 PPC_NONE, PPC2_ISA207S),
7869 #endif
7871 #undef GEN_INT_ARITH_ADD
7872 #undef GEN_INT_ARITH_ADD_CONST
7873 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
7874 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
7875 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
7876 add_ca, compute_ca, compute_ov) \
7877 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
7878 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
7879 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
7880 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
7881 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
7882 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
7883 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
7884 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
7885 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
7886 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
7887 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
7888 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
7890 #undef GEN_INT_ARITH_DIVW
7891 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
7892 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
7893 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
7894 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
7895 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
7896 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
7897 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
7898 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
7899 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
7900 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
7901 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
7902 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
7904 #if defined(TARGET_PPC64)
7905 #undef GEN_INT_ARITH_DIVD
7906 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
7907 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
7908 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
7909 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
7910 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
7911 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
7913 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
7914 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
7915 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
7916 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
7917 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
7918 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
7920 #undef GEN_INT_ARITH_MUL_HELPER
7921 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
7922 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
7923 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
7924 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
7925 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
7926 #endif
7928 #undef GEN_INT_ARITH_SUBF
7929 #undef GEN_INT_ARITH_SUBF_CONST
7930 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
7931 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
7932 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
7933 add_ca, compute_ca, compute_ov) \
7934 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
7935 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
7936 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
7937 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
7938 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
7939 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
7940 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
7941 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
7942 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
7943 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
7944 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
7946 #undef GEN_LOGICAL1
7947 #undef GEN_LOGICAL2
7948 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
7949 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
7950 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
7951 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
7952 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
7953 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
7954 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
7955 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
7956 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
7957 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
7958 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
7959 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
7960 #if defined(TARGET_PPC64)
7961 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
7962 #endif
7964 #if defined(TARGET_PPC64)
7965 #undef GEN_PPC64_R2
7966 #undef GEN_PPC64_R4
7967 #define GEN_PPC64_R2(name, opc1, opc2) \
7968 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
7969 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
7970 PPC_64B)
7971 #define GEN_PPC64_R4(name, opc1, opc2) \
7972 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
7973 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
7974 PPC_64B), \
7975 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
7976 PPC_64B), \
7977 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
7978 PPC_64B)
7979 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
7980 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
7981 GEN_PPC64_R4(rldic, 0x1E, 0x04),
7982 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
7983 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
7984 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
7985 #endif
7987 #undef GEN_LDX_E
7988 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \
7989 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
7991 #if defined(TARGET_PPC64)
7992 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
7994 /* HV/P7 and later only */
7995 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
7996 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
7997 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
7998 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
7999 #endif
8000 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
8001 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
8003 /* External PID based load */
8004 #undef GEN_LDEPX
8005 #define GEN_LDEPX(name, ldop, opc2, opc3) \
8006 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \
8007 0x00000001, PPC_NONE, PPC2_BOOKE206),
8009 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
8010 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
8011 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
8012 #if defined(TARGET_PPC64)
8013 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00)
8014 #endif
8016 #undef GEN_STX_E
8017 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \
8018 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
8020 #if defined(TARGET_PPC64)
8021 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
8022 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
8023 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
8024 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
8025 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
8026 #endif
8027 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
8028 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
8030 #undef GEN_STEPX
8031 #define GEN_STEPX(name, ldop, opc2, opc3) \
8032 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \
8033 0x00000001, PPC_NONE, PPC2_BOOKE206),
8035 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
8036 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
8037 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
8038 #if defined(TARGET_PPC64)
8039 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1D, 0x04)
8040 #endif
8042 #undef GEN_CRLOGIC
8043 #define GEN_CRLOGIC(name, tcg_op, opc) \
8044 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
8045 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
8046 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
8047 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
8048 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
8049 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
8050 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
8051 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
8052 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
8054 #undef GEN_MAC_HANDLER
8055 #define GEN_MAC_HANDLER(name, opc2, opc3) \
8056 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
8057 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
8058 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
8059 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
8060 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
8061 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
8062 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
8063 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
8064 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
8065 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
8066 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
8067 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
8068 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
8069 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
8070 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
8071 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
8072 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
8073 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
8074 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
8075 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
8076 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
8077 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
8078 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
8079 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
8080 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
8081 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
8082 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
8083 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
8084 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
8085 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
8086 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
8087 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
8088 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
8089 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
8090 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
8091 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
8092 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
8093 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
8094 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
8095 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
8096 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
8097 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
8098 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
8100 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
8101 PPC_NONE, PPC2_TM),
8102 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \
8103 PPC_NONE, PPC2_TM),
8104 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
8105 PPC_NONE, PPC2_TM),
8106 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
8107 PPC_NONE, PPC2_TM),
8108 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
8109 PPC_NONE, PPC2_TM),
8110 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
8111 PPC_NONE, PPC2_TM),
8112 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
8113 PPC_NONE, PPC2_TM),
8114 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
8115 PPC_NONE, PPC2_TM),
8116 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
8117 PPC_NONE, PPC2_TM),
8118 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
8119 PPC_NONE, PPC2_TM),
8120 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
8121 PPC_NONE, PPC2_TM),
8123 #include "translate/fp-ops.c.inc"
8125 #include "translate/vmx-ops.c.inc"
8127 #include "translate/vsx-ops.c.inc"
8129 #include "translate/spe-ops.c.inc"
/*****************************************************************************/
/* Opcode types */
enum {
    PPC_DIRECT = 0, /* Opcode routine */
    PPC_INDIRECT = 1, /* Indirect opcode table */
};

/*
 * Low-order tag bits of an opc_handler_t pointer stored in a dispatch
 * table: create_new_table() ORs PPC_INDIRECT into sub-table pointers and
 * ind_table() masks the bits back off to recover the real address.
 */
#define PPC_OPCODE_MASK 0x3
8141 static inline int is_indirect_opcode(void *handler)
8143 return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
8146 static inline opc_handler_t **ind_table(void *handler)
8148 return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
8151 /* Instruction table creation */
8152 /* Opcodes tables creation */
8153 static void fill_new_table(opc_handler_t **table, int len)
8155 int i;
8157 for (i = 0; i < len; i++) {
8158 table[i] = &invalid_handler;
8162 static int create_new_table(opc_handler_t **table, unsigned char idx)
8164 opc_handler_t **tmp;
8166 tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
8167 fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
8168 table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
8170 return 0;
8173 static int insert_in_table(opc_handler_t **table, unsigned char idx,
8174 opc_handler_t *handler)
8176 if (table[idx] != &invalid_handler) {
8177 return -1;
8179 table[idx] = handler;
8181 return 0;
8184 static int register_direct_insn(opc_handler_t **ppc_opcodes,
8185 unsigned char idx, opc_handler_t *handler)
8187 if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
8188 printf("*** ERROR: opcode %02x already assigned in main "
8189 "opcode table\n", idx);
8190 return -1;
8193 return 0;
8196 static int register_ind_in_table(opc_handler_t **table,
8197 unsigned char idx1, unsigned char idx2,
8198 opc_handler_t *handler)
8200 if (table[idx1] == &invalid_handler) {
8201 if (create_new_table(table, idx1) < 0) {
8202 printf("*** ERROR: unable to create indirect table "
8203 "idx=%02x\n", idx1);
8204 return -1;
8206 } else {
8207 if (!is_indirect_opcode(table[idx1])) {
8208 printf("*** ERROR: idx %02x already assigned to a direct "
8209 "opcode\n", idx1);
8210 return -1;
8213 if (handler != NULL &&
8214 insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
8215 printf("*** ERROR: opcode %02x already assigned in "
8216 "opcode table %02x\n", idx2, idx1);
8217 return -1;
8220 return 0;
8223 static int register_ind_insn(opc_handler_t **ppc_opcodes,
8224 unsigned char idx1, unsigned char idx2,
8225 opc_handler_t *handler)
8227 return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
8230 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
8231 unsigned char idx1, unsigned char idx2,
8232 unsigned char idx3, opc_handler_t *handler)
8234 if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
8235 printf("*** ERROR: unable to join indirect table idx "
8236 "[%02x-%02x]\n", idx1, idx2);
8237 return -1;
8239 if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
8240 handler) < 0) {
8241 printf("*** ERROR: unable to insert opcode "
8242 "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
8243 return -1;
8246 return 0;
8249 static int register_trplind_insn(opc_handler_t **ppc_opcodes,
8250 unsigned char idx1, unsigned char idx2,
8251 unsigned char idx3, unsigned char idx4,
8252 opc_handler_t *handler)
8254 opc_handler_t **table;
8256 if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
8257 printf("*** ERROR: unable to join indirect table idx "
8258 "[%02x-%02x]\n", idx1, idx2);
8259 return -1;
8261 table = ind_table(ppc_opcodes[idx1]);
8262 if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
8263 printf("*** ERROR: unable to join 2nd-level indirect table idx "
8264 "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
8265 return -1;
8267 table = ind_table(table[idx2]);
8268 if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
8269 printf("*** ERROR: unable to insert opcode "
8270 "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
8271 return -1;
8273 return 0;
8275 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
8277 if (insn->opc2 != 0xFF) {
8278 if (insn->opc3 != 0xFF) {
8279 if (insn->opc4 != 0xFF) {
8280 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
8281 insn->opc3, insn->opc4,
8282 &insn->handler) < 0) {
8283 return -1;
8285 } else {
8286 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
8287 insn->opc3, &insn->handler) < 0) {
8288 return -1;
8291 } else {
8292 if (register_ind_insn(ppc_opcodes, insn->opc1,
8293 insn->opc2, &insn->handler) < 0) {
8294 return -1;
8297 } else {
8298 if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
8299 return -1;
8303 return 0;
8306 static int test_opcode_table(opc_handler_t **table, int len)
8308 int i, count, tmp;
8310 for (i = 0, count = 0; i < len; i++) {
8311 /* Consistency fixup */
8312 if (table[i] == NULL) {
8313 table[i] = &invalid_handler;
8315 if (table[i] != &invalid_handler) {
8316 if (is_indirect_opcode(table[i])) {
8317 tmp = test_opcode_table(ind_table(table[i]),
8318 PPC_CPU_INDIRECT_OPCODES_LEN);
8319 if (tmp == 0) {
8320 free(table[i]);
8321 table[i] = &invalid_handler;
8322 } else {
8323 count++;
8325 } else {
8326 count++;
8331 return count;
8334 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
8336 if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
8337 printf("*** WARNING: no opcode defined !\n");
8341 /*****************************************************************************/
8342 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
8344 PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
8345 opcode_t *opc;
8347 fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
8348 for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
8349 if (((opc->handler.type & pcc->insns_flags) != 0) ||
8350 ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
8351 if (register_insn(cpu->opcodes, opc) < 0) {
8352 error_setg(errp, "ERROR initializing PowerPC instruction "
8353 "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
8354 opc->opc3);
8355 return;
8359 fix_opcode_tables(cpu->opcodes);
8360 fflush(stdout);
8361 fflush(stderr);
/*
 * Tear down the per-CPU opcode tables built by create_ppc_opcodes(),
 * walking up to three levels of indirect tables and freeing each one.
 * Entries carry the PPC_INDIRECT tag bit, which is masked off before
 * the pointer is handed back to g_free().
 */
void destroy_ppc_opcodes(PowerPCCPU *cpu)
{
    opc_handler_t **table, **table_2;
    int i, j, k;

    for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
        if (cpu->opcodes[i] == &invalid_handler) {
            continue;
        }
        if (is_indirect_opcode(cpu->opcodes[i])) {
            table = ind_table(cpu->opcodes[i]);
            for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
                if (table[j] == &invalid_handler) {
                    continue;
                }
                if (is_indirect_opcode(table[j])) {
                    table_2 = ind_table(table[j]);
                    for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
                        if (table_2[k] != &invalid_handler &&
                            is_indirect_opcode(table_2[k])) {
                            /* Third-level table: untag and free. */
                            g_free((opc_handler_t *)((uintptr_t)table_2[k] &
                                                     ~PPC_INDIRECT));
                        }
                    }
                    /* Second-level table. */
                    g_free((opc_handler_t *)((uintptr_t)table[j] &
                                             ~PPC_INDIRECT));
                }
            }
            /* First-level table. */
            g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
                                     ~PPC_INDIRECT));
        }
    }
}
8398 int ppc_fixup_cpu(PowerPCCPU *cpu)
8400 CPUPPCState *env = &cpu->env;
8403 * TCG doesn't (yet) emulate some groups of instructions that are
8404 * implemented on some otherwise supported CPUs (e.g. VSX and
8405 * decimal floating point instructions on POWER7). We remove
8406 * unsupported instruction groups from the cpu state's instruction
8407 * masks and hope the guest can cope. For at least the pseries
8408 * machine, the unavailability of these instructions can be
8409 * advertised to the guest via the device tree.
8411 if ((env->insns_flags & ~PPC_TCG_INSNS)
8412 || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
8413 warn_report("Disabling some instructions which are not "
8414 "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
8415 env->insns_flags & ~PPC_TCG_INSNS,
8416 env->insns_flags2 & ~PPC_TCG_INSNS2);
8418 env->insns_flags &= PPC_TCG_INSNS;
8419 env->insns_flags2 &= PPC_TCG_INSNS2;
8420 return 0;
/*
 * Decode one instruction via the legacy opcode tables.
 * Walks up to three levels of indirect tables keyed by opc1..opc4,
 * validates the encoding against the handler's invalid-bits mask, and
 * invokes the handler.  Returns false when the opcode is unknown or
 * has invalid bits set (caller then raises the program exception).
 */
static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
{
    opc_handler_t **table, *handler;
    uint32_t inval;

    ctx->opcode = insn;

    LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
              insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
              ctx->le_mode ? "little" : "big");

    /* Descend through the (possibly nested) dispatch tables. */
    table = cpu->opcodes;
    handler = table[opc1(insn)];
    if (is_indirect_opcode(handler)) {
        table = ind_table(handler);
        handler = table[opc2(insn)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc3(insn)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc4(insn)];
            }
        }
    }

    /* Is opcode *REALLY* valid ? */
    if (unlikely(handler->handler == &gen_invalid)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx "\n",
                      opc1(insn), opc2(insn), opc3(insn), opc4(insn),
                      insn, ctx->cia);
        return false;
    }

    /* SPE instructions use a separate invalid-bits mask when Rc is set. */
    if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
                 && Rc(insn))) {
        inval = handler->inval2;
    } else {
        inval = handler->inval1;
    }

    /* Reject encodings that set bits the handler declares invalid. */
    if (unlikely((insn & inval) != 0)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx "\n", insn & inval,
                      opc1(insn), opc2(insn), opc3(insn), opc4(insn),
                      insn, ctx->cia);
        return false;
    }

    handler->handler(ctx);
    return true;
}
/*
 * Translator hook: populate the DisasContext from the CPU state and the
 * translation-relevant hflags that were packed into the TB's flags word.
 */
static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    /* For this target, tb->flags holds the PPC hflags (HFLAGS_* bits). */
    uint32_t hflags = ctx->base.tb->flags;

    ctx->spr_cb = env->spr_cb;
    ctx->pr = (hflags >> HFLAGS_PR) & 1;
    ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
    ctx->dr = (hflags >> HFLAGS_DR) & 1;
    ctx->hv = (hflags >> HFLAGS_HV) & 1;
    ctx->insns_flags = env->insns_flags;
    ctx->insns_flags2 = env->insns_flags2;
    /* No memory access seen yet in this TB. */
    ctx->access_type = -1;
    ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
    ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
    ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
    ctx->flags = env->flags;
#if defined(TARGET_PPC64)
    ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
    ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
    ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
        || env->mmu_model == POWERPC_MMU_601
        || env->mmu_model & POWERPC_MMU_64;

    /* Facility-enable bits for the various instruction groups. */
    ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
    ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
    ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
    ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
    ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
    ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
    ctx->hr = (hflags >> HFLAGS_HR) & 1;
    ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
    ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
    ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;

    ctx->singlestep_enabled = 0;
    if ((hflags >> HFLAGS_SE) & 1) {
        /* Single-step: translate one instruction per TB. */
        ctx->singlestep_enabled |= CPU_SINGLE_STEP;
        ctx->base.max_insns = 1;
    }
    if ((hflags >> HFLAGS_BE) & 1) {
        ctx->singlestep_enabled |= CPU_BRANCH_STEP;
    }
}
/* Translator hook: no per-TB setup is required for PowerPC. */
static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}
/* Translator hook: record the guest PC at the start of each insn. */
static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    tcg_gen_insn_start(dcbase->pc_next);
}
/*
 * Is this the first word of a prefixed (64-bit) instruction?
 * Prefixed instructions use primary opcode 1 and exist from ISA v3.1.
 * NOTE(review): REQUIRE_INSNS_FLAGS2 is a macro defined elsewhere and is
 * presumably an early "return false" when the ISA310 flag is absent —
 * confirm against its definition.
 */
static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    return opc1(insn) == 1;
}
/*
 * Translator hook: fetch and translate one (possibly prefixed)
 * instruction, advancing base.pc_next past all words consumed.
 */
static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    PowerPCCPU *cpu = POWERPC_CPU(cs);
    CPUPPCState *env = cs->env_ptr;
    target_ulong pc;
    uint32_t insn;
    bool ok;

    LOG_DISAS("----------------\n");
    LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
              ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);

    /* Remember the current instruction address and fetch its first word. */
    ctx->cia = pc = ctx->base.pc_next;
    insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
    ctx->base.pc_next = pc += 4;

    if (!is_prefix_insn(ctx, insn)) {
        /* Plain 32-bit instruction: decodetree first, then the legacy
         * opcode tables. */
        ok = (decode_insn32(ctx, insn) ||
              decode_legacy(cpu, ctx, insn));
    } else if ((pc & 63) == 0) {
        /*
         * Power v3.1, section 1.9 Exceptions:
         * attempt to execute a prefixed instruction that crosses a
         * 64-byte address boundary (system alignment error).
         */
        gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
        ok = true;
    } else {
        /* Prefixed instruction: fetch the suffix word as well. */
        uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
                                             need_byteswap(ctx));
        ctx->base.pc_next = pc += 4;
        ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
    }
    if (!ok) {
        gen_invalid(ctx);
    }

    /* End the TB when crossing a page boundary. */
    if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }

    translator_loop_temp_check(&ctx->base);
}
8587 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
8589 DisasContext *ctx = container_of(dcbase, DisasContext, base);
8590 DisasJumpType is_jmp = ctx->base.is_jmp;
8591 target_ulong nip = ctx->base.pc_next;
8593 if (is_jmp == DISAS_NORETURN) {
8594 /* We have already exited the TB. */
8595 return;
8598 /* Honor single stepping. */
8599 if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)
8600 && (nip <= 0x100 || nip > 0xf00)) {
8601 switch (is_jmp) {
8602 case DISAS_TOO_MANY:
8603 case DISAS_EXIT_UPDATE:
8604 case DISAS_CHAIN_UPDATE:
8605 gen_update_nip(ctx, nip);
8606 break;
8607 case DISAS_EXIT:
8608 case DISAS_CHAIN:
8609 break;
8610 default:
8611 g_assert_not_reached();
8614 gen_debug_exception(ctx);
8615 return;
8618 switch (is_jmp) {
8619 case DISAS_TOO_MANY:
8620 if (use_goto_tb(ctx, nip)) {
8621 pmu_count_insns(ctx);
8622 tcg_gen_goto_tb(0);
8623 gen_update_nip(ctx, nip);
8624 tcg_gen_exit_tb(ctx->base.tb, 0);
8625 break;
8627 /* fall through */
8628 case DISAS_CHAIN_UPDATE:
8629 gen_update_nip(ctx, nip);
8630 /* fall through */
8631 case DISAS_CHAIN:
8633 * tcg_gen_lookup_and_goto_ptr will exit the TB if
8634 * CF_NO_GOTO_PTR is set. Count insns now.
8636 if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
8637 pmu_count_insns(ctx);
8640 tcg_gen_lookup_and_goto_ptr();
8641 break;
8643 case DISAS_EXIT_UPDATE:
8644 gen_update_nip(ctx, nip);
8645 /* fall through */
8646 case DISAS_EXIT:
8647 pmu_count_insns(ctx);
8648 tcg_gen_exit_tb(NULL, 0);
8649 break;
8651 default:
8652 g_assert_not_reached();
8656 static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
8658 qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
8659 log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
/* Callback table wired into the generic translator_loop(). */
static const TranslatorOps ppc_tr_ops = {
    .init_disas_context = ppc_tr_init_disas_context,
    .tb_start           = ppc_tr_tb_start,
    .insn_start         = ppc_tr_insn_start,
    .translate_insn     = ppc_tr_translate_insn,
    .tb_stop            = ppc_tr_tb_stop,
    .disas_log          = ppc_tr_disas_log,
};
/* Entry point: translate one guest TB via the generic translator loop. */
void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns)
{
    DisasContext ctx;

    translator_loop(&ppc_tr_ops, &ctx.base, cs, tb, max_insns);
}
8678 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
8679 target_ulong *data)
8681 env->nip = data[0];