[qemu/ar7.git] / target-ppc / translate.c
1 /*
2 * PowerPC emulation for qemu: main translation routines.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 * Copyright (C) 2011 Freescale Semiconductor, Inc.
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "disas/disas.h"
24 #include "exec/exec-all.h"
25 #include "tcg-op.h"
26 #include "qemu/host-utils.h"
27 #include "exec/cpu_ldst.h"
29 #include "exec/helper-proto.h"
30 #include "exec/helper-gen.h"
32 #include "trace-tcg.h"
33 #include "exec/log.h"
36 #define CPU_SINGLE_STEP 0x1
37 #define CPU_BRANCH_STEP 0x2
38 #define GDBSTUB_SINGLE_STEP 0x4
40 /* Include definitions for instruction classes and implementation flags */
41 //#define PPC_DEBUG_DISAS
42 //#define DO_PPC_STATISTICS
44 #ifdef PPC_DEBUG_DISAS
45 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
46 #else
47 # define LOG_DISAS(...) do { } while (0)
48 #endif
49 /*****************************************************************************/
50 /* Code translation helpers */
52 /* global register indexes */
53 static TCGv_env cpu_env;
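/* The buffer below is sized for every register name built in
 * ppc_translate_init(), counting the trailing NUL each time: "r0".."r9" take
 * 3 bytes and "r10".."r31" take 4, the "rNH" and "fpN" names one byte more,
 * and so on up to "crf0".."crf7" at 5 bytes each. */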
54 static char cpu_reg_names[10*3 + 22*4 /* GPR */
55 + 10*4 + 22*5 /* SPE GPRh */
56 + 10*4 + 22*5 /* FPR */
57 + 2*(10*6 + 22*7) /* AVRh, AVRl */
58 + 10*5 + 22*6 /* VSR */
59 + 8*5 /* CRF */];
60 static TCGv cpu_gpr[32];
61 static TCGv cpu_gprh[32];
62 static TCGv_i64 cpu_fpr[32];
63 static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
64 static TCGv_i64 cpu_vsr[32];
65 static TCGv_i32 cpu_crf[8];
66 static TCGv cpu_nip;
67 static TCGv cpu_msr;
68 static TCGv cpu_ctr;
69 static TCGv cpu_lr;
70 #if defined(TARGET_PPC64)
71 static TCGv cpu_cfar;
72 #endif
73 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca;
74 static TCGv cpu_reserve;
75 static TCGv cpu_fpscr;
76 static TCGv_i32 cpu_access_type;
78 #include "exec/gen-icount.h"
80 void ppc_translate_init(void)
82 int i;
83 char* p;
84 size_t cpu_reg_names_size;
85 static int done_init = 0;
87 if (done_init)
88 return;
90 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
91 tcg_ctx.tcg_env = cpu_env;
93 p = cpu_reg_names;
94 cpu_reg_names_size = sizeof(cpu_reg_names);
96 for (i = 0; i < 8; i++) {
97 snprintf(p, cpu_reg_names_size, "crf%d", i);
98 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
99 offsetof(CPUPPCState, crf[i]), p);
100 p += 5;
101 cpu_reg_names_size -= 5;
104 for (i = 0; i < 32; i++) {
105 snprintf(p, cpu_reg_names_size, "r%d", i);
106 cpu_gpr[i] = tcg_global_mem_new(cpu_env,
107 offsetof(CPUPPCState, gpr[i]), p);
108 p += (i < 10) ? 3 : 4;
109 cpu_reg_names_size -= (i < 10) ? 3 : 4;
110 snprintf(p, cpu_reg_names_size, "r%dH", i);
111 cpu_gprh[i] = tcg_global_mem_new(cpu_env,
112 offsetof(CPUPPCState, gprh[i]), p);
113 p += (i < 10) ? 4 : 5;
114 cpu_reg_names_size -= (i < 10) ? 4 : 5;
116 snprintf(p, cpu_reg_names_size, "fp%d", i);
117 cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
118 offsetof(CPUPPCState, fpr[i]), p);
119 p += (i < 10) ? 4 : 5;
120 cpu_reg_names_size -= (i < 10) ? 4 : 5;
122 snprintf(p, cpu_reg_names_size, "avr%dH", i);
123 #ifdef HOST_WORDS_BIGENDIAN
124 cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env,
125 offsetof(CPUPPCState, avr[i].u64[0]), p);
126 #else
127 cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env,
128 offsetof(CPUPPCState, avr[i].u64[1]), p);
129 #endif
130 p += (i < 10) ? 6 : 7;
131 cpu_reg_names_size -= (i < 10) ? 6 : 7;
133 snprintf(p, cpu_reg_names_size, "avr%dL", i);
134 #ifdef HOST_WORDS_BIGENDIAN
135 cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env,
136 offsetof(CPUPPCState, avr[i].u64[1]), p);
137 #else
138 cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env,
139 offsetof(CPUPPCState, avr[i].u64[0]), p);
140 #endif
141 p += (i < 10) ? 6 : 7;
142 cpu_reg_names_size -= (i < 10) ? 6 : 7;
143 snprintf(p, cpu_reg_names_size, "vsr%d", i);
144 cpu_vsr[i] = tcg_global_mem_new_i64(cpu_env,
145 offsetof(CPUPPCState, vsr[i]), p);
146 p += (i < 10) ? 5 : 6;
147 cpu_reg_names_size -= (i < 10) ? 5 : 6;
150 cpu_nip = tcg_global_mem_new(cpu_env,
151 offsetof(CPUPPCState, nip), "nip");
153 cpu_msr = tcg_global_mem_new(cpu_env,
154 offsetof(CPUPPCState, msr), "msr");
156 cpu_ctr = tcg_global_mem_new(cpu_env,
157 offsetof(CPUPPCState, ctr), "ctr");
159 cpu_lr = tcg_global_mem_new(cpu_env,
160 offsetof(CPUPPCState, lr), "lr");
162 #if defined(TARGET_PPC64)
163 cpu_cfar = tcg_global_mem_new(cpu_env,
164 offsetof(CPUPPCState, cfar), "cfar");
165 #endif
167 cpu_xer = tcg_global_mem_new(cpu_env,
168 offsetof(CPUPPCState, xer), "xer");
169 cpu_so = tcg_global_mem_new(cpu_env,
170 offsetof(CPUPPCState, so), "SO");
171 cpu_ov = tcg_global_mem_new(cpu_env,
172 offsetof(CPUPPCState, ov), "OV");
173 cpu_ca = tcg_global_mem_new(cpu_env,
174 offsetof(CPUPPCState, ca), "CA");
176 cpu_reserve = tcg_global_mem_new(cpu_env,
177 offsetof(CPUPPCState, reserve_addr),
178 "reserve_addr");
180 cpu_fpscr = tcg_global_mem_new(cpu_env,
181 offsetof(CPUPPCState, fpscr), "fpscr");
183 cpu_access_type = tcg_global_mem_new_i32(cpu_env,
184 offsetof(CPUPPCState, access_type), "access_type");
186 done_init = 1;
189 /* internal defines */
190 struct DisasContext {
191 struct TranslationBlock *tb;
192 target_ulong nip;
193 uint32_t opcode;
194 uint32_t exception;
195 /* Routine used to access memory */
196 bool pr, hv, dr, le_mode;
197 bool lazy_tlb_flush;
198 bool need_access_type;
199 int mem_idx;
200 int access_type;
201 /* Translation flags */
202 TCGMemOp default_tcg_memop_mask;
203 #if defined(TARGET_PPC64)
204 bool sf_mode;
205 bool has_cfar;
206 #endif
207 bool fpu_enabled;
208 bool altivec_enabled;
209 bool vsx_enabled;
210 bool spe_enabled;
211 bool tm_enabled;
212 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
213 int singlestep_enabled;
214 uint64_t insns_flags;
215 uint64_t insns_flags2;
218 /* Return true iff byteswap is needed in a scalar memop */
219 static inline bool need_byteswap(const DisasContext *ctx)
221 #if defined(TARGET_WORDS_BIGENDIAN)
222 return ctx->le_mode;
223 #else
224 return !ctx->le_mode;
225 #endif
228 /* True when active word size < size of target_long. */
229 #ifdef TARGET_PPC64
230 # define NARROW_MODE(C) (!(C)->sf_mode)
231 #else
232 # define NARROW_MODE(C) 0
233 #endif
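/* Narrow mode means a 64-bit CPU running with the MSR SF bit clear: arithmetic
 * still uses the full 64-bit registers, but carries, overflows and comparisons
 * are taken at 32 bits.  On 32-bit targets the macro is constant false, so the
 * extra narrow-mode paths compile away. */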
235 struct opc_handler_t {
236 /* invalid bits for instruction 1 (Rc(opcode) == 0) */
237 uint32_t inval1;
238 /* invalid bits for instruction 2 (Rc(opcode) == 1) */
239 uint32_t inval2;
240 /* instruction type */
241 uint64_t type;
242 /* extended instruction type */
243 uint64_t type2;
244 /* handler */
245 void (*handler)(DisasContext *ctx);
246 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
247 const char *oname;
248 #endif
249 #if defined(DO_PPC_STATISTICS)
250 uint64_t count;
251 #endif
254 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
256 if (ctx->need_access_type && ctx->access_type != access_type) {
257 tcg_gen_movi_i32(cpu_access_type, access_type);
258 ctx->access_type = access_type;
262 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
264 if (NARROW_MODE(ctx)) {
265 nip = (uint32_t)nip;
267 tcg_gen_movi_tl(cpu_nip, nip);
270 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
272 TCGv_i32 t0, t1;
274 /* These are all synchronous exceptions, we set the PC back to
275 * the faulting instruction */
277 if (ctx->exception == POWERPC_EXCP_NONE) {
278 gen_update_nip(ctx, ctx->nip - 4);
280 t0 = tcg_const_i32(excp);
281 t1 = tcg_const_i32(error);
282 gen_helper_raise_exception_err(cpu_env, t0, t1);
283 tcg_temp_free_i32(t0);
284 tcg_temp_free_i32(t1);
285 ctx->exception = (excp);
288 static void gen_exception(DisasContext *ctx, uint32_t excp)
290 TCGv_i32 t0;
292 /* These are all synchronous exceptions, we set the PC back to
293 * the faulting instruction */
295 if (ctx->exception == POWERPC_EXCP_NONE) {
296 gen_update_nip(ctx, ctx->nip - 4);
298 t0 = tcg_const_i32(excp);
299 gen_helper_raise_exception(cpu_env, t0);
300 tcg_temp_free_i32(t0);
301 ctx->exception = (excp);
304 static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
305 target_ulong nip)
307 TCGv_i32 t0;
309 gen_update_nip(ctx, nip);
310 t0 = tcg_const_i32(excp);
311 gen_helper_raise_exception(cpu_env, t0);
312 tcg_temp_free_i32(t0);
313 ctx->exception = (excp);
316 static void gen_debug_exception(DisasContext *ctx)
318 TCGv_i32 t0;
320 /* These are all synchronous exceptions, we set the PC back to
321 * the faulting instruction */
323 if ((ctx->exception != POWERPC_EXCP_BRANCH) &&
324 (ctx->exception != POWERPC_EXCP_SYNC)) {
325 gen_update_nip(ctx, ctx->nip - 4);
327 t0 = tcg_const_i32(EXCP_DEBUG);
328 gen_helper_raise_exception(cpu_env, t0);
329 tcg_temp_free_i32(t0);
332 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
334 /* Will be converted to program check if needed */
335 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
338 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
340 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
343 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
345 /* Will be converted to program check if needed */
346 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
349 /* Stop translation */
350 static inline void gen_stop_exception(DisasContext *ctx)
352 gen_update_nip(ctx, ctx->nip);
353 ctx->exception = POWERPC_EXCP_STOP;
356 #ifndef CONFIG_USER_ONLY
357 /* No need to update nip here, as execution flow will change */
358 static inline void gen_sync_exception(DisasContext *ctx)
360 ctx->exception = POWERPC_EXCP_SYNC;
362 #endif
364 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
365 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
367 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \
368 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
370 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
371 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
373 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \
374 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
376 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \
377 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)
379 typedef struct opcode_t {
380 unsigned char opc1, opc2, opc3, opc4;
381 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
382 unsigned char pad[4];
383 #endif
384 opc_handler_t handler;
385 const char *oname;
386 } opcode_t;
388 /* Helpers for priv. check */
389 #define GEN_PRIV \
390 do { \
391 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \
392 } while (0)
394 #if defined(CONFIG_USER_ONLY)
395 #define CHK_HV GEN_PRIV
396 #define CHK_SV GEN_PRIV
397 #define CHK_HVRM GEN_PRIV
398 #else
399 #define CHK_HV \
400 do { \
401 if (unlikely(ctx->pr || !ctx->hv)) { \
402 GEN_PRIV; \
404 } while (0)
405 #define CHK_SV \
406 do { \
407 if (unlikely(ctx->pr)) { \
408 GEN_PRIV; \
410 } while (0)
411 #define CHK_HVRM \
412 do { \
413 if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
414 GEN_PRIV; \
416 } while (0)
417 #endif
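/* CHK_SV rejects the instruction in problem state (ctx->pr), CHK_HV also
 * requires hypervisor state (ctx->hv), and CHK_HVRM additionally requires data
 * relocation to be off (!ctx->dr, i.e. hypervisor real mode).  In user-only
 * builds every check raises the privileged-instruction exception. */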
419 #define CHK_NONE
422 /*****************************************************************************/
423 /*** Instruction decoding ***/
424 #define EXTRACT_HELPER(name, shift, nb) \
425 static inline uint32_t name(uint32_t opcode) \
427 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
430 #define EXTRACT_SHELPER(name, shift, nb) \
431 static inline int32_t name(uint32_t opcode) \
433 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
436 #define EXTRACT_HELPER_SPLIT(name, shift1, nb1, shift2, nb2) \
437 static inline uint32_t name(uint32_t opcode) \
439 return (((opcode >> (shift1)) & ((1 << (nb1)) - 1)) << nb2) | \
440 ((opcode >> (shift2)) & ((1 << (nb2)) - 1)); \
443 #define EXTRACT_HELPER_DXFORM(name, \
444 d0_bits, shift_op_d0, shift_d0, \
445 d1_bits, shift_op_d1, shift_d1, \
446 d2_bits, shift_op_d2, shift_d2) \
447 static inline int16_t name(uint32_t opcode) \
449 return \
450 (((opcode >> (shift_op_d0)) & ((1 << (d0_bits)) - 1)) << (shift_d0)) | \
451 (((opcode >> (shift_op_d1)) & ((1 << (d1_bits)) - 1)) << (shift_d1)) | \
452 (((opcode >> (shift_op_d2)) & ((1 << (d2_bits)) - 1)) << (shift_d2)); \
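/* The helpers below use little-endian bit positions within the 32-bit opcode
 * word: e.g. opc1 extracts the primary opcode from the six most significant
 * bits ((opcode >> 26) & 0x3f), which the ISA numbers as bits 0..5. */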
456 /* Opcode part 1 */
457 EXTRACT_HELPER(opc1, 26, 6);
458 /* Opcode part 2 */
459 EXTRACT_HELPER(opc2, 1, 5);
460 /* Opcode part 3 */
461 EXTRACT_HELPER(opc3, 6, 5);
462 /* Opcode part 4 */
463 EXTRACT_HELPER(opc4, 16, 5);
464 /* Update Cr0 flags */
465 EXTRACT_HELPER(Rc, 0, 1);
466 /* Update Cr6 flags (Altivec) */
467 EXTRACT_HELPER(Rc21, 10, 1);
468 /* Destination */
469 EXTRACT_HELPER(rD, 21, 5);
470 /* Source */
471 EXTRACT_HELPER(rS, 21, 5);
472 /* First operand */
473 EXTRACT_HELPER(rA, 16, 5);
474 /* Second operand */
475 EXTRACT_HELPER(rB, 11, 5);
476 /* Third operand */
477 EXTRACT_HELPER(rC, 6, 5);
478 /*** Get CRn ***/
479 EXTRACT_HELPER(crfD, 23, 3);
480 EXTRACT_HELPER(crfS, 18, 3);
481 EXTRACT_HELPER(crbD, 21, 5);
482 EXTRACT_HELPER(crbA, 16, 5);
483 EXTRACT_HELPER(crbB, 11, 5);
484 /* SPR / TBL */
485 EXTRACT_HELPER(_SPR, 11, 10);
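/* The SPR field is encoded with its two 5-bit halves swapped; _SPR() extracts
 * the raw 10-bit field and SPR() swaps the halves back to recover the
 * architected SPR number. */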
486 static inline uint32_t SPR(uint32_t opcode)
488 uint32_t sprn = _SPR(opcode);
490 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
492 /*** Get constants ***/
493 /* 16 bits signed immediate value */
494 EXTRACT_SHELPER(SIMM, 0, 16);
495 /* 16 bits unsigned immediate value */
496 EXTRACT_HELPER(UIMM, 0, 16);
497 /* 5 bits signed immediate value */
498 EXTRACT_HELPER(SIMM5, 16, 5);
499 /* 5 bits unsigned immediate value */
500 EXTRACT_HELPER(UIMM5, 16, 5);
501 /* 4 bits unsigned immediate value */
502 EXTRACT_HELPER(UIMM4, 16, 4);
503 /* Bit count */
504 EXTRACT_HELPER(NB, 11, 5);
505 /* Shift count */
506 EXTRACT_HELPER(SH, 11, 5);
507 /* Vector shift count */
508 EXTRACT_HELPER(VSH, 6, 4);
509 /* Mask start */
510 EXTRACT_HELPER(MB, 6, 5);
511 /* Mask end */
512 EXTRACT_HELPER(ME, 1, 5);
513 /* Trap operand */
514 EXTRACT_HELPER(TO, 21, 5);
516 EXTRACT_HELPER(CRM, 12, 8);
518 #ifndef CONFIG_USER_ONLY
519 EXTRACT_HELPER(SR, 16, 4);
520 #endif
522 /* mtfsf/mtfsfi */
523 EXTRACT_HELPER(FPBF, 23, 3);
524 EXTRACT_HELPER(FPIMM, 12, 4);
525 EXTRACT_HELPER(FPL, 25, 1);
526 EXTRACT_HELPER(FPFLM, 17, 8);
527 EXTRACT_HELPER(FPW, 16, 1);
529 /* addpcis */
530 EXTRACT_HELPER_DXFORM(DX, 10, 6, 6, 5, 16, 1, 1, 0, 0)
531 #if defined(TARGET_PPC64)
532 /* darn */
533 EXTRACT_HELPER(L, 16, 2);
534 #endif
536 /*** Jump target decoding ***/
537 /* Immediate address */
538 static inline target_ulong LI(uint32_t opcode)
540 return (opcode >> 0) & 0x03FFFFFC;
543 static inline uint32_t BD(uint32_t opcode)
545 return (opcode >> 0) & 0xFFFC;
548 EXTRACT_HELPER(BO, 21, 5);
549 EXTRACT_HELPER(BI, 16, 5);
550 /* Absolute/relative address */
551 EXTRACT_HELPER(AA, 1, 1);
552 /* Link */
553 EXTRACT_HELPER(LK, 0, 1);
555 /* DFP Z22-form */
556 EXTRACT_HELPER(DCM, 10, 6)
558 /* DFP Z23-form */
559 EXTRACT_HELPER(RMC, 9, 2)
561 /* Create a mask between <start> and <end> bits */
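/* MASK() follows the PowerPC convention of bit 0 being the most significant
 * bit: on a 64-bit target MASK(0, 63) is all ones and MASK(32, 63) is
 * 0x00000000FFFFFFFF.  When start > end the mask wraps around (it is the
 * complement of MASK(end + 1, start - 1)), as the rotate-and-mask instructions
 * require. */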
562 static inline target_ulong MASK(uint32_t start, uint32_t end)
564 target_ulong ret;
566 #if defined(TARGET_PPC64)
567 if (likely(start == 0)) {
568 ret = UINT64_MAX << (63 - end);
569 } else if (likely(end == 63)) {
570 ret = UINT64_MAX >> start;
572 #else
573 if (likely(start == 0)) {
574 ret = UINT32_MAX << (31 - end);
575 } else if (likely(end == 31)) {
576 ret = UINT32_MAX >> start;
578 #endif
579 else {
580 ret = (((target_ulong)(-1ULL)) >> (start)) ^
581 (((target_ulong)(-1ULL) >> (end)) >> 1);
582 if (unlikely(start > end))
583 return ~ret;
586 return ret;
589 EXTRACT_HELPER_SPLIT(xT, 0, 1, 21, 5);
590 EXTRACT_HELPER_SPLIT(xS, 0, 1, 21, 5);
591 EXTRACT_HELPER_SPLIT(xA, 2, 1, 16, 5);
592 EXTRACT_HELPER_SPLIT(xB, 1, 1, 11, 5);
593 EXTRACT_HELPER_SPLIT(xC, 3, 1, 6, 5);
594 EXTRACT_HELPER(DM, 8, 2);
595 EXTRACT_HELPER(UIM, 16, 2);
596 EXTRACT_HELPER(SHW, 8, 2);
597 EXTRACT_HELPER(SP, 19, 2);
598 EXTRACT_HELPER(IMM8, 11, 8);
600 /*****************************************************************************/
601 /* PowerPC instructions table */
603 #if defined(DO_PPC_STATISTICS)
604 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
606 .opc1 = op1, \
607 .opc2 = op2, \
608 .opc3 = op3, \
609 .opc4 = 0xff, \
610 .handler = { \
611 .inval1 = invl, \
612 .type = _typ, \
613 .type2 = _typ2, \
614 .handler = &gen_##name, \
615 .oname = stringify(name), \
616 }, \
617 .oname = stringify(name), \
619 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
621 .opc1 = op1, \
622 .opc2 = op2, \
623 .opc3 = op3, \
624 .opc4 = 0xff, \
625 .handler = { \
626 .inval1 = invl1, \
627 .inval2 = invl2, \
628 .type = _typ, \
629 .type2 = _typ2, \
630 .handler = &gen_##name, \
631 .oname = stringify(name), \
632 }, \
633 .oname = stringify(name), \
635 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
637 .opc1 = op1, \
638 .opc2 = op2, \
639 .opc3 = op3, \
640 .opc4 = 0xff, \
641 .handler = { \
642 .inval1 = invl, \
643 .type = _typ, \
644 .type2 = _typ2, \
645 .handler = &gen_##name, \
646 .oname = onam, \
647 }, \
648 .oname = onam, \
650 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \
652 .opc1 = op1, \
653 .opc2 = op2, \
654 .opc3 = op3, \
655 .opc4 = op4, \
656 .handler = { \
657 .inval1 = invl, \
658 .type = _typ, \
659 .type2 = _typ2, \
660 .handler = &gen_##name, \
661 .oname = stringify(name), \
662 }, \
663 .oname = stringify(name), \
665 #else
666 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
668 .opc1 = op1, \
669 .opc2 = op2, \
670 .opc3 = op3, \
671 .opc4 = 0xff, \
672 .handler = { \
673 .inval1 = invl, \
674 .type = _typ, \
675 .type2 = _typ2, \
676 .handler = &gen_##name, \
677 }, \
678 .oname = stringify(name), \
680 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
682 .opc1 = op1, \
683 .opc2 = op2, \
684 .opc3 = op3, \
685 .opc4 = 0xff, \
686 .handler = { \
687 .inval1 = invl1, \
688 .inval2 = invl2, \
689 .type = _typ, \
690 .type2 = _typ2, \
691 .handler = &gen_##name, \
692 }, \
693 .oname = stringify(name), \
695 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
697 .opc1 = op1, \
698 .opc2 = op2, \
699 .opc3 = op3, \
700 .opc4 = 0xff, \
701 .handler = { \
702 .inval1 = invl, \
703 .type = _typ, \
704 .type2 = _typ2, \
705 .handler = &gen_##name, \
706 }, \
707 .oname = onam, \
709 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \
711 .opc1 = op1, \
712 .opc2 = op2, \
713 .opc3 = op3, \
714 .opc4 = op4, \
715 .handler = { \
716 .inval1 = invl, \
717 .type = _typ, \
718 .type2 = _typ2, \
719 .handler = &gen_##name, \
720 }, \
721 .oname = stringify(name), \
723 #endif
725 /* SPR load/store helpers */
726 static inline void gen_load_spr(TCGv t, int reg)
728 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
731 static inline void gen_store_spr(int reg, TCGv t)
733 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
736 /* Invalid instruction */
737 static void gen_invalid(DisasContext *ctx)
739 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
742 static opc_handler_t invalid_handler = {
743 .inval1 = 0xFFFFFFFF,
744 .inval2 = 0xFFFFFFFF,
745 .type = PPC_NONE,
746 .type2 = PPC_NONE,
747 .handler = gen_invalid,
750 /*** Integer comparison ***/
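/* gen_op_cmp builds the target CR field in place: it is seeded with the SO
 * (summary overflow) bit, then the LT, GT and EQ setcond results are ORed in
 * at their CRF_* positions, so exactly one of LT/GT/EQ ends up set. */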
752 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
754 TCGv t0 = tcg_temp_new();
755 TCGv_i32 t1 = tcg_temp_new_i32();
757 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
759 tcg_gen_setcond_tl((s ? TCG_COND_LT: TCG_COND_LTU), t0, arg0, arg1);
760 tcg_gen_trunc_tl_i32(t1, t0);
761 tcg_gen_shli_i32(t1, t1, CRF_LT);
762 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);
764 tcg_gen_setcond_tl((s ? TCG_COND_GT: TCG_COND_GTU), t0, arg0, arg1);
765 tcg_gen_trunc_tl_i32(t1, t0);
766 tcg_gen_shli_i32(t1, t1, CRF_GT);
767 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);
769 tcg_gen_setcond_tl(TCG_COND_EQ, t0, arg0, arg1);
770 tcg_gen_trunc_tl_i32(t1, t0);
771 tcg_gen_shli_i32(t1, t1, CRF_EQ);
772 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);
774 tcg_temp_free(t0);
775 tcg_temp_free_i32(t1);
778 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
780 TCGv t0 = tcg_const_tl(arg1);
781 gen_op_cmp(arg0, t0, s, crf);
782 tcg_temp_free(t0);
785 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
787 TCGv t0, t1;
788 t0 = tcg_temp_new();
789 t1 = tcg_temp_new();
790 if (s) {
791 tcg_gen_ext32s_tl(t0, arg0);
792 tcg_gen_ext32s_tl(t1, arg1);
793 } else {
794 tcg_gen_ext32u_tl(t0, arg0);
795 tcg_gen_ext32u_tl(t1, arg1);
797 gen_op_cmp(t0, t1, s, crf);
798 tcg_temp_free(t1);
799 tcg_temp_free(t0);
802 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
804 TCGv t0 = tcg_const_tl(arg1);
805 gen_op_cmp32(arg0, t0, s, crf);
806 tcg_temp_free(t0);
809 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
811 if (NARROW_MODE(ctx)) {
812 gen_op_cmpi32(reg, 0, 1, 0);
813 } else {
814 gen_op_cmpi(reg, 0, 1, 0);
818 /* cmp */
819 static void gen_cmp(DisasContext *ctx)
821 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
822 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
823 1, crfD(ctx->opcode));
824 } else {
825 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
826 1, crfD(ctx->opcode));
830 /* cmpi */
831 static void gen_cmpi(DisasContext *ctx)
833 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
834 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
835 1, crfD(ctx->opcode));
836 } else {
837 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
838 1, crfD(ctx->opcode));
842 /* cmpl */
843 static void gen_cmpl(DisasContext *ctx)
845 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
846 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
847 0, crfD(ctx->opcode));
848 } else {
849 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
850 0, crfD(ctx->opcode));
854 /* cmpli */
855 static void gen_cmpli(DisasContext *ctx)
857 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
858 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
859 0, crfD(ctx->opcode));
860 } else {
861 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
862 0, crfD(ctx->opcode));
866 /* cmprb - range comparison: isupper, isalpha, islower */
867 static void gen_cmprb(DisasContext *ctx)
869 TCGv_i32 src1 = tcg_temp_new_i32();
870 TCGv_i32 src2 = tcg_temp_new_i32();
871 TCGv_i32 src2lo = tcg_temp_new_i32();
872 TCGv_i32 src2hi = tcg_temp_new_i32();
873 TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];
875 tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
876 tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);
878 tcg_gen_andi_i32(src1, src1, 0xFF);
879 tcg_gen_ext8u_i32(src2lo, src2);
880 tcg_gen_shri_i32(src2, src2, 8);
881 tcg_gen_ext8u_i32(src2hi, src2);
883 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
884 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
885 tcg_gen_and_i32(crf, src2lo, src2hi);
887 if (ctx->opcode & 0x00200000) {
888 tcg_gen_shri_i32(src2, src2, 8);
889 tcg_gen_ext8u_i32(src2lo, src2);
890 tcg_gen_shri_i32(src2, src2, 8);
891 tcg_gen_ext8u_i32(src2hi, src2);
892 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
893 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
894 tcg_gen_and_i32(src2lo, src2lo, src2hi);
895 tcg_gen_or_i32(crf, crf, src2lo);
897 tcg_gen_shli_i32(crf, crf, CRF_GT);
898 tcg_temp_free_i32(src1);
899 tcg_temp_free_i32(src2);
900 tcg_temp_free_i32(src2lo);
901 tcg_temp_free_i32(src2hi);
904 #if defined(TARGET_PPC64)
905 /* cmpeqb */
906 static void gen_cmpeqb(DisasContext *ctx)
908 gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
909 cpu_gpr[rB(ctx->opcode)]);
911 #endif
913 /* isel (PowerPC 2.03 specification) */
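/* isel: rD = (CR bit BC set) ? rA : rB, with rA read as zero when the rA
 * field is 0.  The 4-bit CR field is stored with LT as its most significant
 * bit, which is what the mask computation below relies on. */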
914 static void gen_isel(DisasContext *ctx)
916 uint32_t bi = rC(ctx->opcode);
917 uint32_t mask = 0x08 >> (bi & 0x03);
918 TCGv t0 = tcg_temp_new();
919 TCGv zr;
921 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
922 tcg_gen_andi_tl(t0, t0, mask);
924 zr = tcg_const_tl(0);
925 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
926 rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
927 cpu_gpr[rB(ctx->opcode)]);
928 tcg_temp_free(zr);
929 tcg_temp_free(t0);
932 /* cmpb: PowerPC 2.05 specification */
933 static void gen_cmpb(DisasContext *ctx)
935 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
936 cpu_gpr[rB(ctx->opcode)]);
939 /*** Integer arithmetic ***/
941 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
942 TCGv arg1, TCGv arg2, int sub)
944 TCGv t0 = tcg_temp_new();
946 tcg_gen_xor_tl(cpu_ov, arg0, arg2);
947 tcg_gen_xor_tl(t0, arg1, arg2);
948 if (sub) {
949 tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
950 } else {
951 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
953 tcg_temp_free(t0);
954 if (NARROW_MODE(ctx)) {
955 tcg_gen_ext32s_tl(cpu_ov, cpu_ov);
957 tcg_gen_shri_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1);
958 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
961 /* Common add function */
962 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
963 TCGv arg2, bool add_ca, bool compute_ca,
964 bool compute_ov, bool compute_rc0)
966 TCGv t0 = ret;
968 if (compute_ca || compute_ov) {
969 t0 = tcg_temp_new();
972 if (compute_ca) {
973 if (NARROW_MODE(ctx)) {
974 /* Caution: a non-obvious corner case of the spec is that we
975 must produce the *entire* 64-bit addition, but produce the
976 carry into bit 32. */
977 TCGv t1 = tcg_temp_new();
978 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */
979 tcg_gen_add_tl(t0, arg1, arg2);
980 if (add_ca) {
981 tcg_gen_add_tl(t0, t0, cpu_ca);
983 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */
984 tcg_temp_free(t1);
985 tcg_gen_shri_tl(cpu_ca, cpu_ca, 32); /* extract bit 32 */
986 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
987 } else {
988 TCGv zero = tcg_const_tl(0);
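            /* tcg_gen_add2_tl produces a double-word sum; with both high
             * parts zero, the low half is the target_long sum and the high
             * half is the carry-out, which lands directly in cpu_ca. */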
989 if (add_ca) {
990 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, cpu_ca, zero);
991 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, arg2, zero);
992 } else {
993 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, arg2, zero);
995 tcg_temp_free(zero);
997 } else {
998 tcg_gen_add_tl(t0, arg1, arg2);
999 if (add_ca) {
1000 tcg_gen_add_tl(t0, t0, cpu_ca);
1004 if (compute_ov) {
1005 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1007 if (unlikely(compute_rc0)) {
1008 gen_set_Rc0(ctx, t0);
1011 if (!TCGV_EQUAL(t0, ret)) {
1012 tcg_gen_mov_tl(ret, t0);
1013 tcg_temp_free(t0);
1016 /* Add functions with two operands */
1017 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
1018 static void glue(gen_, name)(DisasContext *ctx) \
1020 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
1021 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1022 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1024 /* Add functions with one operand and one immediate */
1025 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
1026 add_ca, compute_ca, compute_ov) \
1027 static void glue(gen_, name)(DisasContext *ctx) \
1029 TCGv t0 = tcg_const_tl(const_val); \
1030 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
1031 cpu_gpr[rA(ctx->opcode)], t0, \
1032 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1033 tcg_temp_free(t0); \
1036 /* add add. addo addo. */
1037 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
1038 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
1039 /* addc addc. addco addco. */
1040 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
1041 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
1042 /* adde adde. addeo addeo. */
1043 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
1044 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
1045 /* addme addme. addmeo addmeo. */
1046 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
1047 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
1048 /* addze addze. addzeo addzeo. */
1049 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
1050 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
1051 /* addi */
1052 static void gen_addi(DisasContext *ctx)
1054 target_long simm = SIMM(ctx->opcode);
1056 if (rA(ctx->opcode) == 0) {
1057 /* li case */
1058 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
1059 } else {
1060 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
1061 cpu_gpr[rA(ctx->opcode)], simm);
1064 /* addic addic.*/
1065 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
1067 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
1068 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1069 c, 0, 1, 0, compute_rc0);
1070 tcg_temp_free(c);
1073 static void gen_addic(DisasContext *ctx)
1075 gen_op_addic(ctx, 0);
1078 static void gen_addic_(DisasContext *ctx)
1080 gen_op_addic(ctx, 1);
1083 /* addis */
1084 static void gen_addis(DisasContext *ctx)
1086 target_long simm = SIMM(ctx->opcode);
1088 if (rA(ctx->opcode) == 0) {
1089 /* lis case */
1090 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
1091 } else {
1092 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
1093 cpu_gpr[rA(ctx->opcode)], simm << 16);
1097 /* addpcis */
1098 static void gen_addpcis(DisasContext *ctx)
1100 target_long d = DX(ctx->opcode);
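    /* addpcis adds the sign-extended immediate, shifted left 16 bits, to the
     * address of the following instruction; ctx->nip already points past the
     * current instruction here (the exception paths above subtract 4 to get
     * back to the faulting address). */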
1102 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->nip + (d << 16));
1105 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
1106 TCGv arg2, int sign, int compute_ov)
1108 TCGv_i32 t0 = tcg_temp_new_i32();
1109 TCGv_i32 t1 = tcg_temp_new_i32();
1110 TCGv_i32 t2 = tcg_temp_new_i32();
1111 TCGv_i32 t3 = tcg_temp_new_i32();
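    /* For signed division, t2 flags the undefined cases (INT_MIN / -1 and
     * division by zero); the movcond then substitutes a harmless divisor so
     * the host division cannot trap, and t2 is what feeds cpu_ov when
     * compute_ov is set.  The unsigned path only guards against a zero
     * divisor. */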
1113 tcg_gen_trunc_tl_i32(t0, arg1);
1114 tcg_gen_trunc_tl_i32(t1, arg2);
1115 if (sign) {
1116 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1117 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1118 tcg_gen_and_i32(t2, t2, t3);
1119 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1120 tcg_gen_or_i32(t2, t2, t3);
1121 tcg_gen_movi_i32(t3, 0);
1122 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1123 tcg_gen_div_i32(t3, t0, t1);
1124 tcg_gen_extu_i32_tl(ret, t3);
1125 } else {
1126 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
1127 tcg_gen_movi_i32(t3, 0);
1128 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1129 tcg_gen_divu_i32(t3, t0, t1);
1130 tcg_gen_extu_i32_tl(ret, t3);
1132 if (compute_ov) {
1133 tcg_gen_extu_i32_tl(cpu_ov, t2);
1134 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1136 tcg_temp_free_i32(t0);
1137 tcg_temp_free_i32(t1);
1138 tcg_temp_free_i32(t2);
1139 tcg_temp_free_i32(t3);
1141 if (unlikely(Rc(ctx->opcode) != 0))
1142 gen_set_Rc0(ctx, ret);
1144 /* Div functions */
1145 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
1146 static void glue(gen_, name)(DisasContext *ctx) \
1148 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
1149 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1150 sign, compute_ov); \
1152 /* divwu divwu. divwuo divwuo. */
1153 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
1154 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
1155 /* divw divw. divwo divwo. */
1156 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
1157 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
1159 /* div[wd]eu[o][.] */
1160 #define GEN_DIVE(name, hlpr, compute_ov) \
1161 static void gen_##name(DisasContext *ctx) \
1163 TCGv_i32 t0 = tcg_const_i32(compute_ov); \
1164 gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \
1165 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
1166 tcg_temp_free_i32(t0); \
1167 if (unlikely(Rc(ctx->opcode) != 0)) { \
1168 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
1172 GEN_DIVE(divweu, divweu, 0);
1173 GEN_DIVE(divweuo, divweu, 1);
1174 GEN_DIVE(divwe, divwe, 0);
1175 GEN_DIVE(divweo, divwe, 1);
1177 #if defined(TARGET_PPC64)
1178 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
1179 TCGv arg2, int sign, int compute_ov)
1181 TCGv_i64 t0 = tcg_temp_new_i64();
1182 TCGv_i64 t1 = tcg_temp_new_i64();
1183 TCGv_i64 t2 = tcg_temp_new_i64();
1184 TCGv_i64 t3 = tcg_temp_new_i64();
1186 tcg_gen_mov_i64(t0, arg1);
1187 tcg_gen_mov_i64(t1, arg2);
1188 if (sign) {
1189 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1190 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1191 tcg_gen_and_i64(t2, t2, t3);
1192 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1193 tcg_gen_or_i64(t2, t2, t3);
1194 tcg_gen_movi_i64(t3, 0);
1195 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1196 tcg_gen_div_i64(ret, t0, t1);
1197 } else {
1198 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
1199 tcg_gen_movi_i64(t3, 0);
1200 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1201 tcg_gen_divu_i64(ret, t0, t1);
1203 if (compute_ov) {
1204 tcg_gen_mov_tl(cpu_ov, t2);
1205 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1207 tcg_temp_free_i64(t0);
1208 tcg_temp_free_i64(t1);
1209 tcg_temp_free_i64(t2);
1210 tcg_temp_free_i64(t3);
1212 if (unlikely(Rc(ctx->opcode) != 0))
1213 gen_set_Rc0(ctx, ret);
1216 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
1217 static void glue(gen_, name)(DisasContext *ctx) \
1219 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
1220 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1221 sign, compute_ov); \
1223 /* divdu divdu. divduo divduo. */
1224 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1225 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1226 /* divd divd. divdo divdo. */
1227 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1228 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1230 GEN_DIVE(divdeu, divdeu, 0);
1231 GEN_DIVE(divdeuo, divdeu, 1);
1232 GEN_DIVE(divde, divde, 0);
1233 GEN_DIVE(divdeo, divde, 1);
1234 #endif
1236 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
1237 TCGv arg2, int sign)
1239 TCGv_i32 t0 = tcg_temp_new_i32();
1240 TCGv_i32 t1 = tcg_temp_new_i32();
1242 tcg_gen_trunc_tl_i32(t0, arg1);
1243 tcg_gen_trunc_tl_i32(t1, arg2);
1244 if (sign) {
1245 TCGv_i32 t2 = tcg_temp_new_i32();
1246 TCGv_i32 t3 = tcg_temp_new_i32();
1247 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1248 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1249 tcg_gen_and_i32(t2, t2, t3);
1250 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1251 tcg_gen_or_i32(t2, t2, t3);
1252 tcg_gen_movi_i32(t3, 0);
1253 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1254 tcg_gen_rem_i32(t3, t0, t1);
1255 tcg_gen_ext_i32_tl(ret, t3);
1256 tcg_temp_free_i32(t2);
1257 tcg_temp_free_i32(t3);
1258 } else {
1259 TCGv_i32 t2 = tcg_const_i32(1);
1260 TCGv_i32 t3 = tcg_const_i32(0);
1261 tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
1262 tcg_gen_remu_i32(t3, t0, t1);
1263 tcg_gen_extu_i32_tl(ret, t3);
1264 tcg_temp_free_i32(t2);
1265 tcg_temp_free_i32(t3);
1267 tcg_temp_free_i32(t0);
1268 tcg_temp_free_i32(t1);
1271 #define GEN_INT_ARITH_MODW(name, opc3, sign) \
1272 static void glue(gen_, name)(DisasContext *ctx) \
1274 gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \
1275 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1276 sign); \
1279 GEN_INT_ARITH_MODW(moduw, 0x08, 0);
1280 GEN_INT_ARITH_MODW(modsw, 0x18, 1);
1282 #if defined(TARGET_PPC64)
1283 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
1284 TCGv arg2, int sign)
1286 TCGv_i64 t0 = tcg_temp_new_i64();
1287 TCGv_i64 t1 = tcg_temp_new_i64();
1289 tcg_gen_mov_i64(t0, arg1);
1290 tcg_gen_mov_i64(t1, arg2);
1291 if (sign) {
1292 TCGv_i64 t2 = tcg_temp_new_i64();
1293 TCGv_i64 t3 = tcg_temp_new_i64();
1294 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1295 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1296 tcg_gen_and_i64(t2, t2, t3);
1297 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1298 tcg_gen_or_i64(t2, t2, t3);
1299 tcg_gen_movi_i64(t3, 0);
1300 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1301 tcg_gen_rem_i64(ret, t0, t1);
1302 tcg_temp_free_i64(t2);
1303 tcg_temp_free_i64(t3);
1304 } else {
1305 TCGv_i64 t2 = tcg_const_i64(1);
1306 TCGv_i64 t3 = tcg_const_i64(0);
1307 tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
1308 tcg_gen_remu_i64(ret, t0, t1);
1309 tcg_temp_free_i64(t2);
1310 tcg_temp_free_i64(t3);
1312 tcg_temp_free_i64(t0);
1313 tcg_temp_free_i64(t1);
1316 #define GEN_INT_ARITH_MODD(name, opc3, sign) \
1317 static void glue(gen_, name)(DisasContext *ctx) \
1319 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \
1320 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1321 sign); \
1324 GEN_INT_ARITH_MODD(modud, 0x08, 0);
1325 GEN_INT_ARITH_MODD(modsd, 0x18, 1);
1326 #endif
1328 /* mulhw mulhw. */
1329 static void gen_mulhw(DisasContext *ctx)
1331 TCGv_i32 t0 = tcg_temp_new_i32();
1332 TCGv_i32 t1 = tcg_temp_new_i32();
1334 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1335 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1336 tcg_gen_muls2_i32(t0, t1, t0, t1);
1337 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1338 tcg_temp_free_i32(t0);
1339 tcg_temp_free_i32(t1);
1340 if (unlikely(Rc(ctx->opcode) != 0))
1341 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1344 /* mulhwu mulhwu. */
1345 static void gen_mulhwu(DisasContext *ctx)
1347 TCGv_i32 t0 = tcg_temp_new_i32();
1348 TCGv_i32 t1 = tcg_temp_new_i32();
1350 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1351 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1352 tcg_gen_mulu2_i32(t0, t1, t0, t1);
1353 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1354 tcg_temp_free_i32(t0);
1355 tcg_temp_free_i32(t1);
1356 if (unlikely(Rc(ctx->opcode) != 0))
1357 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1360 /* mullw mullw. */
1361 static void gen_mullw(DisasContext *ctx)
1363 #if defined(TARGET_PPC64)
1364 TCGv_i64 t0, t1;
1365 t0 = tcg_temp_new_i64();
1366 t1 = tcg_temp_new_i64();
1367 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
1368 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
1369 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
1370 tcg_temp_free(t0);
1371 tcg_temp_free(t1);
1372 #else
1373 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1374 cpu_gpr[rB(ctx->opcode)]);
1375 #endif
1376 if (unlikely(Rc(ctx->opcode) != 0))
1377 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1380 /* mullwo mullwo. */
1381 static void gen_mullwo(DisasContext *ctx)
1383 TCGv_i32 t0 = tcg_temp_new_i32();
1384 TCGv_i32 t1 = tcg_temp_new_i32();
1386 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1387 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1388 tcg_gen_muls2_i32(t0, t1, t0, t1);
1389 #if defined(TARGET_PPC64)
1390 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
1391 #else
1392 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
1393 #endif
1395 tcg_gen_sari_i32(t0, t0, 31);
1396 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
1397 tcg_gen_extu_i32_tl(cpu_ov, t0);
1398 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1400 tcg_temp_free_i32(t0);
1401 tcg_temp_free_i32(t1);
1402 if (unlikely(Rc(ctx->opcode) != 0))
1403 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1406 /* mulli */
1407 static void gen_mulli(DisasContext *ctx)
1409 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1410 SIMM(ctx->opcode));
1413 #if defined(TARGET_PPC64)
1414 /* mulhd mulhd. */
1415 static void gen_mulhd(DisasContext *ctx)
1417 TCGv lo = tcg_temp_new();
1418 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
1419 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1420 tcg_temp_free(lo);
1421 if (unlikely(Rc(ctx->opcode) != 0)) {
1422 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1426 /* mulhdu mulhdu. */
1427 static void gen_mulhdu(DisasContext *ctx)
1429 TCGv lo = tcg_temp_new();
1430 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
1431 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1432 tcg_temp_free(lo);
1433 if (unlikely(Rc(ctx->opcode) != 0)) {
1434 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1438 /* mulld mulld. */
1439 static void gen_mulld(DisasContext *ctx)
1441 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1442 cpu_gpr[rB(ctx->opcode)]);
1443 if (unlikely(Rc(ctx->opcode) != 0))
1444 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1447 /* mulldo mulldo. */
1448 static void gen_mulldo(DisasContext *ctx)
1450 TCGv_i64 t0 = tcg_temp_new_i64();
1451 TCGv_i64 t1 = tcg_temp_new_i64();
1453 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
1454 cpu_gpr[rB(ctx->opcode)]);
1455 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);
1457 tcg_gen_sari_i64(t0, t0, 63);
1458 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
1459 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1461 tcg_temp_free_i64(t0);
1462 tcg_temp_free_i64(t1);
1464 if (unlikely(Rc(ctx->opcode) != 0)) {
1465 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1468 #endif
1470 /* Common subf function */
1471 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1472 TCGv arg2, bool add_ca, bool compute_ca,
1473 bool compute_ov, bool compute_rc0)
1475 TCGv t0 = ret;
1477 if (compute_ca || compute_ov) {
1478 t0 = tcg_temp_new();
1481 if (compute_ca) {
1482 /* dest = ~arg1 + arg2 [+ ca]. */
1483 if (NARROW_MODE(ctx)) {
1484 /* Caution: a non-obvious corner case of the spec is that we
1485 must produce the *entire* 64-bit addition, but produce the
1486 carry into bit 32. */
1487 TCGv inv1 = tcg_temp_new();
1488 TCGv t1 = tcg_temp_new();
1489 tcg_gen_not_tl(inv1, arg1);
1490 if (add_ca) {
1491 tcg_gen_add_tl(t0, arg2, cpu_ca);
1492 } else {
1493 tcg_gen_addi_tl(t0, arg2, 1);
1495 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */
1496 tcg_gen_add_tl(t0, t0, inv1);
1497 tcg_temp_free(inv1);
1498 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */
1499 tcg_temp_free(t1);
1500 tcg_gen_shri_tl(cpu_ca, cpu_ca, 32); /* extract bit 32 */
1501 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
1502 } else if (add_ca) {
1503 TCGv zero, inv1 = tcg_temp_new();
1504 tcg_gen_not_tl(inv1, arg1);
1505 zero = tcg_const_tl(0);
1506 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
1507 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
1508 tcg_temp_free(zero);
1509 tcg_temp_free(inv1);
1510 } else {
1511 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
1512 tcg_gen_sub_tl(t0, arg2, arg1);
1514 } else if (add_ca) {
1515 /* Since we're ignoring carry-out, we can simplify the
1516 standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. */
1517 tcg_gen_sub_tl(t0, arg2, arg1);
1518 tcg_gen_add_tl(t0, t0, cpu_ca);
1519 tcg_gen_subi_tl(t0, t0, 1);
1520 } else {
1521 tcg_gen_sub_tl(t0, arg2, arg1);
1524 if (compute_ov) {
1525 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1527 if (unlikely(compute_rc0)) {
1528 gen_set_Rc0(ctx, t0);
1531 if (!TCGV_EQUAL(t0, ret)) {
1532 tcg_gen_mov_tl(ret, t0);
1533 tcg_temp_free(t0);
1536 /* Sub functions with two operands */
1537 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
1538 static void glue(gen_, name)(DisasContext *ctx) \
1540 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1541 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1542 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1544 /* Sub functions with one operand and one immediate */
1545 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
1546 add_ca, compute_ca, compute_ov) \
1547 static void glue(gen_, name)(DisasContext *ctx) \
1549 TCGv t0 = tcg_const_tl(const_val); \
1550 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1551 cpu_gpr[rA(ctx->opcode)], t0, \
1552 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1553 tcg_temp_free(t0); \
1555 /* subf subf. subfo subfo. */
1556 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
1557 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
1558 /* subfc subfc. subfco subfco. */
1559 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
1560 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
1561 /* subfe subfe. subfeo subfeo. */
1562 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
1563 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
1564 /* subfme subfme. subfmeo subfmeo. */
1565 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
1566 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
1567 /* subfze subfze. subfzeo subfzeo. */
1568 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
1569 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
1571 /* subfic */
1572 static void gen_subfic(DisasContext *ctx)
1574 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
1575 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1576 c, 0, 1, 0, 0);
1577 tcg_temp_free(c);
1580 /* neg neg. nego nego. */
1581 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
1583 TCGv zero = tcg_const_tl(0);
1584 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1585 zero, 0, 0, compute_ov, Rc(ctx->opcode));
1586 tcg_temp_free(zero);
1589 static void gen_neg(DisasContext *ctx)
1591 gen_op_arith_neg(ctx, 0);
1594 static void gen_nego(DisasContext *ctx)
1596 gen_op_arith_neg(ctx, 1);
1599 /*** Integer logical ***/
1600 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
1601 static void glue(gen_, name)(DisasContext *ctx) \
1603 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
1604 cpu_gpr[rB(ctx->opcode)]); \
1605 if (unlikely(Rc(ctx->opcode) != 0)) \
1606 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1609 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
1610 static void glue(gen_, name)(DisasContext *ctx) \
1612 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
1613 if (unlikely(Rc(ctx->opcode) != 0)) \
1614 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1617 /* and & and. */
1618 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
1619 /* andc & andc. */
1620 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
1622 /* andi. */
1623 static void gen_andi_(DisasContext *ctx)
1625 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
1626 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1629 /* andis. */
1630 static void gen_andis_(DisasContext *ctx)
1632 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
1633 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1636 /* cntlzw */
1637 static void gen_cntlzw(DisasContext *ctx)
1639 gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1640 if (unlikely(Rc(ctx->opcode) != 0))
1641 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1644 /* cnttzw */
1645 static void gen_cnttzw(DisasContext *ctx)
1647 gen_helper_cnttzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1648 if (unlikely(Rc(ctx->opcode) != 0)) {
1649 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1653 /* eqv & eqv. */
1654 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
1655 /* extsb & extsb. */
1656 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
1657 /* extsh & extsh. */
1658 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
1659 /* nand & nand. */
1660 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
1661 /* nor & nor. */
1662 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
1664 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
1665 static void gen_pause(DisasContext *ctx)
1667 TCGv_i32 t0 = tcg_const_i32(0);
1668 tcg_gen_st_i32(t0, cpu_env,
1669 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
1670 tcg_temp_free_i32(t0);
1672 /* Stop translation, this gives other CPUs a chance to run */
1673 gen_exception_nip(ctx, EXCP_HLT, ctx->nip);
1675 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
1677 /* or & or. */
1678 static void gen_or(DisasContext *ctx)
1680 int rs, ra, rb;
1682 rs = rS(ctx->opcode);
1683 ra = rA(ctx->opcode);
1684 rb = rB(ctx->opcode);
1685 /* Optimisation for mr. ri case */
1686 if (rs != ra || rs != rb) {
1687 if (rs != rb)
1688 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
1689 else
1690 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
1691 if (unlikely(Rc(ctx->opcode) != 0))
1692 gen_set_Rc0(ctx, cpu_gpr[ra]);
1693 } else if (unlikely(Rc(ctx->opcode) != 0)) {
1694 gen_set_Rc0(ctx, cpu_gpr[rs]);
1695 #if defined(TARGET_PPC64)
1696 } else if (rs != 0) { /* 0 is nop */
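        /* "or rX,rX,rX" with a non-zero register is an architected no-op used
         * as a thread-priority hint; the switch below maps the register number
         * to a priority value and stores it into the PPR priority field. */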
1697 int prio = 0;
1699 switch (rs) {
1700 case 1:
1701 /* Set process priority to low */
1702 prio = 2;
1703 break;
1704 case 6:
1705 /* Set process priority to medium-low */
1706 prio = 3;
1707 break;
1708 case 2:
1709 /* Set process priority to normal */
1710 prio = 4;
1711 break;
1712 #if !defined(CONFIG_USER_ONLY)
1713 case 31:
1714 if (!ctx->pr) {
1715 /* Set process priority to very low */
1716 prio = 1;
1718 break;
1719 case 5:
1720 if (!ctx->pr) {
1721 /* Set process priority to medium-high */
1722 prio = 5;
1724 break;
1725 case 3:
1726 if (!ctx->pr) {
1727 /* Set process priority to high */
1728 prio = 6;
1730 break;
1731 case 7:
1732 if (ctx->hv && !ctx->pr) {
1733 /* Set process priority to very high */
1734 prio = 7;
1736 break;
1737 #endif
1738 default:
1739 break;
1741 if (prio) {
1742 TCGv t0 = tcg_temp_new();
1743 gen_load_spr(t0, SPR_PPR);
1744 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
1745 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
1746 gen_store_spr(SPR_PPR, t0);
1747 tcg_temp_free(t0);
1749 #if !defined(CONFIG_USER_ONLY)
1750 /* Pause out of TCG otherwise spin loops with smt_low eat too much
1751 * CPU and the kernel hangs. This applies to all encodings other
1752 * than no-op, e.g., miso(rs=26), yield(27), mdoio(29), mdoom(30),
1753 * and all currently undefined. */
1755 gen_pause(ctx);
1756 #endif
1757 #endif
1760 /* orc & orc. */
1761 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
1763 /* xor & xor. */
1764 static void gen_xor(DisasContext *ctx)
1766 /* Optimisation for "set to zero" case */
1767 if (rS(ctx->opcode) != rB(ctx->opcode))
1768 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1769 else
1770 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1771 if (unlikely(Rc(ctx->opcode) != 0))
1772 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1775 /* ori */
1776 static void gen_ori(DisasContext *ctx)
1778 target_ulong uimm = UIMM(ctx->opcode);
1780 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1781 return;
1783 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1786 /* oris */
1787 static void gen_oris(DisasContext *ctx)
1789 target_ulong uimm = UIMM(ctx->opcode);
1791 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1792 /* NOP */
1793 return;
1795 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1798 /* xori */
1799 static void gen_xori(DisasContext *ctx)
1801 target_ulong uimm = UIMM(ctx->opcode);
1803 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1804 /* NOP */
1805 return;
1807 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1810 /* xoris */
1811 static void gen_xoris(DisasContext *ctx)
1813 target_ulong uimm = UIMM(ctx->opcode);
1815 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1816 /* NOP */
1817 return;
1819 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1822 /* popcntb : PowerPC 2.03 specification */
1823 static void gen_popcntb(DisasContext *ctx)
1825 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1828 static void gen_popcntw(DisasContext *ctx)
1830 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1833 #if defined(TARGET_PPC64)
1834 /* popcntd: PowerPC 2.06 specification */
1835 static void gen_popcntd(DisasContext *ctx)
1837 gen_helper_popcntd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1839 #endif
1841 /* prtyw: PowerPC 2.05 specification */
1842 static void gen_prtyw(DisasContext *ctx)
1844 TCGv ra = cpu_gpr[rA(ctx->opcode)];
1845 TCGv rs = cpu_gpr[rS(ctx->opcode)];
1846 TCGv t0 = tcg_temp_new();
1847 tcg_gen_shri_tl(t0, rs, 16);
1848 tcg_gen_xor_tl(ra, rs, t0);
1849 tcg_gen_shri_tl(t0, ra, 8);
1850 tcg_gen_xor_tl(ra, ra, t0);
1851 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
1852 tcg_temp_free(t0);
1855 #if defined(TARGET_PPC64)
1856 /* prtyd: PowerPC 2.05 specification */
1857 static void gen_prtyd(DisasContext *ctx)
1859 TCGv ra = cpu_gpr[rA(ctx->opcode)];
1860 TCGv rs = cpu_gpr[rS(ctx->opcode)];
1861 TCGv t0 = tcg_temp_new();
1862 tcg_gen_shri_tl(t0, rs, 32);
1863 tcg_gen_xor_tl(ra, rs, t0);
1864 tcg_gen_shri_tl(t0, ra, 16);
1865 tcg_gen_xor_tl(ra, ra, t0);
1866 tcg_gen_shri_tl(t0, ra, 8);
1867 tcg_gen_xor_tl(ra, ra, t0);
1868 tcg_gen_andi_tl(ra, ra, 1);
1869 tcg_temp_free(t0);
1871 #endif
1873 #if defined(TARGET_PPC64)
1874 /* bpermd */
1875 static void gen_bpermd(DisasContext *ctx)
1877 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
1878 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1880 #endif
1882 #if defined(TARGET_PPC64)
1883 /* extsw & extsw. */
1884 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
1886 /* cntlzd */
1887 static void gen_cntlzd(DisasContext *ctx)
1889 gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1890 if (unlikely(Rc(ctx->opcode) != 0))
1891 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1894 /* cnttzd */
1895 static void gen_cnttzd(DisasContext *ctx)
1897 gen_helper_cnttzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1898 if (unlikely(Rc(ctx->opcode) != 0)) {
1899 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1903 /* darn */
1904 static void gen_darn(DisasContext *ctx)
1906 int l = L(ctx->opcode);
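    /* The L field selects the darn form: 0 is a conditioned 32-bit random
     * number, 1 and 2 are the conditioned (CRN) and raw (RRN) 64-bit forms,
     * both served by darn64 here, and 3 is reserved, for which the error
     * value -1 is returned. */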
1908 if (l == 0) {
1909 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
1910 } else if (l <= 2) {
1911 /* Return 64-bit random for both CRN and RRN */
1912 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
1913 } else {
1914 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
1917 #endif
1919 /*** Integer rotate ***/
1921 /* rlwimi & rlwimi. */
1922 static void gen_rlwimi(DisasContext *ctx)
1924 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
1925 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
1926 uint32_t sh = SH(ctx->opcode);
1927 uint32_t mb = MB(ctx->opcode);
1928 uint32_t me = ME(ctx->opcode);
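    /* Fast path: when the rotation lines the source field up with its
     * destination (sh == 31 - me) and the mask is contiguous (mb <= me),
     * rlwimi is a plain bit-field deposit; otherwise fall back to
     * rotate + mask + merge. */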
1930 if (sh == (31-me) && mb <= me) {
1931 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
1932 } else {
1933 target_ulong mask;
1934 TCGv t1;
1936 #if defined(TARGET_PPC64)
1937 mb += 32;
1938 me += 32;
1939 #endif
1940 mask = MASK(mb, me);
1942 t1 = tcg_temp_new();
1943 if (mask <= 0xffffffffu) {
1944 TCGv_i32 t0 = tcg_temp_new_i32();
1945 tcg_gen_trunc_tl_i32(t0, t_rs);
1946 tcg_gen_rotli_i32(t0, t0, sh);
1947 tcg_gen_extu_i32_tl(t1, t0);
1948 tcg_temp_free_i32(t0);
1949 } else {
1950 #if defined(TARGET_PPC64)
1951 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
1952 tcg_gen_rotli_i64(t1, t1, sh);
1953 #else
1954 g_assert_not_reached();
1955 #endif
1958 tcg_gen_andi_tl(t1, t1, mask);
1959 tcg_gen_andi_tl(t_ra, t_ra, ~mask);
1960 tcg_gen_or_tl(t_ra, t_ra, t1);
1961 tcg_temp_free(t1);
1963 if (unlikely(Rc(ctx->opcode) != 0)) {
1964 gen_set_Rc0(ctx, t_ra);
1968 /* rlwinm & rlwinm. */
1969 static void gen_rlwinm(DisasContext *ctx)
1971 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
1972 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
1973 uint32_t sh = SH(ctx->opcode);
1974 uint32_t mb = MB(ctx->opcode);
1975 uint32_t me = ME(ctx->opcode);
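    /* The first two cases recognize the canonical slwi (mb == 0,
     * me == 31 - sh) and srwi (me == 31, sh == 32 - mb) encodings; everything
     * else goes through rotate + mask. */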
1977 if (mb == 0 && me == (31 - sh)) {
1978 tcg_gen_shli_tl(t_ra, t_rs, sh);
1979 tcg_gen_ext32u_tl(t_ra, t_ra);
1980 } else if (sh != 0 && me == 31 && sh == (32 - mb)) {
1981 tcg_gen_ext32u_tl(t_ra, t_rs);
1982 tcg_gen_shri_tl(t_ra, t_ra, mb);
1983 } else {
1984 target_ulong mask;
1985 #if defined(TARGET_PPC64)
1986 mb += 32;
1987 me += 32;
1988 #endif
1989 mask = MASK(mb, me);
1991 if (mask <= 0xffffffffu) {
1992 TCGv_i32 t0 = tcg_temp_new_i32();
1993 tcg_gen_trunc_tl_i32(t0, t_rs);
1994 tcg_gen_rotli_i32(t0, t0, sh);
1995 tcg_gen_andi_i32(t0, t0, mask);
1996 tcg_gen_extu_i32_tl(t_ra, t0);
1997 tcg_temp_free_i32(t0);
1998 } else {
1999 #if defined(TARGET_PPC64)
2000 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2001 tcg_gen_rotli_i64(t_ra, t_ra, sh);
2002 tcg_gen_andi_i64(t_ra, t_ra, mask);
2003 #else
2004 g_assert_not_reached();
2005 #endif
2008 if (unlikely(Rc(ctx->opcode) != 0)) {
2009 gen_set_Rc0(ctx, t_ra);
2013 /* rlwnm & rlwnm. */
2014 static void gen_rlwnm(DisasContext *ctx)
2016 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2017 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2018 TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2019 uint32_t mb = MB(ctx->opcode);
2020 uint32_t me = ME(ctx->opcode);
2021 target_ulong mask;
2023 #if defined(TARGET_PPC64)
2024 mb += 32;
2025 me += 32;
2026 #endif
2027 mask = MASK(mb, me);
2029 if (mask <= 0xffffffffu) {
2030 TCGv_i32 t0 = tcg_temp_new_i32();
2031 TCGv_i32 t1 = tcg_temp_new_i32();
2032 tcg_gen_trunc_tl_i32(t0, t_rb);
2033 tcg_gen_trunc_tl_i32(t1, t_rs);
2034 tcg_gen_andi_i32(t0, t0, 0x1f);
2035 tcg_gen_rotl_i32(t1, t1, t0);
2036 tcg_gen_extu_i32_tl(t_ra, t1);
2037 tcg_temp_free_i32(t0);
2038 tcg_temp_free_i32(t1);
2039 } else {
2040 #if defined(TARGET_PPC64)
2041 TCGv_i64 t0 = tcg_temp_new_i64();
2042 tcg_gen_andi_i64(t0, t_rb, 0x1f);
2043 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2044 tcg_gen_rotl_i64(t_ra, t_ra, t0);
2045 tcg_temp_free_i64(t0);
2046 #else
2047 g_assert_not_reached();
2048 #endif
2051 tcg_gen_andi_tl(t_ra, t_ra, mask);
2053 if (unlikely(Rc(ctx->opcode) != 0)) {
2054 gen_set_Rc0(ctx, t_ra);
2058 #if defined(TARGET_PPC64)
2059 #define GEN_PPC64_R2(name, opc1, opc2) \
2060 static void glue(gen_, name##0)(DisasContext *ctx) \
2062 gen_##name(ctx, 0); \
2065 static void glue(gen_, name##1)(DisasContext *ctx) \
2067 gen_##name(ctx, 1); \
2069 #define GEN_PPC64_R4(name, opc1, opc2) \
2070 static void glue(gen_, name##0)(DisasContext *ctx) \
2072 gen_##name(ctx, 0, 0); \
2075 static void glue(gen_, name##1)(DisasContext *ctx) \
2077 gen_##name(ctx, 0, 1); \
2080 static void glue(gen_, name##2)(DisasContext *ctx) \
2082 gen_##name(ctx, 1, 0); \
2085 static void glue(gen_, name##3)(DisasContext *ctx) \
2087 gen_##name(ctx, 1, 1); \
2090 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
2092 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2093 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2095 if (sh != 0 && mb == 0 && me == (63 - sh)) {
2096 tcg_gen_shli_tl(t_ra, t_rs, sh);
2097 } else if (sh != 0 && me == 63 && sh == (64 - mb)) {
2098 tcg_gen_shri_tl(t_ra, t_rs, mb);
2099 } else {
2100 tcg_gen_rotli_tl(t_ra, t_rs, sh);
2101 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2103 if (unlikely(Rc(ctx->opcode) != 0)) {
2104 gen_set_Rc0(ctx, t_ra);
2108 /* rldicl - rldicl. */
2109 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
2111 uint32_t sh, mb;
2113 sh = SH(ctx->opcode) | (shn << 5);
2114 mb = MB(ctx->opcode) | (mbn << 5);
2115 gen_rldinm(ctx, mb, 63, sh);
2117 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
2119 /* rldicr - rldicr. */
2120 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
2122 uint32_t sh, me;
2124 sh = SH(ctx->opcode) | (shn << 5);
2125 me = MB(ctx->opcode) | (men << 5);
2126 gen_rldinm(ctx, 0, me, sh);
2128 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2130 /* rldic - rldic. */
2131 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
2133 uint32_t sh, mb;
2135 sh = SH(ctx->opcode) | (shn << 5);
2136 mb = MB(ctx->opcode) | (mbn << 5);
2137 gen_rldinm(ctx, mb, 63 - sh, sh);
2139 GEN_PPC64_R4(rldic, 0x1E, 0x04);
2141 static void gen_rldnm(DisasContext *ctx, int mb, int me)
2143 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2144 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2145 TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2146 TCGv t0;
2148 t0 = tcg_temp_new();
2149 tcg_gen_andi_tl(t0, t_rb, 0x3f);
2150 tcg_gen_rotl_tl(t_ra, t_rs, t0);
2151 tcg_temp_free(t0);
2153 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2154 if (unlikely(Rc(ctx->opcode) != 0)) {
2155 gen_set_Rc0(ctx, t_ra);
2159 /* rldcl - rldcl. */
2160 static inline void gen_rldcl(DisasContext *ctx, int mbn)
2162 uint32_t mb;
2164 mb = MB(ctx->opcode) | (mbn << 5);
2165 gen_rldnm(ctx, mb, 63);
2167 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2169 /* rldcr - rldcr. */
2170 static inline void gen_rldcr(DisasContext *ctx, int men)
2172 uint32_t me;
2174 me = MB(ctx->opcode) | (men << 5);
2175 gen_rldnm(ctx, 0, me);
2177 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2179 /* rldimi - rldimi. */
2180 static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
2182 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2183 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2184 uint32_t sh = SH(ctx->opcode) | (shn << 5);
2185 uint32_t mb = MB(ctx->opcode) | (mbn << 5);
2186 uint32_t me = 63 - sh;
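/* With mb <= me the inserted field is contiguous after rotation, so a
 * single deposit is enough; otherwise fall back to rotate, mask and merge. */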
2188 if (mb <= me) {
2189 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2190 } else {
2191 target_ulong mask = MASK(mb, me);
2192 TCGv t1 = tcg_temp_new();
2194 tcg_gen_rotli_tl(t1, t_rs, sh);
2195 tcg_gen_andi_tl(t1, t1, mask);
2196 tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2197 tcg_gen_or_tl(t_ra, t_ra, t1);
2198 tcg_temp_free(t1);
2200 if (unlikely(Rc(ctx->opcode) != 0)) {
2201 gen_set_Rc0(ctx, t_ra);
2204 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
2205 #endif
2207 /*** Integer shift ***/
2209 /* slw & slw. */
2210 static void gen_slw(DisasContext *ctx)
2212 TCGv t0, t1;
2214 t0 = tcg_temp_new();
2215 /* AND rS with a mask that is 0 when rB >= 0x20 */
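/* Shift rB's 0x20 bit into the sign position and arithmetic-shift it back,
 * so t0 is all ones exactly when that bit is set; the andc below then
 * zeroes the source in that case. */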
2216 #if defined(TARGET_PPC64)
2217 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2218 tcg_gen_sari_tl(t0, t0, 0x3f);
2219 #else
2220 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2221 tcg_gen_sari_tl(t0, t0, 0x1f);
2222 #endif
2223 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2224 t1 = tcg_temp_new();
2225 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2226 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2227 tcg_temp_free(t1);
2228 tcg_temp_free(t0);
2229 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2230 if (unlikely(Rc(ctx->opcode) != 0))
2231 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2234 /* sraw & sraw. */
2235 static void gen_sraw(DisasContext *ctx)
2237 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
2238 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2239 if (unlikely(Rc(ctx->opcode) != 0))
2240 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2243 /* srawi & srawi. */
2244 static void gen_srawi(DisasContext *ctx)
2246 int sh = SH(ctx->opcode);
2247 TCGv dst = cpu_gpr[rA(ctx->opcode)];
2248 TCGv src = cpu_gpr[rS(ctx->opcode)];
2249 if (sh == 0) {
2250 tcg_gen_ext32s_tl(dst, src);
2251 tcg_gen_movi_tl(cpu_ca, 0);
2252 } else {
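/* CA is set when the source is negative and at least one 1 bit is shifted
 * out: mask the low sh bits and AND them with the sign. */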
2253 TCGv t0;
2254 tcg_gen_ext32s_tl(dst, src);
2255 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
2256 t0 = tcg_temp_new();
2257 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
2258 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2259 tcg_temp_free(t0);
2260 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2261 tcg_gen_sari_tl(dst, dst, sh);
2263 if (unlikely(Rc(ctx->opcode) != 0)) {
2264 gen_set_Rc0(ctx, dst);
2268 /* srw & srw. */
2269 static void gen_srw(DisasContext *ctx)
2271 TCGv t0, t1;
2273 t0 = tcg_temp_new();
2274 /* AND rS with a mask that is 0 when rB >= 0x20 */
2275 #if defined(TARGET_PPC64)
2276 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2277 tcg_gen_sari_tl(t0, t0, 0x3f);
2278 #else
2279 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2280 tcg_gen_sari_tl(t0, t0, 0x1f);
2281 #endif
2282 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2283 tcg_gen_ext32u_tl(t0, t0);
2284 t1 = tcg_temp_new();
2285 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2286 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2287 tcg_temp_free(t1);
2288 tcg_temp_free(t0);
2289 if (unlikely(Rc(ctx->opcode) != 0))
2290 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2293 #if defined(TARGET_PPC64)
2294 /* sld & sld. */
2295 static void gen_sld(DisasContext *ctx)
2297 TCGv t0, t1;
2299 t0 = tcg_temp_new();
2300 /* AND rS with a mask that is 0 when rB >= 0x40 */
2301 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2302 tcg_gen_sari_tl(t0, t0, 0x3f);
2303 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2304 t1 = tcg_temp_new();
2305 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2306 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2307 tcg_temp_free(t1);
2308 tcg_temp_free(t0);
2309 if (unlikely(Rc(ctx->opcode) != 0))
2310 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2313 /* srad & srad. */
2314 static void gen_srad(DisasContext *ctx)
2316 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
2317 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2318 if (unlikely(Rc(ctx->opcode) != 0))
2319 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2321 /* sradi & sradi. */
2322 static inline void gen_sradi(DisasContext *ctx, int n)
2324 int sh = SH(ctx->opcode) + (n << 5);
2325 TCGv dst = cpu_gpr[rA(ctx->opcode)];
2326 TCGv src = cpu_gpr[rS(ctx->opcode)];
2327 if (sh == 0) {
2328 tcg_gen_mov_tl(dst, src);
2329 tcg_gen_movi_tl(cpu_ca, 0);
2330 } else {
2331 TCGv t0;
2332 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
2333 t0 = tcg_temp_new();
2334 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
2335 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2336 tcg_temp_free(t0);
2337 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2338 tcg_gen_sari_tl(dst, src, sh);
2340 if (unlikely(Rc(ctx->opcode) != 0)) {
2341 gen_set_Rc0(ctx, dst);
2345 static void gen_sradi0(DisasContext *ctx)
2347 gen_sradi(ctx, 0);
2350 static void gen_sradi1(DisasContext *ctx)
2352 gen_sradi(ctx, 1);
2355 /* extswsli & extswsli. */
2356 static inline void gen_extswsli(DisasContext *ctx, int n)
2358 int sh = SH(ctx->opcode) + (n << 5);
2359 TCGv dst = cpu_gpr[rA(ctx->opcode)];
2360 TCGv src = cpu_gpr[rS(ctx->opcode)];
2362 tcg_gen_ext32s_tl(dst, src);
2363 tcg_gen_shli_tl(dst, dst, sh);
2364 if (unlikely(Rc(ctx->opcode) != 0)) {
2365 gen_set_Rc0(ctx, dst);
2369 static void gen_extswsli0(DisasContext *ctx)
2371 gen_extswsli(ctx, 0);
2374 static void gen_extswsli1(DisasContext *ctx)
2376 gen_extswsli(ctx, 1);
2379 /* srd & srd. */
2380 static void gen_srd(DisasContext *ctx)
2382 TCGv t0, t1;
2384 t0 = tcg_temp_new();
2385 /* AND rS with a mask that is 0 when rB >= 0x40 */
2386 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2387 tcg_gen_sari_tl(t0, t0, 0x3f);
2388 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2389 t1 = tcg_temp_new();
2390 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2391 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2392 tcg_temp_free(t1);
2393 tcg_temp_free(t0);
2394 if (unlikely(Rc(ctx->opcode) != 0))
2395 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2397 #endif
2399 /*** Addressing modes ***/
2400 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
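/* maskl clears the low-order SIMM bits that the DS-form (0x03) and lq's
 * DQ-form (0x0F) encodings do not treat as part of the displacement. */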
2401 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2402 target_long maskl)
2404 target_long simm = SIMM(ctx->opcode);
2406 simm &= ~maskl;
2407 if (rA(ctx->opcode) == 0) {
2408 if (NARROW_MODE(ctx)) {
2409 simm = (uint32_t)simm;
2411 tcg_gen_movi_tl(EA, simm);
2412 } else if (likely(simm != 0)) {
2413 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2414 if (NARROW_MODE(ctx)) {
2415 tcg_gen_ext32u_tl(EA, EA);
2417 } else {
2418 if (NARROW_MODE(ctx)) {
2419 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2420 } else {
2421 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2426 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
2428 if (rA(ctx->opcode) == 0) {
2429 if (NARROW_MODE(ctx)) {
2430 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2431 } else {
2432 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2434 } else {
2435 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2436 if (NARROW_MODE(ctx)) {
2437 tcg_gen_ext32u_tl(EA, EA);
2442 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
2444 if (rA(ctx->opcode) == 0) {
2445 tcg_gen_movi_tl(EA, 0);
2446 } else if (NARROW_MODE(ctx)) {
2447 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2448 } else {
2449 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2453 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
2454 target_long val)
2456 tcg_gen_addi_tl(ret, arg1, val);
2457 if (NARROW_MODE(ctx)) {
2458 tcg_gen_ext32u_tl(ret, ret);
2462 static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask)
2464 TCGLabel *l1 = gen_new_label();
2465 TCGv t0 = tcg_temp_new();
2466 TCGv_i32 t1, t2;
2467 tcg_gen_andi_tl(t0, EA, mask);
2468 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2469 t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
2470 t2 = tcg_const_i32(ctx->opcode & 0x03FF0000);
2471 gen_update_nip(ctx, ctx->nip - 4);
2472 gen_helper_raise_exception_err(cpu_env, t1, t2);
2473 tcg_temp_free_i32(t1);
2474 tcg_temp_free_i32(t2);
2475 gen_set_label(l1);
2476 tcg_temp_free(t0);
2479 static inline void gen_align_no_le(DisasContext *ctx)
2481 gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
2482 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
2485 /*** Integer load ***/
2486 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
2487 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
2489 #define GEN_QEMU_LOAD_TL(ldop, op) \
2490 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \
2491 TCGv val, \
2492 TCGv addr) \
2494 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \
2497 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB))
2498 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
2499 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
2500 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
2501 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
2503 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
2504 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
2506 #define GEN_QEMU_LOAD_64(ldop, op) \
2507 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \
2508 TCGv_i64 val, \
2509 TCGv addr) \
2511 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \
2514 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB))
2515 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
2516 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
2517 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
2518 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q))
2520 #if defined(TARGET_PPC64)
2521 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q))
2522 #endif
2524 #define GEN_QEMU_STORE_TL(stop, op) \
2525 static void glue(gen_qemu_, stop)(DisasContext *ctx, \
2526 TCGv val, \
2527 TCGv addr) \
2529 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \
2532 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB))
2533 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
2534 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
2536 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
2537 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
2539 #define GEN_QEMU_STORE_64(stop, op) \
2540 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \
2541 TCGv_i64 val, \
2542 TCGv addr) \
2544 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \
2547 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB))
2548 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
2549 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
2550 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q))
2552 #if defined(TARGET_PPC64)
2553 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q))
2554 #endif
2556 #define GEN_LD(name, ldop, opc, type) \
2557 static void glue(gen_, name)(DisasContext *ctx) \
2559 TCGv EA; \
2560 gen_set_access_type(ctx, ACCESS_INT); \
2561 EA = tcg_temp_new(); \
2562 gen_addr_imm_index(ctx, EA, 0); \
2563 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2564 tcg_temp_free(EA); \
2567 #define GEN_LDU(name, ldop, opc, type) \
2568 static void glue(gen_, name##u)(DisasContext *ctx) \
2570 TCGv EA; \
2571 if (unlikely(rA(ctx->opcode) == 0 || \
2572 rA(ctx->opcode) == rD(ctx->opcode))) { \
2573 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2574 return; \
2576 gen_set_access_type(ctx, ACCESS_INT); \
2577 EA = tcg_temp_new(); \
2578 if (type == PPC_64B) \
2579 gen_addr_imm_index(ctx, EA, 0x03); \
2580 else \
2581 gen_addr_imm_index(ctx, EA, 0); \
2582 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2583 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2584 tcg_temp_free(EA); \
2587 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
2588 static void glue(gen_, name##ux)(DisasContext *ctx) \
2590 TCGv EA; \
2591 if (unlikely(rA(ctx->opcode) == 0 || \
2592 rA(ctx->opcode) == rD(ctx->opcode))) { \
2593 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2594 return; \
2596 gen_set_access_type(ctx, ACCESS_INT); \
2597 EA = tcg_temp_new(); \
2598 gen_addr_reg_index(ctx, EA); \
2599 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2600 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2601 tcg_temp_free(EA); \
2604 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \
2605 static void glue(gen_, name##x)(DisasContext *ctx) \
2607 TCGv EA; \
2608 chk; \
2609 gen_set_access_type(ctx, ACCESS_INT); \
2610 EA = tcg_temp_new(); \
2611 gen_addr_reg_index(ctx, EA); \
2612 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2613 tcg_temp_free(EA); \
2616 #define GEN_LDX(name, ldop, opc2, opc3, type) \
2617 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
2619 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \
2620 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
2622 #define GEN_LDS(name, ldop, op, type) \
2623 GEN_LD(name, ldop, op | 0x20, type); \
2624 GEN_LDU(name, ldop, op | 0x21, type); \
2625 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
2626 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
2628 /* lbz lbzu lbzux lbzx */
2629 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
2630 /* lha lhau lhaux lhax */
2631 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
2632 /* lhz lhzu lhzux lhzx */
2633 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
2634 /* lwz lwzu lwzux lwzx */
2635 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
2636 #if defined(TARGET_PPC64)
2637 /* lwaux */
2638 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
2639 /* lwax */
2640 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
2641 /* ldux */
2642 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B);
2643 /* ldx */
2644 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B);
2646 /* CI load/store variants */
2647 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
2648 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
2649 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
2650 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
2652 static void gen_ld(DisasContext *ctx)
2654 TCGv EA;
2655 if (Rc(ctx->opcode)) {
2656 if (unlikely(rA(ctx->opcode) == 0 ||
2657 rA(ctx->opcode) == rD(ctx->opcode))) {
2658 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2659 return;
2662 gen_set_access_type(ctx, ACCESS_INT);
2663 EA = tcg_temp_new();
2664 gen_addr_imm_index(ctx, EA, 0x03);
2665 if (ctx->opcode & 0x02) {
2666 /* lwa (lwau is undefined) */
2667 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2668 } else {
2669 /* ld - ldu */
2670 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2672 if (Rc(ctx->opcode))
2673 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2674 tcg_temp_free(EA);
2677 /* lq */
2678 static void gen_lq(DisasContext *ctx)
2680 int ra, rd;
2681 TCGv EA;
2683 /* lq is a legal user mode instruction starting in ISA 2.07 */
2684 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
2685 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
2687 if (!legal_in_user_mode && ctx->pr) {
2688 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2689 return;
2692 if (!le_is_supported && ctx->le_mode) {
2693 gen_align_no_le(ctx);
2694 return;
2696 ra = rA(ctx->opcode);
2697 rd = rD(ctx->opcode);
2698 if (unlikely((rd & 1) || rd == ra)) {
2699 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2700 return;
2703 gen_set_access_type(ctx, ACCESS_INT);
2704 EA = tcg_temp_new();
2705 gen_addr_imm_index(ctx, EA, 0x0F);
2707 /* We only need to swap the high and low halves; gen_qemu_ld64_i64 does
2708 the necessary 64-bit byteswap already. */
2709 if (unlikely(ctx->le_mode)) {
2710 gen_qemu_ld64_i64(ctx, cpu_gpr[rd + 1], EA);
2711 gen_addr_add(ctx, EA, EA, 8);
2712 gen_qemu_ld64_i64(ctx, cpu_gpr[rd], EA);
2713 } else {
2714 gen_qemu_ld64_i64(ctx, cpu_gpr[rd], EA);
2715 gen_addr_add(ctx, EA, EA, 8);
2716 gen_qemu_ld64_i64(ctx, cpu_gpr[rd + 1], EA);
2718 tcg_temp_free(EA);
2720 #endif
2722 /*** Integer store ***/
2723 #define GEN_ST(name, stop, opc, type) \
2724 static void glue(gen_, name)(DisasContext *ctx) \
2726 TCGv EA; \
2727 gen_set_access_type(ctx, ACCESS_INT); \
2728 EA = tcg_temp_new(); \
2729 gen_addr_imm_index(ctx, EA, 0); \
2730 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2731 tcg_temp_free(EA); \
2734 #define GEN_STU(name, stop, opc, type) \
2735 static void glue(gen_, stop##u)(DisasContext *ctx) \
2737 TCGv EA; \
2738 if (unlikely(rA(ctx->opcode) == 0)) { \
2739 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2740 return; \
2742 gen_set_access_type(ctx, ACCESS_INT); \
2743 EA = tcg_temp_new(); \
2744 if (type == PPC_64B) \
2745 gen_addr_imm_index(ctx, EA, 0x03); \
2746 else \
2747 gen_addr_imm_index(ctx, EA, 0); \
2748 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2749 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2750 tcg_temp_free(EA); \
2753 #define GEN_STUX(name, stop, opc2, opc3, type) \
2754 static void glue(gen_, name##ux)(DisasContext *ctx) \
2756 TCGv EA; \
2757 if (unlikely(rA(ctx->opcode) == 0)) { \
2758 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2759 return; \
2761 gen_set_access_type(ctx, ACCESS_INT); \
2762 EA = tcg_temp_new(); \
2763 gen_addr_reg_index(ctx, EA); \
2764 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2765 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2766 tcg_temp_free(EA); \
2769 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \
2770 static void glue(gen_, name##x)(DisasContext *ctx) \
2772 TCGv EA; \
2773 chk; \
2774 gen_set_access_type(ctx, ACCESS_INT); \
2775 EA = tcg_temp_new(); \
2776 gen_addr_reg_index(ctx, EA); \
2777 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2778 tcg_temp_free(EA); \
2780 #define GEN_STX(name, stop, opc2, opc3, type) \
2781 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
2783 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \
2784 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
2786 #define GEN_STS(name, stop, op, type) \
2787 GEN_ST(name, stop, op | 0x20, type); \
2788 GEN_STU(name, stop, op | 0x21, type); \
2789 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
2790 GEN_STX(name, stop, 0x17, op | 0x00, type)
2792 /* stb stbu stbux stbx */
2793 GEN_STS(stb, st8, 0x06, PPC_INTEGER);
2794 /* sth sthu sthux sthx */
2795 GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
2796 /* stw stwu stwux stwx */
2797 GEN_STS(stw, st32, 0x04, PPC_INTEGER);
2798 #if defined(TARGET_PPC64)
2799 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B);
2800 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B);
2801 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
2802 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
2803 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
2804 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
2806 static void gen_std(DisasContext *ctx)
2808 int rs;
2809 TCGv EA;
2811 rs = rS(ctx->opcode);
2812 if ((ctx->opcode & 0x3) == 0x2) { /* stq */
2813 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
2814 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
2816 if (!(ctx->insns_flags & PPC_64BX)) {
2817 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2820 if (!legal_in_user_mode && ctx->pr) {
2821 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2822 return;
2825 if (!le_is_supported && ctx->le_mode) {
2826 gen_align_no_le(ctx);
2827 return;
2830 if (unlikely(rs & 1)) {
2831 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2832 return;
2834 gen_set_access_type(ctx, ACCESS_INT);
2835 EA = tcg_temp_new();
2836 gen_addr_imm_index(ctx, EA, 0x03);
2838 /* We only need to swap the high and low halves; gen_qemu_st64_i64 does
2839 the necessary 64-bit byteswap already. */
2840 if (unlikely(ctx->le_mode)) {
2841 gen_qemu_st64_i64(ctx, cpu_gpr[rs + 1], EA);
2842 gen_addr_add(ctx, EA, EA, 8);
2843 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA);
2844 } else {
2845 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA);
2846 gen_addr_add(ctx, EA, EA, 8);
2847 gen_qemu_st64_i64(ctx, cpu_gpr[rs + 1], EA);
2849 tcg_temp_free(EA);
2850 } else {
2851 /* std / stdu */
2852 if (Rc(ctx->opcode)) {
2853 if (unlikely(rA(ctx->opcode) == 0)) {
2854 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2855 return;
2858 gen_set_access_type(ctx, ACCESS_INT);
2859 EA = tcg_temp_new();
2860 gen_addr_imm_index(ctx, EA, 0x03);
2861 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA);
2862 if (Rc(ctx->opcode))
2863 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2864 tcg_temp_free(EA);
2867 #endif
2868 /*** Integer load and store with byte reverse ***/
2870 /* lhbrx */
2871 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
2873 /* lwbrx */
2874 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
2876 #if defined(TARGET_PPC64)
2877 /* ldbrx */
2878 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
2879 /* stdbrx */
2880 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
2881 #endif /* TARGET_PPC64 */
2883 /* sthbrx */
2884 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
2885 /* stwbrx */
2886 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
2888 /*** Integer load and store multiple ***/
2890 /* lmw */
2891 static void gen_lmw(DisasContext *ctx)
2893 TCGv t0;
2894 TCGv_i32 t1;
2896 if (ctx->le_mode) {
2897 gen_align_no_le(ctx);
2898 return;
2900 gen_set_access_type(ctx, ACCESS_INT);
2901 t0 = tcg_temp_new();
2902 t1 = tcg_const_i32(rD(ctx->opcode));
2903 gen_addr_imm_index(ctx, t0, 0);
2904 gen_helper_lmw(cpu_env, t0, t1);
2905 tcg_temp_free(t0);
2906 tcg_temp_free_i32(t1);
2909 /* stmw */
2910 static void gen_stmw(DisasContext *ctx)
2912 TCGv t0;
2913 TCGv_i32 t1;
2915 if (ctx->le_mode) {
2916 gen_align_no_le(ctx);
2917 return;
2919 gen_set_access_type(ctx, ACCESS_INT);
2920 t0 = tcg_temp_new();
2921 t1 = tcg_const_i32(rS(ctx->opcode));
2922 gen_addr_imm_index(ctx, t0, 0);
2923 gen_helper_stmw(cpu_env, t0, t1);
2924 tcg_temp_free(t0);
2925 tcg_temp_free_i32(t1);
2928 /*** Integer load and store strings ***/
2930 /* lswi */
2931 /* PowerPC32 specification says we must generate an exception if
2932 * rA is in the range of registers to be loaded.
2933 * On the other hand, IBM says this is valid, but rA won't be loaded.
2934 * For now, I'll follow the spec...
2936 static void gen_lswi(DisasContext *ctx)
2938 TCGv t0;
2939 TCGv_i32 t1, t2;
2940 int nb = NB(ctx->opcode);
2941 int start = rD(ctx->opcode);
2942 int ra = rA(ctx->opcode);
2943 int nr;
2945 if (ctx->le_mode) {
2946 gen_align_no_le(ctx);
2947 return;
2949 if (nb == 0)
2950 nb = 32;
2951 nr = (nb + 3) / 4;
2952 if (unlikely(lsw_reg_in_range(start, nr, ra))) {
2953 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
2954 return;
2956 gen_set_access_type(ctx, ACCESS_INT);
2957 t0 = tcg_temp_new();
2958 gen_addr_register(ctx, t0);
2959 t1 = tcg_const_i32(nb);
2960 t2 = tcg_const_i32(start);
2961 gen_helper_lsw(cpu_env, t0, t1, t2);
2962 tcg_temp_free(t0);
2963 tcg_temp_free_i32(t1);
2964 tcg_temp_free_i32(t2);
2967 /* lswx */
2968 static void gen_lswx(DisasContext *ctx)
2970 TCGv t0;
2971 TCGv_i32 t1, t2, t3;
2973 if (ctx->le_mode) {
2974 gen_align_no_le(ctx);
2975 return;
2977 gen_set_access_type(ctx, ACCESS_INT);
2978 t0 = tcg_temp_new();
2979 gen_addr_reg_index(ctx, t0);
2980 t1 = tcg_const_i32(rD(ctx->opcode));
2981 t2 = tcg_const_i32(rA(ctx->opcode));
2982 t3 = tcg_const_i32(rB(ctx->opcode));
2983 gen_helper_lswx(cpu_env, t0, t1, t2, t3);
2984 tcg_temp_free(t0);
2985 tcg_temp_free_i32(t1);
2986 tcg_temp_free_i32(t2);
2987 tcg_temp_free_i32(t3);
2990 /* stswi */
2991 static void gen_stswi(DisasContext *ctx)
2993 TCGv t0;
2994 TCGv_i32 t1, t2;
2995 int nb = NB(ctx->opcode);
2997 if (ctx->le_mode) {
2998 gen_align_no_le(ctx);
2999 return;
3001 gen_set_access_type(ctx, ACCESS_INT);
3002 t0 = tcg_temp_new();
3003 gen_addr_register(ctx, t0);
3004 if (nb == 0)
3005 nb = 32;
3006 t1 = tcg_const_i32(nb);
3007 t2 = tcg_const_i32(rS(ctx->opcode));
3008 gen_helper_stsw(cpu_env, t0, t1, t2);
3009 tcg_temp_free(t0);
3010 tcg_temp_free_i32(t1);
3011 tcg_temp_free_i32(t2);
3014 /* stswx */
3015 static void gen_stswx(DisasContext *ctx)
3017 TCGv t0;
3018 TCGv_i32 t1, t2;
3020 if (ctx->le_mode) {
3021 gen_align_no_le(ctx);
3022 return;
3024 gen_set_access_type(ctx, ACCESS_INT);
3025 t0 = tcg_temp_new();
3026 gen_addr_reg_index(ctx, t0);
3027 t1 = tcg_temp_new_i32();
3028 tcg_gen_trunc_tl_i32(t1, cpu_xer);
3029 tcg_gen_andi_i32(t1, t1, 0x7F);
3030 t2 = tcg_const_i32(rS(ctx->opcode));
3031 gen_helper_stsw(cpu_env, t0, t1, t2);
3032 tcg_temp_free(t0);
3033 tcg_temp_free_i32(t1);
3034 tcg_temp_free_i32(t2);
3037 /*** Memory synchronisation ***/
3038 /* eieio */
3039 static void gen_eieio(DisasContext *ctx)
3043 #if !defined(CONFIG_USER_ONLY)
3044 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
3046 TCGv_i32 t;
3047 TCGLabel *l;
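/* Nothing to do on CPUs without lazy TLB flushing; otherwise invoke the
 * flush helper only when tlb_need_flush is non-zero. */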
3049 if (!ctx->lazy_tlb_flush) {
3050 return;
3052 l = gen_new_label();
3053 t = tcg_temp_new_i32();
3054 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
3055 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
3056 if (global) {
3057 gen_helper_check_tlb_flush_global(cpu_env);
3058 } else {
3059 gen_helper_check_tlb_flush_local(cpu_env);
3061 gen_set_label(l);
3062 tcg_temp_free_i32(t);
3064 #else
3065 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
3066 #endif
3068 /* isync */
3069 static void gen_isync(DisasContext *ctx)
3072 * We need to check for a pending TLB flush. This can only happen in
3073 * kernel mode, so it is sufficient to check MSR_PR.
3075 if (!ctx->pr) {
3076 gen_check_tlb_flush(ctx, false);
3078 gen_stop_exception(ctx);
3081 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE))
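/* Load-and-reserve: check alignment, load the value, then record the
 * reservation address in cpu_reserve and the loaded value in reserve_val
 * for the matching store-conditional. */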
3083 #define LARX(name, memop) \
3084 static void gen_##name(DisasContext *ctx) \
3086 TCGv t0; \
3087 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; \
3088 int len = MEMOP_GET_SIZE(memop); \
3089 gen_set_access_type(ctx, ACCESS_RES); \
3090 t0 = tcg_temp_local_new(); \
3091 gen_addr_reg_index(ctx, t0); \
3092 if ((len) > 1) { \
3093 gen_check_align(ctx, t0, (len)-1); \
3095 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop); \
3096 tcg_gen_mov_tl(cpu_reserve, t0); \
3097 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUPPCState, reserve_val)); \
3098 tcg_temp_free(t0); \
3101 /* lbarx lharx lwarx */
3102 LARX(lbarx, DEF_MEMOP(MO_UB))
3103 LARX(lharx, DEF_MEMOP(MO_UW))
3104 LARX(lwarx, DEF_MEMOP(MO_UL))
3106 #if defined(CONFIG_USER_ONLY)
3107 static void gen_conditional_store(DisasContext *ctx, TCGv EA,
3108 int reg, int memop)
3110 TCGv t0 = tcg_temp_new();
3112 tcg_gen_st_tl(EA, cpu_env, offsetof(CPUPPCState, reserve_ea));
3113 tcg_gen_movi_tl(t0, (MEMOP_GET_SIZE(memop) << 5) | reg);
3114 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, reserve_info));
3115 tcg_temp_free(t0);
3116 gen_exception_err(ctx, POWERPC_EXCP_STCX, 0);
3118 #else
3119 static void gen_conditional_store(DisasContext *ctx, TCGv EA,
3120 int reg, int memop)
3122 TCGLabel *l1;
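/* CR0 starts out as SO; the EQ bit is set and the store performed only if
 * EA matches the reservation address. The reservation is cleared either
 * way. */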
3124 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3125 l1 = gen_new_label();
3126 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, l1);
3127 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3128 tcg_gen_qemu_st_tl(cpu_gpr[reg], EA, ctx->mem_idx, memop);
3129 gen_set_label(l1);
3130 tcg_gen_movi_tl(cpu_reserve, -1);
3132 #endif
3134 #define STCX(name, memop) \
3135 static void gen_##name(DisasContext *ctx) \
3137 TCGv t0; \
3138 int len = MEMOP_GET_SIZE(memop); \
3139 gen_set_access_type(ctx, ACCESS_RES); \
3140 t0 = tcg_temp_local_new(); \
3141 gen_addr_reg_index(ctx, t0); \
3142 if (len > 1) { \
3143 gen_check_align(ctx, t0, (len) - 1); \
3145 gen_conditional_store(ctx, t0, rS(ctx->opcode), memop); \
3146 tcg_temp_free(t0); \
3149 STCX(stbcx_, DEF_MEMOP(MO_UB))
3150 STCX(sthcx_, DEF_MEMOP(MO_UW))
3151 STCX(stwcx_, DEF_MEMOP(MO_UL))
3153 #if defined(TARGET_PPC64)
3154 /* ldarx */
3155 LARX(ldarx, DEF_MEMOP(MO_Q))
3156 /* stdcx. */
3157 STCX(stdcx_, DEF_MEMOP(MO_Q))
3159 /* lqarx */
3160 static void gen_lqarx(DisasContext *ctx)
3162 TCGv EA;
3163 int rd = rD(ctx->opcode);
3164 TCGv gpr1, gpr2;
3166 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
3167 (rd == rB(ctx->opcode)))) {
3168 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3169 return;
3172 gen_set_access_type(ctx, ACCESS_RES);
3173 EA = tcg_temp_local_new();
3174 gen_addr_reg_index(ctx, EA);
3175 gen_check_align(ctx, EA, 15);
3176 if (unlikely(ctx->le_mode)) {
3177 gpr1 = cpu_gpr[rd+1];
3178 gpr2 = cpu_gpr[rd];
3179 } else {
3180 gpr1 = cpu_gpr[rd];
3181 gpr2 = cpu_gpr[rd+1];
3183 tcg_gen_qemu_ld_i64(gpr1, EA, ctx->mem_idx, DEF_MEMOP(MO_Q));
3184 tcg_gen_mov_tl(cpu_reserve, EA);
3185 gen_addr_add(ctx, EA, EA, 8);
3186 tcg_gen_qemu_ld_i64(gpr2, EA, ctx->mem_idx, DEF_MEMOP(MO_Q));
3188 tcg_gen_st_tl(gpr1, cpu_env, offsetof(CPUPPCState, reserve_val));
3189 tcg_gen_st_tl(gpr2, cpu_env, offsetof(CPUPPCState, reserve_val2));
3190 tcg_temp_free(EA);
3193 /* stqcx. */
3194 static void gen_stqcx_(DisasContext *ctx)
3196 TCGv EA;
3197 int reg = rS(ctx->opcode);
3198 int len = 16;
3199 #if !defined(CONFIG_USER_ONLY)
3200 TCGLabel *l1;
3201 TCGv gpr1, gpr2;
3202 #endif
3204 if (unlikely((rD(ctx->opcode) & 1))) {
3205 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3206 return;
3208 gen_set_access_type(ctx, ACCESS_RES);
3209 EA = tcg_temp_local_new();
3210 gen_addr_reg_index(ctx, EA);
3211 if (len > 1) {
3212 gen_check_align(ctx, EA, (len) - 1);
3215 #if defined(CONFIG_USER_ONLY)
3216 gen_conditional_store(ctx, EA, reg, 16);
3217 #else
3218 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3219 l1 = gen_new_label();
3220 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, l1);
3221 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3223 if (unlikely(ctx->le_mode)) {
3224 gpr1 = cpu_gpr[reg + 1];
3225 gpr2 = cpu_gpr[reg];
3226 } else {
3227 gpr1 = cpu_gpr[reg];
3228 gpr2 = cpu_gpr[reg + 1];
3230 tcg_gen_qemu_st_tl(gpr1, EA, ctx->mem_idx, DEF_MEMOP(MO_Q));
3231 gen_addr_add(ctx, EA, EA, 8);
3232 tcg_gen_qemu_st_tl(gpr2, EA, ctx->mem_idx, DEF_MEMOP(MO_Q));
3234 gen_set_label(l1);
3235 tcg_gen_movi_tl(cpu_reserve, -1);
3236 #endif
3237 tcg_temp_free(EA);
3240 #endif /* defined(TARGET_PPC64) */
3242 /* sync */
3243 static void gen_sync(DisasContext *ctx)
3245 uint32_t l = (ctx->opcode >> 21) & 3;
3248 * We may need to check for a pending TLB flush.
3250 * We do this on ptesync (l == 2) on ppc64 and on any sync on ppc32.
3252 * Additionally, this can only happen in kernel mode, so
3253 * checking MSR_PR is sufficient as well.
3255 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
3256 gen_check_tlb_flush(ctx, true);
3260 /* wait */
3261 static void gen_wait(DisasContext *ctx)
3263 TCGv_i32 t0 = tcg_const_i32(1);
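/* cpu_env points at the env member of PowerPCCPU; subtracting its offset
 * gets back to the containing CPU object so that CPUState.halted can be
 * stored directly. */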
3264 tcg_gen_st_i32(t0, cpu_env,
3265 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
3266 tcg_temp_free_i32(t0);
3267 /* Stop translation, as the CPU is supposed to sleep from now on */
3268 gen_exception_nip(ctx, EXCP_HLT, ctx->nip);
3271 #if defined(TARGET_PPC64)
3272 static void gen_doze(DisasContext *ctx)
3274 #if defined(CONFIG_USER_ONLY)
3275 GEN_PRIV;
3276 #else
3277 TCGv_i32 t;
3279 CHK_HV;
3280 t = tcg_const_i32(PPC_PM_DOZE);
3281 gen_helper_pminsn(cpu_env, t);
3282 tcg_temp_free_i32(t);
3283 gen_stop_exception(ctx);
3284 #endif /* defined(CONFIG_USER_ONLY) */
3287 static void gen_nap(DisasContext *ctx)
3289 #if defined(CONFIG_USER_ONLY)
3290 GEN_PRIV;
3291 #else
3292 TCGv_i32 t;
3294 CHK_HV;
3295 t = tcg_const_i32(PPC_PM_NAP);
3296 gen_helper_pminsn(cpu_env, t);
3297 tcg_temp_free_i32(t);
3298 gen_stop_exception(ctx);
3299 #endif /* defined(CONFIG_USER_ONLY) */
3302 static void gen_sleep(DisasContext *ctx)
3304 #if defined(CONFIG_USER_ONLY)
3305 GEN_PRIV;
3306 #else
3307 TCGv_i32 t;
3309 CHK_HV;
3310 t = tcg_const_i32(PPC_PM_SLEEP);
3311 gen_helper_pminsn(cpu_env, t);
3312 tcg_temp_free_i32(t);
3313 gen_stop_exception(ctx);
3314 #endif /* defined(CONFIG_USER_ONLY) */
3317 static void gen_rvwinkle(DisasContext *ctx)
3319 #if defined(CONFIG_USER_ONLY)
3320 GEN_PRIV;
3321 #else
3322 TCGv_i32 t;
3324 CHK_HV;
3325 t = tcg_const_i32(PPC_PM_RVWINKLE);
3326 gen_helper_pminsn(cpu_env, t);
3327 tcg_temp_free_i32(t);
3328 gen_stop_exception(ctx);
3329 #endif /* defined(CONFIG_USER_ONLY) */
3331 #endif /* #if defined(TARGET_PPC64) */
3333 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
3335 #if defined(TARGET_PPC64)
3336 if (ctx->has_cfar)
3337 tcg_gen_movi_tl(cpu_cfar, nip);
3338 #endif
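/* Direct TB chaining is only safe when not single-stepping and, with an
 * MMU, only when the destination stays on the same page as the current
 * TB. */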
3341 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
3343 if (unlikely(ctx->singlestep_enabled)) {
3344 return false;
3347 #ifndef CONFIG_USER_ONLY
3348 return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
3349 #else
3350 return true;
3351 #endif
3354 /*** Branch ***/
3355 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
3357 if (NARROW_MODE(ctx)) {
3358 dest = (uint32_t) dest;
3360 if (use_goto_tb(ctx, dest)) {
3361 tcg_gen_goto_tb(n);
3362 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3363 tcg_gen_exit_tb((uintptr_t)ctx->tb + n);
3364 } else {
3365 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3366 if (unlikely(ctx->singlestep_enabled)) {
3367 if ((ctx->singlestep_enabled &
3368 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
3369 (ctx->exception == POWERPC_EXCP_BRANCH ||
3370 ctx->exception == POWERPC_EXCP_TRACE)) {
3371 gen_exception_nip(ctx, POWERPC_EXCP_TRACE, dest);
3373 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3374 gen_debug_exception(ctx);
3377 tcg_gen_exit_tb(0);
3381 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
3383 if (NARROW_MODE(ctx)) {
3384 nip = (uint32_t)nip;
3386 tcg_gen_movi_tl(cpu_lr, nip);
3389 /* b ba bl bla */
3390 static void gen_b(DisasContext *ctx)
3392 target_ulong li, target;
3394 ctx->exception = POWERPC_EXCP_BRANCH;
3395 /* sign extend LI */
3396 li = LI(ctx->opcode);
3397 li = (li ^ 0x02000000) - 0x02000000;
3398 if (likely(AA(ctx->opcode) == 0)) {
3399 target = ctx->nip + li - 4;
3400 } else {
3401 target = li;
3403 if (LK(ctx->opcode)) {
3404 gen_setlr(ctx, ctx->nip);
3406 gen_update_cfar(ctx, ctx->nip - 4);
3407 gen_goto_tb(ctx, 0, target);
3410 #define BCOND_IM 0
3411 #define BCOND_LR 1
3412 #define BCOND_CTR 2
3413 #define BCOND_TAR 3
3415 static inline void gen_bcond(DisasContext *ctx, int type)
3417 uint32_t bo = BO(ctx->opcode);
3418 TCGLabel *l1;
3419 TCGv target;
3421 ctx->exception = POWERPC_EXCP_BRANCH;
3422 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
3423 target = tcg_temp_local_new();
3424 if (type == BCOND_CTR)
3425 tcg_gen_mov_tl(target, cpu_ctr);
3426 else if (type == BCOND_TAR)
3427 gen_load_spr(target, SPR_TAR);
3428 else
3429 tcg_gen_mov_tl(target, cpu_lr);
3430 } else {
3431 TCGV_UNUSED(target);
3433 if (LK(ctx->opcode))
3434 gen_setlr(ctx, ctx->nip);
3435 l1 = gen_new_label();
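/* (bo & 0x4) == 0: decrement CTR and branch depending on whether it
 * reaches zero; (bo & 0x10) == 0: also test the CR bit selected by BI.
 * l1 is the branch-not-taken path. */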
3436 if ((bo & 0x4) == 0) {
3437 /* Decrement and test CTR */
3438 TCGv temp = tcg_temp_new();
3439 if (unlikely(type == BCOND_CTR)) {
3440 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3441 return;
3443 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3444 if (NARROW_MODE(ctx)) {
3445 tcg_gen_ext32u_tl(temp, cpu_ctr);
3446 } else {
3447 tcg_gen_mov_tl(temp, cpu_ctr);
3449 if (bo & 0x2) {
3450 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3451 } else {
3452 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3454 tcg_temp_free(temp);
3456 if ((bo & 0x10) == 0) {
3457 /* Test CR */
3458 uint32_t bi = BI(ctx->opcode);
3459 uint32_t mask = 0x08 >> (bi & 0x03);
3460 TCGv_i32 temp = tcg_temp_new_i32();
3462 if (bo & 0x8) {
3463 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3464 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3465 } else {
3466 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3467 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3469 tcg_temp_free_i32(temp);
3471 gen_update_cfar(ctx, ctx->nip - 4);
3472 if (type == BCOND_IM) {
3473 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3474 if (likely(AA(ctx->opcode) == 0)) {
3475 gen_goto_tb(ctx, 0, ctx->nip + li - 4);
3476 } else {
3477 gen_goto_tb(ctx, 0, li);
3479 if ((bo & 0x14) != 0x14) {
3480 gen_set_label(l1);
3481 gen_goto_tb(ctx, 1, ctx->nip);
3483 } else {
3484 if (NARROW_MODE(ctx)) {
3485 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3486 } else {
3487 tcg_gen_andi_tl(cpu_nip, target, ~3);
3489 tcg_gen_exit_tb(0);
3490 if ((bo & 0x14) != 0x14) {
3491 gen_set_label(l1);
3492 gen_update_nip(ctx, ctx->nip);
3493 tcg_gen_exit_tb(0);
3496 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
3497 tcg_temp_free(target);
3501 static void gen_bc(DisasContext *ctx)
3503 gen_bcond(ctx, BCOND_IM);
3506 static void gen_bcctr(DisasContext *ctx)
3508 gen_bcond(ctx, BCOND_CTR);
3511 static void gen_bclr(DisasContext *ctx)
3513 gen_bcond(ctx, BCOND_LR);
3516 static void gen_bctar(DisasContext *ctx)
3518 gen_bcond(ctx, BCOND_TAR);
3521 /*** Condition register logical ***/
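/* Each cpu_crf element holds one 4-bit CR field; the shifts below align
 * the crbA/crbB source bits with the crbD destination bit before applying
 * the logical operation and merging the result back. */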
3522 #define GEN_CRLOGIC(name, tcg_op, opc) \
3523 static void glue(gen_, name)(DisasContext *ctx) \
3525 uint8_t bitmask; \
3526 int sh; \
3527 TCGv_i32 t0, t1; \
3528 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3529 t0 = tcg_temp_new_i32(); \
3530 if (sh > 0) \
3531 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
3532 else if (sh < 0) \
3533 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
3534 else \
3535 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
3536 t1 = tcg_temp_new_i32(); \
3537 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3538 if (sh > 0) \
3539 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
3540 else if (sh < 0) \
3541 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
3542 else \
3543 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
3544 tcg_op(t0, t0, t1); \
3545 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \
3546 tcg_gen_andi_i32(t0, t0, bitmask); \
3547 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3548 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
3549 tcg_temp_free_i32(t0); \
3550 tcg_temp_free_i32(t1); \
3553 /* crand */
3554 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3555 /* crandc */
3556 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3557 /* creqv */
3558 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3559 /* crnand */
3560 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3561 /* crnor */
3562 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3563 /* cror */
3564 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3565 /* crorc */
3566 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3567 /* crxor */
3568 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
3570 /* mcrf */
3571 static void gen_mcrf(DisasContext *ctx)
3573 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3576 /*** System linkage ***/
3578 /* rfi (supervisor only) */
3579 static void gen_rfi(DisasContext *ctx)
3581 #if defined(CONFIG_USER_ONLY)
3582 GEN_PRIV;
3583 #else
3584 /* This instruction no longer exists on 64-bit server
3585 * processors compliant with arch 2.x
3587 if (ctx->insns_flags & PPC_SEGMENT_64B) {
3588 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3589 return;
3591 /* Restore CPU state */
3592 CHK_SV;
3593 gen_update_cfar(ctx, ctx->nip - 4);
3594 gen_helper_rfi(cpu_env);
3595 gen_sync_exception(ctx);
3596 #endif
3599 #if defined(TARGET_PPC64)
3600 static void gen_rfid(DisasContext *ctx)
3602 #if defined(CONFIG_USER_ONLY)
3603 GEN_PRIV;
3604 #else
3605 /* Restore CPU state */
3606 CHK_SV;
3607 gen_update_cfar(ctx, ctx->nip - 4);
3608 gen_helper_rfid(cpu_env);
3609 gen_sync_exception(ctx);
3610 #endif
3613 static void gen_hrfid(DisasContext *ctx)
3615 #if defined(CONFIG_USER_ONLY)
3616 GEN_PRIV;
3617 #else
3618 /* Restore CPU state */
3619 CHK_HV;
3620 gen_helper_hrfid(cpu_env);
3621 gen_sync_exception(ctx);
3622 #endif
3624 #endif
3626 /* sc */
3627 #if defined(CONFIG_USER_ONLY)
3628 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3629 #else
3630 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3631 #endif
3632 static void gen_sc(DisasContext *ctx)
3634 uint32_t lev;
3636 lev = (ctx->opcode >> 5) & 0x7F;
3637 gen_exception_err(ctx, POWERPC_SYSCALL, lev);
3640 /*** Trap ***/
3642 /* Check for unconditional traps (always or never) */
3643 static bool check_unconditional_trap(DisasContext *ctx)
3645 /* Trap never */
3646 if (TO(ctx->opcode) == 0) {
3647 return true;
3649 /* Trap always */
3650 if (TO(ctx->opcode) == 31) {
3651 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
3652 return true;
3654 return false;
3657 /* tw */
3658 static void gen_tw(DisasContext *ctx)
3660 TCGv_i32 t0;
3662 if (check_unconditional_trap(ctx)) {
3663 return;
3665 t0 = tcg_const_i32(TO(ctx->opcode));
3666 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
3667 t0);
3668 tcg_temp_free_i32(t0);
3671 /* twi */
3672 static void gen_twi(DisasContext *ctx)
3674 TCGv t0;
3675 TCGv_i32 t1;
3677 if (check_unconditional_trap(ctx)) {
3678 return;
3680 t0 = tcg_const_tl(SIMM(ctx->opcode));
3681 t1 = tcg_const_i32(TO(ctx->opcode));
3682 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
3683 tcg_temp_free(t0);
3684 tcg_temp_free_i32(t1);
3687 #if defined(TARGET_PPC64)
3688 /* td */
3689 static void gen_td(DisasContext *ctx)
3691 TCGv_i32 t0;
3693 if (check_unconditional_trap(ctx)) {
3694 return;
3696 t0 = tcg_const_i32(TO(ctx->opcode));
3697 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
3698 t0);
3699 tcg_temp_free_i32(t0);
3702 /* tdi */
3703 static void gen_tdi(DisasContext *ctx)
3705 TCGv t0;
3706 TCGv_i32 t1;
3708 if (check_unconditional_trap(ctx)) {
3709 return;
3711 t0 = tcg_const_tl(SIMM(ctx->opcode));
3712 t1 = tcg_const_i32(TO(ctx->opcode));
3713 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
3714 tcg_temp_free(t0);
3715 tcg_temp_free_i32(t1);
3717 #endif
3719 /*** Processor control ***/
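/* SO, OV and CA are kept in dedicated globals; gen_read_xer folds them
 * back into the architected XER image and gen_write_xer splits them out
 * again. */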
3721 static void gen_read_xer(TCGv dst)
3723 TCGv t0 = tcg_temp_new();
3724 TCGv t1 = tcg_temp_new();
3725 TCGv t2 = tcg_temp_new();
3726 tcg_gen_mov_tl(dst, cpu_xer);
3727 tcg_gen_shli_tl(t0, cpu_so, XER_SO);
3728 tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
3729 tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
3730 tcg_gen_or_tl(t0, t0, t1);
3731 tcg_gen_or_tl(dst, dst, t2);
3732 tcg_gen_or_tl(dst, dst, t0);
3733 tcg_temp_free(t0);
3734 tcg_temp_free(t1);
3735 tcg_temp_free(t2);
3738 static void gen_write_xer(TCGv src)
3740 tcg_gen_andi_tl(cpu_xer, src,
3741 ~((1u << XER_SO) | (1u << XER_OV) | (1u << XER_CA)));
3742 tcg_gen_shri_tl(cpu_so, src, XER_SO);
3743 tcg_gen_shri_tl(cpu_ov, src, XER_OV);
3744 tcg_gen_shri_tl(cpu_ca, src, XER_CA);
3745 tcg_gen_andi_tl(cpu_so, cpu_so, 1);
3746 tcg_gen_andi_tl(cpu_ov, cpu_ov, 1);
3747 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
3750 /* mcrxr */
3751 static void gen_mcrxr(DisasContext *ctx)
3753 TCGv_i32 t0 = tcg_temp_new_i32();
3754 TCGv_i32 t1 = tcg_temp_new_i32();
3755 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
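/* Pack SO, OV and CA into bits 3, 2 and 1 of the destination CR field,
 * then clear them in XER. */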
3757 tcg_gen_trunc_tl_i32(t0, cpu_so);
3758 tcg_gen_trunc_tl_i32(t1, cpu_ov);
3759 tcg_gen_trunc_tl_i32(dst, cpu_ca);
3760 tcg_gen_shli_i32(t0, t0, 3);
3761 tcg_gen_shli_i32(t1, t1, 2);
3762 tcg_gen_shli_i32(dst, dst, 1);
3763 tcg_gen_or_i32(dst, dst, t0);
3764 tcg_gen_or_i32(dst, dst, t1);
3765 tcg_temp_free_i32(t0);
3766 tcg_temp_free_i32(t1);
3768 tcg_gen_movi_tl(cpu_so, 0);
3769 tcg_gen_movi_tl(cpu_ov, 0);
3770 tcg_gen_movi_tl(cpu_ca, 0);
3773 /* mfcr mfocrf */
3774 static void gen_mfcr(DisasContext *ctx)
3776 uint32_t crm, crn;
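/* Opcode bit 0x00100000 selects mfocrf: copy the single CR field chosen
 * by CRM into the matching nibble of rD; plain mfcr concatenates all
 * eight CR fields instead. */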
3778 if (likely(ctx->opcode & 0x00100000)) {
3779 crm = CRM(ctx->opcode);
3780 if (likely(crm && ((crm & (crm - 1)) == 0))) {
3781 crn = ctz32 (crm);
3782 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
3783 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
3784 cpu_gpr[rD(ctx->opcode)], crn * 4);
3786 } else {
3787 TCGv_i32 t0 = tcg_temp_new_i32();
3788 tcg_gen_mov_i32(t0, cpu_crf[0]);
3789 tcg_gen_shli_i32(t0, t0, 4);
3790 tcg_gen_or_i32(t0, t0, cpu_crf[1]);
3791 tcg_gen_shli_i32(t0, t0, 4);
3792 tcg_gen_or_i32(t0, t0, cpu_crf[2]);
3793 tcg_gen_shli_i32(t0, t0, 4);
3794 tcg_gen_or_i32(t0, t0, cpu_crf[3]);
3795 tcg_gen_shli_i32(t0, t0, 4);
3796 tcg_gen_or_i32(t0, t0, cpu_crf[4]);
3797 tcg_gen_shli_i32(t0, t0, 4);
3798 tcg_gen_or_i32(t0, t0, cpu_crf[5]);
3799 tcg_gen_shli_i32(t0, t0, 4);
3800 tcg_gen_or_i32(t0, t0, cpu_crf[6]);
3801 tcg_gen_shli_i32(t0, t0, 4);
3802 tcg_gen_or_i32(t0, t0, cpu_crf[7]);
3803 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
3804 tcg_temp_free_i32(t0);
3808 /* mfmsr */
3809 static void gen_mfmsr(DisasContext *ctx)
3811 CHK_SV;
3812 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
3815 static void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
3817 #if 0
3818 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
3819 printf("ERROR: try to access SPR %d !\n", sprn);
3820 #endif
3822 #define SPR_NOACCESS (&spr_noaccess)
3824 /* mfspr */
3825 static inline void gen_op_mfspr(DisasContext *ctx)
3827 void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
3828 uint32_t sprn = SPR(ctx->opcode);
3830 #if defined(CONFIG_USER_ONLY)
3831 read_cb = ctx->spr_cb[sprn].uea_read;
3832 #else
3833 if (ctx->pr) {
3834 read_cb = ctx->spr_cb[sprn].uea_read;
3835 } else if (ctx->hv) {
3836 read_cb = ctx->spr_cb[sprn].hea_read;
3837 } else {
3838 read_cb = ctx->spr_cb[sprn].oea_read;
3840 #endif
3841 if (likely(read_cb != NULL)) {
3842 if (likely(read_cb != SPR_NOACCESS)) {
3843 (*read_cb)(ctx, rD(ctx->opcode), sprn);
3844 } else {
3845 /* Privilege exception */
3846 /* This is a hack to avoid warnings when running Linux:
3847 * this OS breaks the PowerPC virtualisation model,
3848 * allowing userland applications to read the PVR
3850 if (sprn != SPR_PVR) {
3851 fprintf(stderr, "Trying to read privileged spr %d (0x%03x) at "
3852 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
3853 if (qemu_log_separate()) {
3854 qemu_log("Trying to read privileged spr %d (0x%03x) at "
3855 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
3858 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
3860 } else {
3861 /* ISA 2.07 defines these as no-ops */
3862 if ((ctx->insns_flags2 & PPC2_ISA207S) &&
3863 (sprn >= 808 && sprn <= 811)) {
3864 /* This is a nop */
3865 return;
3867 /* Not defined */
3868 fprintf(stderr, "Trying to read invalid spr %d (0x%03x) at "
3869 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
3870 if (qemu_log_separate()) {
3871 qemu_log("Trying to read invalid spr %d (0x%03x) at "
3872 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
3875 /* The behaviour depends on MSR:PR and SPR# bit 0x10;
3876 * it can generate a priv, an hv emu or a no-op
3878 if (sprn & 0x10) {
3879 if (ctx->pr) {
3880 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
3882 } else {
3883 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
3884 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
3890 static void gen_mfspr(DisasContext *ctx)
3892 gen_op_mfspr(ctx);
3895 /* mftb */
3896 static void gen_mftb(DisasContext *ctx)
3898 gen_op_mfspr(ctx);
3901 /* mtcrf mtocrf */
3902 static void gen_mtcrf(DisasContext *ctx)
3904 uint32_t crm, crn;
3906 crm = CRM(ctx->opcode);
3907 if (likely((ctx->opcode & 0x00100000))) {
3908 if (crm && ((crm & (crm - 1)) == 0)) {
3909 TCGv_i32 temp = tcg_temp_new_i32();
3910 crn = ctz32 (crm);
3911 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3912 tcg_gen_shri_i32(temp, temp, crn * 4);
3913 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
3914 tcg_temp_free_i32(temp);
3916 } else {
3917 TCGv_i32 temp = tcg_temp_new_i32();
3918 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3919 for (crn = 0 ; crn < 8 ; crn++) {
3920 if (crm & (1 << crn)) {
3921 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
3922 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
3925 tcg_temp_free_i32(temp);
3929 /* mtmsr */
3930 #if defined(TARGET_PPC64)
3931 static void gen_mtmsrd(DisasContext *ctx)
3933 CHK_SV;
3935 #if !defined(CONFIG_USER_ONLY)
3936 if (ctx->opcode & 0x00010000) {
3937 /* Special form that does not need any synchronisation */
3938 TCGv t0 = tcg_temp_new();
3939 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3940 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
3941 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3942 tcg_temp_free(t0);
3943 } else {
3944 /* XXX: we need to update nip before the store:
3945 * if we enter power saving mode, we will exit the loop
3946 * directly from ppc_store_msr
3948 gen_update_nip(ctx, ctx->nip);
3949 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]);
3950 /* Must stop the translation as the machine state may have changed */
3951 /* Note that mtmsr is not always defined as context-synchronizing */
3952 gen_stop_exception(ctx);
3954 #endif /* !defined(CONFIG_USER_ONLY) */
3956 #endif /* defined(TARGET_PPC64) */
3958 static void gen_mtmsr(DisasContext *ctx)
3960 CHK_SV;
3962 #if !defined(CONFIG_USER_ONLY)
3963 if (ctx->opcode & 0x00010000) {
3964 /* Special form that does not need any synchronisation */
3965 TCGv t0 = tcg_temp_new();
3966 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3967 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
3968 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3969 tcg_temp_free(t0);
3970 } else {
3971 TCGv msr = tcg_temp_new();
3973 /* XXX: we need to update nip before the store:
3974 * if we enter power saving mode, we will exit the loop
3975 * directly from ppc_store_msr
3977 gen_update_nip(ctx, ctx->nip);
3978 #if defined(TARGET_PPC64)
3979 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
3980 #else
3981 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
3982 #endif
3983 gen_helper_store_msr(cpu_env, msr);
3984 tcg_temp_free(msr);
3985 /* Must stop the translation as the machine state may have changed */
3986 /* Note that mtmsr is not always defined as context-synchronizing */
3987 gen_stop_exception(ctx);
3989 #endif
3992 /* mtspr */
3993 static void gen_mtspr(DisasContext *ctx)
3995 void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
3996 uint32_t sprn = SPR(ctx->opcode);
3998 #if defined(CONFIG_USER_ONLY)
3999 write_cb = ctx->spr_cb[sprn].uea_write;
4000 #else
4001 if (ctx->pr) {
4002 write_cb = ctx->spr_cb[sprn].uea_write;
4003 } else if (ctx->hv) {
4004 write_cb = ctx->spr_cb[sprn].hea_write;
4005 } else {
4006 write_cb = ctx->spr_cb[sprn].oea_write;
4008 #endif
4009 if (likely(write_cb != NULL)) {
4010 if (likely(write_cb != SPR_NOACCESS)) {
4011 (*write_cb)(ctx, sprn, rS(ctx->opcode));
4012 } else {
4013 /* Privilege exception */
4014 fprintf(stderr, "Trying to write privileged spr %d (0x%03x) at "
4015 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
4016 if (qemu_log_separate()) {
4017 qemu_log("Trying to write privileged spr %d (0x%03x) at "
4018 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
4020 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4022 } else {
4023 /* ISA 2.07 defines these as no-ops */
4024 if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4025 (sprn >= 808 && sprn <= 811)) {
4026 /* This is a nop */
4027 return;
4030 /* Not defined */
4031 if (qemu_log_separate()) {
4032 qemu_log("Trying to write invalid spr %d (0x%03x) at "
4033 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
4035 fprintf(stderr, "Trying to write invalid spr %d (0x%03x) at "
4036 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
4039 /* The behaviour depends on MSR:PR and SPR# bit 0x10;
4040 * it can generate a priv, an hv emu or a no-op
4042 if (sprn & 0x10) {
4043 if (ctx->pr) {
4044 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
4046 } else {
4047 if (ctx->pr || sprn == 0) {
4048 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
4054 #if defined(TARGET_PPC64)
4055 /* setb */
4056 static void gen_setb(DisasContext *ctx)
4058 TCGv_i32 t0 = tcg_temp_new_i32();
4059 TCGv_i32 t8 = tcg_temp_new_i32();
4060 TCGv_i32 tm1 = tcg_temp_new_i32();
4061 int crf = crfS(ctx->opcode);
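/* setb returns -1 if the CR field's LT bit is set, 1 if only GT is set
 * and 0 otherwise; with LT worth 8 and GT worth 4, those cases are
 * field >= 8 and field >= 4 respectively. */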
4063 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
4064 tcg_gen_movi_i32(t8, 8);
4065 tcg_gen_movi_i32(tm1, -1);
4066 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
4067 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4069 tcg_temp_free_i32(t0);
4070 tcg_temp_free_i32(t8);
4071 tcg_temp_free_i32(tm1);
4073 #endif
4075 /*** Cache management ***/
4077 /* dcbf */
4078 static void gen_dcbf(DisasContext *ctx)
4080 /* XXX: specification says this is treated as a load by the MMU */
4081 TCGv t0;
4082 gen_set_access_type(ctx, ACCESS_CACHE);
4083 t0 = tcg_temp_new();
4084 gen_addr_reg_index(ctx, t0);
4085 gen_qemu_ld8u(ctx, t0, t0);
4086 tcg_temp_free(t0);
4089 /* dcbi (Supervisor only) */
4090 static void gen_dcbi(DisasContext *ctx)
4092 #if defined(CONFIG_USER_ONLY)
4093 GEN_PRIV;
4094 #else
4095 TCGv EA, val;
4097 CHK_SV;
4098 EA = tcg_temp_new();
4099 gen_set_access_type(ctx, ACCESS_CACHE);
4100 gen_addr_reg_index(ctx, EA);
4101 val = tcg_temp_new();
4102 /* XXX: specification says this should be treated as a store by the MMU */
4103 gen_qemu_ld8u(ctx, val, EA);
4104 gen_qemu_st8(ctx, val, EA);
4105 tcg_temp_free(val);
4106 tcg_temp_free(EA);
4107 #endif /* defined(CONFIG_USER_ONLY) */
4110 /* dcbst */
4111 static void gen_dcbst(DisasContext *ctx)
4113 /* XXX: specification says this is treated as a load by the MMU */
4114 TCGv t0;
4115 gen_set_access_type(ctx, ACCESS_CACHE);
4116 t0 = tcg_temp_new();
4117 gen_addr_reg_index(ctx, t0);
4118 gen_qemu_ld8u(ctx, t0, t0);
4119 tcg_temp_free(t0);
4122 /* dcbt */
4123 static void gen_dcbt(DisasContext *ctx)
4125 /* interpreted as no-op */
4126 /* XXX: specification says this is treated as a load by the MMU
4127 * but does not generate any exception
4131 /* dcbtst */
4132 static void gen_dcbtst(DisasContext *ctx)
4134 /* interpreted as no-op */
4135 /* XXX: specification says this is treated as a load by the MMU
4136 * but does not generate any exception
4140 /* dcbtls */
4141 static void gen_dcbtls(DisasContext *ctx)
4143 /* Always fails locking the cache */
4144 TCGv t0 = tcg_temp_new();
4145 gen_load_spr(t0, SPR_Exxx_L1CSR0);
4146 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
4147 gen_store_spr(SPR_Exxx_L1CSR0, t0);
4148 tcg_temp_free(t0);
4151 /* dcbz */
4152 static void gen_dcbz(DisasContext *ctx)
4154 TCGv tcgv_addr;
4155 TCGv_i32 tcgv_op;
4157 gen_set_access_type(ctx, ACCESS_CACHE);
4158 tcgv_addr = tcg_temp_new();
4159 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
4160 gen_addr_reg_index(ctx, tcgv_addr);
4161 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
4162 tcg_temp_free(tcgv_addr);
4163 tcg_temp_free_i32(tcgv_op);
4166 /* dst / dstt */
4167 static void gen_dst(DisasContext *ctx)
4169 if (rA(ctx->opcode) == 0) {
4170 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4171 } else {
4172 /* interpreted as no-op */
4176 /* dstst / dststt */
4177 static void gen_dstst(DisasContext *ctx)
4179 if (rA(ctx->opcode) == 0) {
4180 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4181 } else {
4182 /* interpreted as no-op */
4187 /* dss / dssall */
4188 static void gen_dss(DisasContext *ctx)
4190 /* interpreted as no-op */
4193 /* icbi */
4194 static void gen_icbi(DisasContext *ctx)
4196 TCGv t0;
4197 gen_set_access_type(ctx, ACCESS_CACHE);
4198 t0 = tcg_temp_new();
4199 gen_addr_reg_index(ctx, t0);
4200 gen_helper_icbi(cpu_env, t0);
4201 tcg_temp_free(t0);
4204 /* Optional: */
4205 /* dcba */
4206 static void gen_dcba(DisasContext *ctx)
4208 /* interpreted as no-op */
4209 /* XXX: specification says this is treated as a store by the MMU
4210 * but does not generate any exception
4214 /*** Segment register manipulation ***/
4215 /* Supervisor only: */
4217 /* mfsr */
4218 static void gen_mfsr(DisasContext *ctx)
4220 #if defined(CONFIG_USER_ONLY)
4221 GEN_PRIV;
4222 #else
4223 TCGv t0;
4225 CHK_SV;
4226 t0 = tcg_const_tl(SR(ctx->opcode));
4227 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4228 tcg_temp_free(t0);
4229 #endif /* defined(CONFIG_USER_ONLY) */
4232 /* mfsrin */
4233 static void gen_mfsrin(DisasContext *ctx)
4235 #if defined(CONFIG_USER_ONLY)
4236 GEN_PRIV;
4237 #else
4238 TCGv t0;
4240 CHK_SV;
4241 t0 = tcg_temp_new();
4242 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4243 tcg_gen_andi_tl(t0, t0, 0xF);
4244 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4245 tcg_temp_free(t0);
4246 #endif /* defined(CONFIG_USER_ONLY) */
4249 /* mtsr */
4250 static void gen_mtsr(DisasContext *ctx)
4252 #if defined(CONFIG_USER_ONLY)
4253 GEN_PRIV;
4254 #else
4255 TCGv t0;
4257 CHK_SV;
4258 t0 = tcg_const_tl(SR(ctx->opcode));
4259 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
4260 tcg_temp_free(t0);
4261 #endif /* defined(CONFIG_USER_ONLY) */
4264 /* mtsrin */
4265 static void gen_mtsrin(DisasContext *ctx)
4267 #if defined(CONFIG_USER_ONLY)
4268 GEN_PRIV;
4269 #else
4270 TCGv t0;
4271 CHK_SV;
4273 t0 = tcg_temp_new();
4274 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4275 tcg_gen_andi_tl(t0, t0, 0xF);
4276 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
4277 tcg_temp_free(t0);
4278 #endif /* defined(CONFIG_USER_ONLY) */
4281 #if defined(TARGET_PPC64)
4282 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
4284 /* mfsr */
4285 static void gen_mfsr_64b(DisasContext *ctx)
4287 #if defined(CONFIG_USER_ONLY)
4288 GEN_PRIV;
4289 #else
4290 TCGv t0;
4292 CHK_SV;
4293 t0 = tcg_const_tl(SR(ctx->opcode));
4294 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4295 tcg_temp_free(t0);
4296 #endif /* defined(CONFIG_USER_ONLY) */
4299 /* mfsrin */
4300 static void gen_mfsrin_64b(DisasContext *ctx)
4302 #if defined(CONFIG_USER_ONLY)
4303 GEN_PRIV;
4304 #else
4305 TCGv t0;
4307 CHK_SV;
4308 t0 = tcg_temp_new();
4309 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4310 tcg_gen_andi_tl(t0, t0, 0xF);
4311 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4312 tcg_temp_free(t0);
4313 #endif /* defined(CONFIG_USER_ONLY) */
4316 /* mtsr */
4317 static void gen_mtsr_64b(DisasContext *ctx)
4319 #if defined(CONFIG_USER_ONLY)
4320 GEN_PRIV;
4321 #else
4322 TCGv t0;
4324 CHK_SV;
4325 t0 = tcg_const_tl(SR(ctx->opcode));
4326 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
4327 tcg_temp_free(t0);
4328 #endif /* defined(CONFIG_USER_ONLY) */
4331 /* mtsrin */
4332 static void gen_mtsrin_64b(DisasContext *ctx)
4334 #if defined(CONFIG_USER_ONLY)
4335 GEN_PRIV;
4336 #else
4337 TCGv t0;
4339 CHK_SV;
4340 t0 = tcg_temp_new();
4341 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4342 tcg_gen_andi_tl(t0, t0, 0xF);
4343 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
4344 tcg_temp_free(t0);
4345 #endif /* defined(CONFIG_USER_ONLY) */
4348 /* slbmte */
4349 static void gen_slbmte(DisasContext *ctx)
4351 #if defined(CONFIG_USER_ONLY)
4352 GEN_PRIV;
4353 #else
4354 CHK_SV;
4356 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
4357 cpu_gpr[rS(ctx->opcode)]);
4358 #endif /* defined(CONFIG_USER_ONLY) */
4361 static void gen_slbmfee(DisasContext *ctx)
4363 #if defined(CONFIG_USER_ONLY)
4364 GEN_PRIV;
4365 #else
4366 CHK_SV;
4368 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
4369 cpu_gpr[rB(ctx->opcode)]);
4370 #endif /* defined(CONFIG_USER_ONLY) */
4373 static void gen_slbmfev(DisasContext *ctx)
4375 #if defined(CONFIG_USER_ONLY)
4376 GEN_PRIV;
4377 #else
4378 CHK_SV;
4380 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
4381 cpu_gpr[rB(ctx->opcode)]);
4382 #endif /* defined(CONFIG_USER_ONLY) */
4385 static void gen_slbfee_(DisasContext *ctx)
4387 #if defined(CONFIG_USER_ONLY)
4388 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4389 #else
4390 TCGLabel *l1, *l2;
4392 if (unlikely(ctx->pr)) {
4393 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4394 return;
4396 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
4397 cpu_gpr[rB(ctx->opcode)]);
4398 l1 = gen_new_label();
4399 l2 = gen_new_label();
4400 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4401 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1);
4402 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
4403 tcg_gen_br(l2);
4404 gen_set_label(l1);
4405 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0);
4406 gen_set_label(l2);
4407 #endif
4409 #endif /* defined(TARGET_PPC64) */
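/*
 * In gen_slbfee_ above, CR0 starts out as just SO; the EQ bit is then set
 * only when the SLB search helper returned something other than -1, i.e.
 * only when a matching entry was found, and rS is cleared otherwise.
 */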
4411 /*** Lookaside buffer management ***/
4412 /* Optional & supervisor only: */
4414 /* tlbia */
4415 static void gen_tlbia(DisasContext *ctx)
4417 #if defined(CONFIG_USER_ONLY)
4418 GEN_PRIV;
4419 #else
4420 CHK_HV;
4422 gen_helper_tlbia(cpu_env);
4423 #endif /* defined(CONFIG_USER_ONLY) */
4426 /* tlbiel */
4427 static void gen_tlbiel(DisasContext *ctx)
4429 #if defined(CONFIG_USER_ONLY)
4430 GEN_PRIV;
4431 #else
4432 CHK_SV;
4434 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4435 #endif /* defined(CONFIG_USER_ONLY) */
4438 /* tlbie */
4439 static void gen_tlbie(DisasContext *ctx)
4441 #if defined(CONFIG_USER_ONLY)
4442 GEN_PRIV;
4443 #else
4444 TCGv_i32 t1;
4445 CHK_HV;
4447 if (NARROW_MODE(ctx)) {
4448 TCGv t0 = tcg_temp_new();
4449 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
4450 gen_helper_tlbie(cpu_env, t0);
4451 tcg_temp_free(t0);
4452 } else {
4453 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4455 t1 = tcg_temp_new_i32();
4456 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
4457 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH);
4458 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
4459 tcg_temp_free_i32(t1);
4460 #endif /* defined(CONFIG_USER_ONLY) */
4463 /* tlbsync */
4464 static void gen_tlbsync(DisasContext *ctx)
4466 #if defined(CONFIG_USER_ONLY)
4467 GEN_PRIV;
4468 #else
4469 CHK_HV;
4471 /* BookS does both ptesync and tlbsync, so make tlbsync a nop for server */
4472 if (ctx->insns_flags & PPC_BOOKE) {
4473 gen_check_tlb_flush(ctx, true);
4475 #endif /* defined(CONFIG_USER_ONLY) */
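/*
 * Note on the two helpers above: gen_tlbie only records that a global flush
 * is pending by OR-ing TLB_NEED_GLOBAL_FLUSH into tlb_need_flush; the flush
 * itself is deferred until gen_check_tlb_flush runs, which tlbsync does here
 * only for BookE since, per the comment above, server covers it in ptesync.
 */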
4478 #if defined(TARGET_PPC64)
4479 /* slbia */
4480 static void gen_slbia(DisasContext *ctx)
4482 #if defined(CONFIG_USER_ONLY)
4483 GEN_PRIV;
4484 #else
4485 CHK_SV;
4487 gen_helper_slbia(cpu_env);
4488 #endif /* defined(CONFIG_USER_ONLY) */
4491 /* slbie */
4492 static void gen_slbie(DisasContext *ctx)
4494 #if defined(CONFIG_USER_ONLY)
4495 GEN_PRIV;
4496 #else
4497 CHK_SV;
4499 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4500 #endif /* defined(CONFIG_USER_ONLY) */
4502 #endif /* defined(TARGET_PPC64) */
4504 /*** External control ***/
4505 /* Optional: */
4507 /* eciwx */
4508 static void gen_eciwx(DisasContext *ctx)
4510 TCGv t0;
4511 /* Should check EAR[E] ! */
4512 gen_set_access_type(ctx, ACCESS_EXT);
4513 t0 = tcg_temp_new();
4514 gen_addr_reg_index(ctx, t0);
4515 gen_check_align(ctx, t0, 0x03);
4516 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4517 tcg_temp_free(t0);
4520 /* ecowx */
4521 static void gen_ecowx(DisasContext *ctx)
4523 TCGv t0;
4524 /* Should check EAR[E] ! */
4525 gen_set_access_type(ctx, ACCESS_EXT);
4526 t0 = tcg_temp_new();
4527 gen_addr_reg_index(ctx, t0);
4528 gen_check_align(ctx, t0, 0x03);
4529 gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4530 tcg_temp_free(t0);
4533 /* PowerPC 601 specific instructions */
4535 /* abs - abs. */
4536 static void gen_abs(DisasContext *ctx)
4538 TCGLabel *l1 = gen_new_label();
4539 TCGLabel *l2 = gen_new_label();
4540 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
4541 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4542 tcg_gen_br(l2);
4543 gen_set_label(l1);
4544 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4545 gen_set_label(l2);
4546 if (unlikely(Rc(ctx->opcode) != 0))
4547 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4550 /* abso - abso. */
4551 static void gen_abso(DisasContext *ctx)
4553 TCGLabel *l1 = gen_new_label();
4554 TCGLabel *l2 = gen_new_label();
4555 TCGLabel *l3 = gen_new_label();
4556 /* Start with XER OV disabled, the most likely case */
4557 tcg_gen_movi_tl(cpu_ov, 0);
4558 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
4559 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
4560 tcg_gen_movi_tl(cpu_ov, 1);
4561 tcg_gen_movi_tl(cpu_so, 1);
4562 tcg_gen_br(l2);
4563 gen_set_label(l1);
4564 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4565 tcg_gen_br(l3);
4566 gen_set_label(l2);
4567 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4568 gen_set_label(l3);
4569 if (unlikely(Rc(ctx->opcode) != 0))
4570 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4573 /* clcs */
4574 static void gen_clcs(DisasContext *ctx)
4576 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
4577 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4578 tcg_temp_free_i32(t0);
4579 /* Rc=1 sets CR0 to an undefined state */
4582 /* div - div. */
4583 static void gen_div(DisasContext *ctx)
4585 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
4586 cpu_gpr[rB(ctx->opcode)]);
4587 if (unlikely(Rc(ctx->opcode) != 0))
4588 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4591 /* divo - divo. */
4592 static void gen_divo(DisasContext *ctx)
4594 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
4595 cpu_gpr[rB(ctx->opcode)]);
4596 if (unlikely(Rc(ctx->opcode) != 0))
4597 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4600 /* divs - divs. */
4601 static void gen_divs(DisasContext *ctx)
4603 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
4604 cpu_gpr[rB(ctx->opcode)]);
4605 if (unlikely(Rc(ctx->opcode) != 0))
4606 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4609 /* divso - divso. */
4610 static void gen_divso(DisasContext *ctx)
4612 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env,
4613 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4614 if (unlikely(Rc(ctx->opcode) != 0))
4615 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4618 /* doz - doz. */
4619 static void gen_doz(DisasContext *ctx)
4621 TCGLabel *l1 = gen_new_label();
4622 TCGLabel *l2 = gen_new_label();
4623 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4624 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4625 tcg_gen_br(l2);
4626 gen_set_label(l1);
4627 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4628 gen_set_label(l2);
4629 if (unlikely(Rc(ctx->opcode) != 0))
4630 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4633 /* dozo - dozo. */
4634 static void gen_dozo(DisasContext *ctx)
4636 TCGLabel *l1 = gen_new_label();
4637 TCGLabel *l2 = gen_new_label();
4638 TCGv t0 = tcg_temp_new();
4639 TCGv t1 = tcg_temp_new();
4640 TCGv t2 = tcg_temp_new();
4641 /* Start with XER OV disabled, the most likely case */
4642 tcg_gen_movi_tl(cpu_ov, 0);
4643 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4644 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4645 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4646 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
4647 tcg_gen_andc_tl(t1, t1, t2);
4648 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
4649 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4650 tcg_gen_movi_tl(cpu_ov, 1);
4651 tcg_gen_movi_tl(cpu_so, 1);
4652 tcg_gen_br(l2);
4653 gen_set_label(l1);
4654 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4655 gen_set_label(l2);
4656 tcg_temp_free(t0);
4657 tcg_temp_free(t1);
4658 tcg_temp_free(t2);
4659 if (unlikely(Rc(ctx->opcode) != 0))
4660 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4663 /* dozi */
4664 static void gen_dozi(DisasContext *ctx)
4666 target_long simm = SIMM(ctx->opcode);
4667 TCGLabel *l1 = gen_new_label();
4668 TCGLabel *l2 = gen_new_label();
4669 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
4670 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
4671 tcg_gen_br(l2);
4672 gen_set_label(l1);
4673 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4674 gen_set_label(l2);
4675 if (unlikely(Rc(ctx->opcode) != 0))
4676 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4679 /* lscbx - lscbx. */
4680 static void gen_lscbx(DisasContext *ctx)
4682 TCGv t0 = tcg_temp_new();
4683 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
4684 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
4685 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
4687 gen_addr_reg_index(ctx, t0);
4688 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3);
4689 tcg_temp_free_i32(t1);
4690 tcg_temp_free_i32(t2);
4691 tcg_temp_free_i32(t3);
4692 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
4693 tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
4694 if (unlikely(Rc(ctx->opcode) != 0))
4695 gen_set_Rc0(ctx, t0);
4696 tcg_temp_free(t0);
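/*
 * For lscbx, the value the helper leaves in t0 appears to be the byte count
 * for the string load; the andi/or pair above writes it into the low seven
 * bits of XER, and Rc=1 additionally derives CR0 from that same value.
 */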
4699 /* maskg - maskg. */
4700 static void gen_maskg(DisasContext *ctx)
4702 TCGLabel *l1 = gen_new_label();
4703 TCGv t0 = tcg_temp_new();
4704 TCGv t1 = tcg_temp_new();
4705 TCGv t2 = tcg_temp_new();
4706 TCGv t3 = tcg_temp_new();
4707 tcg_gen_movi_tl(t3, 0xFFFFFFFF);
4708 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4709 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
4710 tcg_gen_addi_tl(t2, t0, 1);
4711 tcg_gen_shr_tl(t2, t3, t2);
4712 tcg_gen_shr_tl(t3, t3, t1);
4713 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
4714 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
4715 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4716 gen_set_label(l1);
4717 tcg_temp_free(t0);
4718 tcg_temp_free(t1);
4719 tcg_temp_free(t2);
4720 tcg_temp_free(t3);
4721 if (unlikely(Rc(ctx->opcode) != 0))
4722 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4725 /* maskir - maskir. */
4726 static void gen_maskir(DisasContext *ctx)
4728 TCGv t0 = tcg_temp_new();
4729 TCGv t1 = tcg_temp_new();
4730 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4731 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4732 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4733 tcg_temp_free(t0);
4734 tcg_temp_free(t1);
4735 if (unlikely(Rc(ctx->opcode) != 0))
4736 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4739 /* mul - mul. */
4740 static void gen_mul(DisasContext *ctx)
4742 TCGv_i64 t0 = tcg_temp_new_i64();
4743 TCGv_i64 t1 = tcg_temp_new_i64();
4744 TCGv t2 = tcg_temp_new();
4745 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4746 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4747 tcg_gen_mul_i64(t0, t0, t1);
4748 tcg_gen_trunc_i64_tl(t2, t0);
4749 gen_store_spr(SPR_MQ, t2);
4750 tcg_gen_shri_i64(t1, t0, 32);
4751 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4752 tcg_temp_free_i64(t0);
4753 tcg_temp_free_i64(t1);
4754 tcg_temp_free(t2);
4755 if (unlikely(Rc(ctx->opcode) != 0))
4756 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4759 /* mulo - mulo. */
4760 static void gen_mulo(DisasContext *ctx)
4762 TCGLabel *l1 = gen_new_label();
4763 TCGv_i64 t0 = tcg_temp_new_i64();
4764 TCGv_i64 t1 = tcg_temp_new_i64();
4765 TCGv t2 = tcg_temp_new();
4766 /* Start with XER OV disabled, the most likely case */
4767 tcg_gen_movi_tl(cpu_ov, 0);
4768 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4769 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4770 tcg_gen_mul_i64(t0, t0, t1);
4771 tcg_gen_trunc_i64_tl(t2, t0);
4772 gen_store_spr(SPR_MQ, t2);
4773 tcg_gen_shri_i64(t1, t0, 32);
4774 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4775 tcg_gen_ext32s_i64(t1, t0);
4776 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
4777 tcg_gen_movi_tl(cpu_ov, 1);
4778 tcg_gen_movi_tl(cpu_so, 1);
4779 gen_set_label(l1);
4780 tcg_temp_free_i64(t0);
4781 tcg_temp_free_i64(t1);
4782 tcg_temp_free(t2);
4783 if (unlikely(Rc(ctx->opcode) != 0))
4784 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4787 /* nabs - nabs. */
4788 static void gen_nabs(DisasContext *ctx)
4790 TCGLabel *l1 = gen_new_label();
4791 TCGLabel *l2 = gen_new_label();
4792 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4793 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4794 tcg_gen_br(l2);
4795 gen_set_label(l1);
4796 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4797 gen_set_label(l2);
4798 if (unlikely(Rc(ctx->opcode) != 0))
4799 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4802 /* nabso - nabso. */
4803 static void gen_nabso(DisasContext *ctx)
4805 TCGLabel *l1 = gen_new_label();
4806 TCGLabel *l2 = gen_new_label();
4807 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4808 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4809 tcg_gen_br(l2);
4810 gen_set_label(l1);
4811 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4812 gen_set_label(l2);
4813 /* nabs never overflows */
4814 tcg_gen_movi_tl(cpu_ov, 0);
4815 if (unlikely(Rc(ctx->opcode) != 0))
4816 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4819 /* rlmi - rlmi. */
4820 static void gen_rlmi(DisasContext *ctx)
4822 uint32_t mb = MB(ctx->opcode);
4823 uint32_t me = ME(ctx->opcode);
4824 TCGv t0 = tcg_temp_new();
4825 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4826 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4827 tcg_gen_andi_tl(t0, t0, MASK(mb, me));
4828 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
4829 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
4830 tcg_temp_free(t0);
4831 if (unlikely(Rc(ctx->opcode) != 0))
4832 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4835 /* rrib - rrib. */
4836 static void gen_rrib(DisasContext *ctx)
4838 TCGv t0 = tcg_temp_new();
4839 TCGv t1 = tcg_temp_new();
4840 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4841 tcg_gen_movi_tl(t1, 0x80000000);
4842 tcg_gen_shr_tl(t1, t1, t0);
4843 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4844 tcg_gen_and_tl(t0, t0, t1);
4845 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
4846 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4847 tcg_temp_free(t0);
4848 tcg_temp_free(t1);
4849 if (unlikely(Rc(ctx->opcode) != 0))
4850 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4853 /* sle - sle. */
4854 static void gen_sle(DisasContext *ctx)
4856 TCGv t0 = tcg_temp_new();
4857 TCGv t1 = tcg_temp_new();
4858 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4859 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4860 tcg_gen_subfi_tl(t1, 32, t1);
4861 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4862 tcg_gen_or_tl(t1, t0, t1);
4863 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4864 gen_store_spr(SPR_MQ, t1);
4865 tcg_temp_free(t0);
4866 tcg_temp_free(t1);
4867 if (unlikely(Rc(ctx->opcode) != 0))
4868 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4871 /* sleq - sleq. */
4872 static void gen_sleq(DisasContext *ctx)
4874 TCGv t0 = tcg_temp_new();
4875 TCGv t1 = tcg_temp_new();
4876 TCGv t2 = tcg_temp_new();
4877 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4878 tcg_gen_movi_tl(t2, 0xFFFFFFFF);
4879 tcg_gen_shl_tl(t2, t2, t0);
4880 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4881 gen_load_spr(t1, SPR_MQ);
4882 gen_store_spr(SPR_MQ, t0);
4883 tcg_gen_and_tl(t0, t0, t2);
4884 tcg_gen_andc_tl(t1, t1, t2);
4885 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4886 tcg_temp_free(t0);
4887 tcg_temp_free(t1);
4888 tcg_temp_free(t2);
4889 if (unlikely(Rc(ctx->opcode) != 0))
4890 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4893 /* sliq - sliq. */
4894 static void gen_sliq(DisasContext *ctx)
4896 int sh = SH(ctx->opcode);
4897 TCGv t0 = tcg_temp_new();
4898 TCGv t1 = tcg_temp_new();
4899 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4900 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4901 tcg_gen_or_tl(t1, t0, t1);
4902 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4903 gen_store_spr(SPR_MQ, t1);
4904 tcg_temp_free(t0);
4905 tcg_temp_free(t1);
4906 if (unlikely(Rc(ctx->opcode) != 0))
4907 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4910 /* slliq - slliq. */
4911 static void gen_slliq(DisasContext *ctx)
4913 int sh = SH(ctx->opcode);
4914 TCGv t0 = tcg_temp_new();
4915 TCGv t1 = tcg_temp_new();
4916 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4917 gen_load_spr(t1, SPR_MQ);
4918 gen_store_spr(SPR_MQ, t0);
4919 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
4920 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
4921 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4922 tcg_temp_free(t0);
4923 tcg_temp_free(t1);
4924 if (unlikely(Rc(ctx->opcode) != 0))
4925 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4928 /* sllq - sllq. */
4929 static void gen_sllq(DisasContext *ctx)
4931 TCGLabel *l1 = gen_new_label();
4932 TCGLabel *l2 = gen_new_label();
4933 TCGv t0 = tcg_temp_local_new();
4934 TCGv t1 = tcg_temp_local_new();
4935 TCGv t2 = tcg_temp_local_new();
4936 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4937 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4938 tcg_gen_shl_tl(t1, t1, t2);
4939 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4940 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
4941 gen_load_spr(t0, SPR_MQ);
4942 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4943 tcg_gen_br(l2);
4944 gen_set_label(l1);
4945 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4946 gen_load_spr(t2, SPR_MQ);
4947 tcg_gen_andc_tl(t1, t2, t1);
4948 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4949 gen_set_label(l2);
4950 tcg_temp_free(t0);
4951 tcg_temp_free(t1);
4952 tcg_temp_free(t2);
4953 if (unlikely(Rc(ctx->opcode) != 0))
4954 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4957 /* slq - slq. */
4958 static void gen_slq(DisasContext *ctx)
4960 TCGLabel *l1 = gen_new_label();
4961 TCGv t0 = tcg_temp_new();
4962 TCGv t1 = tcg_temp_new();
4963 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4964 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4965 tcg_gen_subfi_tl(t1, 32, t1);
4966 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4967 tcg_gen_or_tl(t1, t0, t1);
4968 gen_store_spr(SPR_MQ, t1);
4969 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
4970 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4971 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4972 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
4973 gen_set_label(l1);
4974 tcg_temp_free(t0);
4975 tcg_temp_free(t1);
4976 if (unlikely(Rc(ctx->opcode) != 0))
4977 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4980 /* sraiq - sraiq. */
4981 static void gen_sraiq(DisasContext *ctx)
4983 int sh = SH(ctx->opcode);
4984 TCGLabel *l1 = gen_new_label();
4985 TCGv t0 = tcg_temp_new();
4986 TCGv t1 = tcg_temp_new();
4987 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4988 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4989 tcg_gen_or_tl(t0, t0, t1);
4990 gen_store_spr(SPR_MQ, t0);
4991 tcg_gen_movi_tl(cpu_ca, 0);
4992 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4993 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
4994 tcg_gen_movi_tl(cpu_ca, 1);
4995 gen_set_label(l1);
4996 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
4997 tcg_temp_free(t0);
4998 tcg_temp_free(t1);
4999 if (unlikely(Rc(ctx->opcode) != 0))
5000 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
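/*
 * CA handling for sraiq above: t1 holds the bits shifted out of the low end,
 * so the two conditional branches leave CA set only when the source register
 * is negative and at least one shifted-out bit was 1, which matches the
 * usual "shift right algebraic" carry rule.
 */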
5003 /* sraq - sraq. */
5004 static void gen_sraq(DisasContext *ctx)
5006 TCGLabel *l1 = gen_new_label();
5007 TCGLabel *l2 = gen_new_label();
5008 TCGv t0 = tcg_temp_new();
5009 TCGv t1 = tcg_temp_local_new();
5010 TCGv t2 = tcg_temp_local_new();
5011 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
5012 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5013 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
5014 tcg_gen_subfi_tl(t2, 32, t2);
5015 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
5016 tcg_gen_or_tl(t0, t0, t2);
5017 gen_store_spr(SPR_MQ, t0);
5018 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5019 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1);
5020 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
5021 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
5022 gen_set_label(l1);
5023 tcg_temp_free(t0);
5024 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
5025 tcg_gen_movi_tl(cpu_ca, 0);
5026 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
5027 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
5028 tcg_gen_movi_tl(cpu_ca, 1);
5029 gen_set_label(l2);
5030 tcg_temp_free(t1);
5031 tcg_temp_free(t2);
5032 if (unlikely(Rc(ctx->opcode) != 0))
5033 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5036 /* sre - sre. */
5037 static void gen_sre(DisasContext *ctx)
5039 TCGv t0 = tcg_temp_new();
5040 TCGv t1 = tcg_temp_new();
5041 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5042 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5043 tcg_gen_subfi_tl(t1, 32, t1);
5044 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5045 tcg_gen_or_tl(t1, t0, t1);
5046 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5047 gen_store_spr(SPR_MQ, t1);
5048 tcg_temp_free(t0);
5049 tcg_temp_free(t1);
5050 if (unlikely(Rc(ctx->opcode) != 0))
5051 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5054 /* srea - srea. */
5055 static void gen_srea(DisasContext *ctx)
5057 TCGv t0 = tcg_temp_new();
5058 TCGv t1 = tcg_temp_new();
5059 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5060 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5061 gen_store_spr(SPR_MQ, t0);
5062 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
5063 tcg_temp_free(t0);
5064 tcg_temp_free(t1);
5065 if (unlikely(Rc(ctx->opcode) != 0))
5066 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5069 /* sreq */
5070 static void gen_sreq(DisasContext *ctx)
5072 TCGv t0 = tcg_temp_new();
5073 TCGv t1 = tcg_temp_new();
5074 TCGv t2 = tcg_temp_new();
5075 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
5076 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5077 tcg_gen_shr_tl(t1, t1, t0);
5078 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
5079 gen_load_spr(t2, SPR_MQ);
5080 gen_store_spr(SPR_MQ, t0);
5081 tcg_gen_and_tl(t0, t0, t1);
5082 tcg_gen_andc_tl(t2, t2, t1);
5083 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
5084 tcg_temp_free(t0);
5085 tcg_temp_free(t1);
5086 tcg_temp_free(t2);
5087 if (unlikely(Rc(ctx->opcode) != 0))
5088 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5091 /* sriq */
5092 static void gen_sriq(DisasContext *ctx)
5094 int sh = SH(ctx->opcode);
5095 TCGv t0 = tcg_temp_new();
5096 TCGv t1 = tcg_temp_new();
5097 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5098 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
5099 tcg_gen_or_tl(t1, t0, t1);
5100 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5101 gen_store_spr(SPR_MQ, t1);
5102 tcg_temp_free(t0);
5103 tcg_temp_free(t1);
5104 if (unlikely(Rc(ctx->opcode) != 0))
5105 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5108 /* srliq */
5109 static void gen_srliq(DisasContext *ctx)
5111 int sh = SH(ctx->opcode);
5112 TCGv t0 = tcg_temp_new();
5113 TCGv t1 = tcg_temp_new();
5114 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5115 gen_load_spr(t1, SPR_MQ);
5116 gen_store_spr(SPR_MQ, t0);
5117 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
5118 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
5119 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5120 tcg_temp_free(t0);
5121 tcg_temp_free(t1);
5122 if (unlikely(Rc(ctx->opcode) != 0))
5123 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5126 /* srlq */
5127 static void gen_srlq(DisasContext *ctx)
5129 TCGLabel *l1 = gen_new_label();
5130 TCGLabel *l2 = gen_new_label();
5131 TCGv t0 = tcg_temp_local_new();
5132 TCGv t1 = tcg_temp_local_new();
5133 TCGv t2 = tcg_temp_local_new();
5134 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
5135 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5136 tcg_gen_shr_tl(t2, t1, t2);
5137 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5138 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5139 gen_load_spr(t0, SPR_MQ);
5140 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
5141 tcg_gen_br(l2);
5142 gen_set_label(l1);
5143 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5144 tcg_gen_and_tl(t0, t0, t2);
5145 gen_load_spr(t1, SPR_MQ);
5146 tcg_gen_andc_tl(t1, t1, t2);
5147 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5148 gen_set_label(l2);
5149 tcg_temp_free(t0);
5150 tcg_temp_free(t1);
5151 tcg_temp_free(t2);
5152 if (unlikely(Rc(ctx->opcode) != 0))
5153 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5156 /* srq */
5157 static void gen_srq(DisasContext *ctx)
5159 TCGLabel *l1 = gen_new_label();
5160 TCGv t0 = tcg_temp_new();
5161 TCGv t1 = tcg_temp_new();
5162 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5163 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5164 tcg_gen_subfi_tl(t1, 32, t1);
5165 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5166 tcg_gen_or_tl(t1, t0, t1);
5167 gen_store_spr(SPR_MQ, t1);
5168 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
5169 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5170 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5171 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
5172 gen_set_label(l1);
5173 tcg_temp_free(t0);
5174 tcg_temp_free(t1);
5175 if (unlikely(Rc(ctx->opcode) != 0))
5176 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5179 /* PowerPC 602 specific instructions */
5181 /* dsa */
5182 static void gen_dsa(DisasContext *ctx)
5184 /* XXX: TODO */
5185 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5188 /* esa */
5189 static void gen_esa(DisasContext *ctx)
5191 /* XXX: TODO */
5192 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5195 /* mfrom */
5196 static void gen_mfrom(DisasContext *ctx)
5198 #if defined(CONFIG_USER_ONLY)
5199 GEN_PRIV;
5200 #else
5201 CHK_SV;
5202 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5203 #endif /* defined(CONFIG_USER_ONLY) */
5206 /* 602 - 603 - G2 TLB management */
5208 /* tlbld */
5209 static void gen_tlbld_6xx(DisasContext *ctx)
5211 #if defined(CONFIG_USER_ONLY)
5212 GEN_PRIV;
5213 #else
5214 CHK_SV;
5215 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5216 #endif /* defined(CONFIG_USER_ONLY) */
5219 /* tlbli */
5220 static void gen_tlbli_6xx(DisasContext *ctx)
5222 #if defined(CONFIG_USER_ONLY)
5223 GEN_PRIV;
5224 #else
5225 CHK_SV;
5226 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5227 #endif /* defined(CONFIG_USER_ONLY) */
5230 /* 74xx TLB management */
5232 /* tlbld */
5233 static void gen_tlbld_74xx(DisasContext *ctx)
5235 #if defined(CONFIG_USER_ONLY)
5236 GEN_PRIV;
5237 #else
5238 CHK_SV;
5239 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5240 #endif /* defined(CONFIG_USER_ONLY) */
5243 /* tlbli */
5244 static void gen_tlbli_74xx(DisasContext *ctx)
5246 #if defined(CONFIG_USER_ONLY)
5247 GEN_PRIV;
5248 #else
5249 CHK_SV;
5250 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5251 #endif /* defined(CONFIG_USER_ONLY) */
5254 /* POWER instructions not in PowerPC 601 */
5256 /* clf */
5257 static void gen_clf(DisasContext *ctx)
5259 /* Cache line flush: implemented as no-op */
5262 /* cli */
5263 static void gen_cli(DisasContext *ctx)
5265 #if defined(CONFIG_USER_ONLY)
5266 GEN_PRIV;
5267 #else
5268 /* Cache line invalidate: privileged and treated as no-op */
5269 CHK_SV;
5270 #endif /* defined(CONFIG_USER_ONLY) */
5273 /* dclst */
5274 static void gen_dclst(DisasContext *ctx)
5276 /* Data cache line store: treated as no-op */
5279 static void gen_mfsri(DisasContext *ctx)
5281 #if defined(CONFIG_USER_ONLY)
5282 GEN_PRIV;
5283 #else
5284 int ra = rA(ctx->opcode);
5285 int rd = rD(ctx->opcode);
5286 TCGv t0;
5288 CHK_SV;
5289 t0 = tcg_temp_new();
5290 gen_addr_reg_index(ctx, t0);
5291 tcg_gen_shri_tl(t0, t0, 28);
5292 tcg_gen_andi_tl(t0, t0, 0xF);
5293 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
5294 tcg_temp_free(t0);
5295 if (ra != 0 && ra != rd)
5296 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
5297 #endif /* defined(CONFIG_USER_ONLY) */
5300 static void gen_rac(DisasContext *ctx)
5302 #if defined(CONFIG_USER_ONLY)
5303 GEN_PRIV;
5304 #else
5305 TCGv t0;
5307 CHK_SV;
5308 t0 = tcg_temp_new();
5309 gen_addr_reg_index(ctx, t0);
5310 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5311 tcg_temp_free(t0);
5312 #endif /* defined(CONFIG_USER_ONLY) */
5315 static void gen_rfsvc(DisasContext *ctx)
5317 #if defined(CONFIG_USER_ONLY)
5318 GEN_PRIV;
5319 #else
5320 CHK_SV;
5322 gen_helper_rfsvc(cpu_env);
5323 gen_sync_exception(ctx);
5324 #endif /* defined(CONFIG_USER_ONLY) */
5327 /* svc is not implemented for now */
5329 /* BookE specific instructions */
5331 /* XXX: not implemented on 440 ? */
5332 static void gen_mfapidi(DisasContext *ctx)
5334 /* XXX: TODO */
5335 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5338 /* XXX: not implemented on 440 ? */
5339 static void gen_tlbiva(DisasContext *ctx)
5341 #if defined(CONFIG_USER_ONLY)
5342 GEN_PRIV;
5343 #else
5344 TCGv t0;
5346 CHK_SV;
5347 t0 = tcg_temp_new();
5348 gen_addr_reg_index(ctx, t0);
5349 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5350 tcg_temp_free(t0);
5351 #endif /* defined(CONFIG_USER_ONLY) */
5354 /* All 405 MAC instructions are translated here */
5355 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5356 int ra, int rb, int rt, int Rc)
5358 TCGv t0, t1;
5360 t0 = tcg_temp_local_new();
5361 t1 = tcg_temp_local_new();
5363 switch (opc3 & 0x0D) {
5364 case 0x05:
5365 /* macchw - macchw. - macchwo - macchwo. */
5366 /* macchws - macchws. - macchwso - macchwso. */
5367 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
5368 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
5369 /* mulchw - mulchw. */
5370 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5371 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5372 tcg_gen_ext16s_tl(t1, t1);
5373 break;
5374 case 0x04:
5375 /* macchwu - macchwu. - macchwuo - macchwuo. */
5376 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
5377 /* mulchwu - mulchwu. */
5378 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5379 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5380 tcg_gen_ext16u_tl(t1, t1);
5381 break;
5382 case 0x01:
5383 /* machhw - machhw. - machhwo - machhwo. */
5384 /* machhws - machhws. - machhwso - machhwso. */
5385 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
5386 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
5387 /* mulhhw - mulhhw. */
5388 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5389 tcg_gen_ext16s_tl(t0, t0);
5390 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5391 tcg_gen_ext16s_tl(t1, t1);
5392 break;
5393 case 0x00:
5394 /* machhwu - machhwu. - machhwuo - machhwuo. */
5395 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
5396 /* mulhhwu - mulhhwu. */
5397 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5398 tcg_gen_ext16u_tl(t0, t0);
5399 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5400 tcg_gen_ext16u_tl(t1, t1);
5401 break;
5402 case 0x0D:
5403 /* maclhw - maclhw. - maclhwo - maclhwo. */
5404 /* maclhws - maclhws. - maclhwso - maclhwso. */
5405 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
5406 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
5407 /* mullhw - mullhw. */
5408 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5409 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5410 break;
5411 case 0x0C:
5412 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
5413 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
5414 /* mullhwu - mullhwu. */
5415 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5416 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5417 break;
5419 if (opc2 & 0x04) {
5420 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5421 tcg_gen_mul_tl(t1, t0, t1);
5422 if (opc2 & 0x02) {
5423 /* nmultiply-and-accumulate (0x0E) */
5424 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5425 } else {
5426 /* multiply-and-accumulate (0x0C) */
5427 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5430 if (opc3 & 0x12) {
5431 /* Check overflow and/or saturate */
5432 TCGLabel *l1 = gen_new_label();
5434 if (opc3 & 0x10) {
5435 /* Start with XER OV disabled, the most likely case */
5436 tcg_gen_movi_tl(cpu_ov, 0);
5438 if (opc3 & 0x01) {
5439 /* Signed */
5440 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5441 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5442 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5443 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5444 if (opc3 & 0x02) {
5445 /* Saturate */
5446 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5447 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5449 } else {
5450 /* Unsigned */
5451 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5452 if (opc3 & 0x02) {
5453 /* Saturate */
5454 tcg_gen_movi_tl(t0, UINT32_MAX);
5457 if (opc3 & 0x10) {
5458 /* Check overflow */
5459 tcg_gen_movi_tl(cpu_ov, 1);
5460 tcg_gen_movi_tl(cpu_so, 1);
5462 gen_set_label(l1);
5463 tcg_gen_mov_tl(cpu_gpr[rt], t0);
5465 } else {
5466 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5468 tcg_temp_free(t0);
5469 tcg_temp_free(t1);
5470 if (unlikely(Rc != 0)) {
5471 /* Update Rc0 */
5472 gen_set_Rc0(ctx, cpu_gpr[rt]);
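/*
 * Rough decode summary for gen_405_mulladd_insn above, derived from the
 * switch/if structure rather than from the manual: opc3 & 0x0D selects
 * which 16-bit halves feed the multiply and whether they are sign- or
 * zero-extended (0x05/0x04 cross halves, 0x01/0x00 high halves,
 * 0x0D/0x0C low halves); opc2 bit 0x04 requests accumulation into rt and
 * bit 0x02 negates the product first; opc3 bit 0x10 enables the XER OV/SO
 * update, bit 0x02 enables saturation, and bit 0x01 selects the signed
 * saturation bounds.
 */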
5476 #define GEN_MAC_HANDLER(name, opc2, opc3) \
5477 static void glue(gen_, name)(DisasContext *ctx) \
5479 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
5480 rD(ctx->opcode), Rc(ctx->opcode)); \
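/*
 * For illustration, GEN_MAC_HANDLER(macchw, 0x0C, 0x05) expands to,
 * whitespace aside:
 *
 *   static void gen_macchw(DisasContext *ctx)
 *   {
 *       gen_405_mulladd_insn(ctx, 0x0C, 0x05, rA(ctx->opcode),
 *                            rB(ctx->opcode), rD(ctx->opcode),
 *                            Rc(ctx->opcode));
 *   }
 */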
5483 /* macchw - macchw. */
5484 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5485 /* macchwo - macchwo. */
5486 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5487 /* macchws - macchws. */
5488 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5489 /* macchwso - macchwso. */
5490 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5491 /* macchwsu - macchwsu. */
5492 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5493 /* macchwsuo - macchwsuo. */
5494 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5495 /* macchwu - macchwu. */
5496 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5497 /* macchwuo - macchwuo. */
5498 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5499 /* machhw - machhw. */
5500 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5501 /* machhwo - machhwo. */
5502 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5503 /* machhws - machhws. */
5504 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5505 /* machhwso - machhwso. */
5506 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5507 /* machhwsu - machhwsu. */
5508 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5509 /* machhwsuo - machhwsuo. */
5510 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5511 /* machhwu - machhwu. */
5512 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5513 /* machhwuo - machhwuo. */
5514 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5515 /* maclhw - maclhw. */
5516 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5517 /* maclhwo - maclhwo. */
5518 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5519 /* maclhws - maclhws. */
5520 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5521 /* maclhwso - maclhwso. */
5522 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5523 /* maclhwu - maclhwu. */
5524 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5525 /* maclhwuo - maclhwuo. */
5526 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5527 /* maclhwsu - maclhwsu. */
5528 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5529 /* maclhwsuo - maclhwsuo. */
5530 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5531 /* nmacchw - nmacchw. */
5532 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5533 /* nmacchwo - nmacchwo. */
5534 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5535 /* nmacchws - nmacchws. */
5536 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5537 /* nmacchwso - nmacchwso. */
5538 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5539 /* nmachhw - nmachhw. */
5540 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5541 /* nmachhwo - nmachhwo. */
5542 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5543 /* nmachhws - nmachhws. */
5544 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5545 /* nmachhwso - nmachhwso. */
5546 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5547 /* nmaclhw - nmaclhw. */
5548 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5549 /* nmaclhwo - nmaclhwo. */
5550 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5551 /* nmaclhws - nmaclhws. */
5552 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5553 /* nmaclhwso - nmaclhwso. */
5554 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5556 /* mulchw - mulchw. */
5557 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5558 /* mulchwu - mulchwu. */
5559 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5560 /* mulhhw - mulhhw. */
5561 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5562 /* mulhhwu - mulhhwu. */
5563 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5564 /* mullhw - mullhw. */
5565 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5566 /* mullhwu - mullhwu. */
5567 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5569 /* mfdcr */
5570 static void gen_mfdcr(DisasContext *ctx)
5572 #if defined(CONFIG_USER_ONLY)
5573 GEN_PRIV;
5574 #else
5575 TCGv dcrn;
5577 CHK_SV;
5578 dcrn = tcg_const_tl(SPR(ctx->opcode));
5579 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
5580 tcg_temp_free(dcrn);
5581 #endif /* defined(CONFIG_USER_ONLY) */
5584 /* mtdcr */
5585 static void gen_mtdcr(DisasContext *ctx)
5587 #if defined(CONFIG_USER_ONLY)
5588 GEN_PRIV;
5589 #else
5590 TCGv dcrn;
5592 CHK_SV;
5593 dcrn = tcg_const_tl(SPR(ctx->opcode));
5594 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
5595 tcg_temp_free(dcrn);
5596 #endif /* defined(CONFIG_USER_ONLY) */
5599 /* mfdcrx */
5600 /* XXX: not implemented on 440 ? */
5601 static void gen_mfdcrx(DisasContext *ctx)
5603 #if defined(CONFIG_USER_ONLY)
5604 GEN_PRIV;
5605 #else
5606 CHK_SV;
5607 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5608 cpu_gpr[rA(ctx->opcode)]);
5609 /* Note: Rc update flag set leads to undefined state of Rc0 */
5610 #endif /* defined(CONFIG_USER_ONLY) */
5613 /* mtdcrx */
5614 /* XXX: not implemented on 440 ? */
5615 static void gen_mtdcrx(DisasContext *ctx)
5617 #if defined(CONFIG_USER_ONLY)
5618 GEN_PRIV;
5619 #else
5620 CHK_SV;
5621 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5622 cpu_gpr[rS(ctx->opcode)]);
5623 /* Note: Rc update flag set leads to undefined state of Rc0 */
5624 #endif /* defined(CONFIG_USER_ONLY) */
5627 /* mfdcrux (PPC 460) : user-mode access to DCR */
5628 static void gen_mfdcrux(DisasContext *ctx)
5630 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5631 cpu_gpr[rA(ctx->opcode)]);
5632 /* Note: Rc update flag set leads to undefined state of Rc0 */
5635 /* mtdcrux (PPC 460) : user-mode access to DCR */
5636 static void gen_mtdcrux(DisasContext *ctx)
5638 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5639 cpu_gpr[rS(ctx->opcode)]);
5640 /* Note: Rc update flag set leads to undefined state of Rc0 */
5643 /* dccci */
5644 static void gen_dccci(DisasContext *ctx)
5646 CHK_SV;
5647 /* interpreted as no-op */
5650 /* dcread */
5651 static void gen_dcread(DisasContext *ctx)
5653 #if defined(CONFIG_USER_ONLY)
5654 GEN_PRIV;
5655 #else
5656 TCGv EA, val;
5658 CHK_SV;
5659 gen_set_access_type(ctx, ACCESS_CACHE);
5660 EA = tcg_temp_new();
5661 gen_addr_reg_index(ctx, EA);
5662 val = tcg_temp_new();
5663 gen_qemu_ld32u(ctx, val, EA);
5664 tcg_temp_free(val);
5665 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5666 tcg_temp_free(EA);
5667 #endif /* defined(CONFIG_USER_ONLY) */
5670 /* icbt */
5671 static void gen_icbt_40x(DisasContext *ctx)
5673 /* interpreted as no-op */
5674 /* XXX: specification says this is treated as a load by the MMU
5675 * but does not generate any exception
5679 /* iccci */
5680 static void gen_iccci(DisasContext *ctx)
5682 CHK_SV;
5683 /* interpreted as no-op */
5686 /* icread */
5687 static void gen_icread(DisasContext *ctx)
5689 CHK_SV;
5690 /* interpreted as no-op */
5693 /* rfci (supervisor only) */
5694 static void gen_rfci_40x(DisasContext *ctx)
5696 #if defined(CONFIG_USER_ONLY)
5697 GEN_PRIV;
5698 #else
5699 CHK_SV;
5700 /* Restore CPU state */
5701 gen_helper_40x_rfci(cpu_env);
5702 gen_sync_exception(ctx);
5703 #endif /* defined(CONFIG_USER_ONLY) */
5706 static void gen_rfci(DisasContext *ctx)
5708 #if defined(CONFIG_USER_ONLY)
5709 GEN_PRIV;
5710 #else
5711 CHK_SV;
5712 /* Restore CPU state */
5713 gen_helper_rfci(cpu_env);
5714 gen_sync_exception(ctx);
5715 #endif /* defined(CONFIG_USER_ONLY) */
5718 /* BookE specific */
5720 /* XXX: not implemented on 440 ? */
5721 static void gen_rfdi(DisasContext *ctx)
5723 #if defined(CONFIG_USER_ONLY)
5724 GEN_PRIV;
5725 #else
5726 CHK_SV;
5727 /* Restore CPU state */
5728 gen_helper_rfdi(cpu_env);
5729 gen_sync_exception(ctx);
5730 #endif /* defined(CONFIG_USER_ONLY) */
5733 /* XXX: not implemented on 440 ? */
5734 static void gen_rfmci(DisasContext *ctx)
5736 #if defined(CONFIG_USER_ONLY)
5737 GEN_PRIV;
5738 #else
5739 CHK_SV;
5740 /* Restore CPU state */
5741 gen_helper_rfmci(cpu_env);
5742 gen_sync_exception(ctx);
5743 #endif /* defined(CONFIG_USER_ONLY) */
5746 /* TLB management - PowerPC 405 implementation */
5748 /* tlbre */
5749 static void gen_tlbre_40x(DisasContext *ctx)
5751 #if defined(CONFIG_USER_ONLY)
5752 GEN_PRIV;
5753 #else
5754 CHK_SV;
5755 switch (rB(ctx->opcode)) {
5756 case 0:
5757 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
5758 cpu_gpr[rA(ctx->opcode)]);
5759 break;
5760 case 1:
5761 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
5762 cpu_gpr[rA(ctx->opcode)]);
5763 break;
5764 default:
5765 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5766 break;
5768 #endif /* defined(CONFIG_USER_ONLY) */
5771 /* tlbsx - tlbsx. */
5772 static void gen_tlbsx_40x(DisasContext *ctx)
5774 #if defined(CONFIG_USER_ONLY)
5775 GEN_PRIV;
5776 #else
5777 TCGv t0;
5779 CHK_SV;
5780 t0 = tcg_temp_new();
5781 gen_addr_reg_index(ctx, t0);
5782 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5783 tcg_temp_free(t0);
5784 if (Rc(ctx->opcode)) {
5785 TCGLabel *l1 = gen_new_label();
5786 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5787 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5788 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5789 gen_set_label(l1);
5791 #endif /* defined(CONFIG_USER_ONLY) */
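/*
 * When Rc is set, the tlbsx. sequence above copies SO into CR0 and then sets
 * the EQ bit (0x02) only if the helper did not return -1, i.e. only when a
 * matching TLB entry was found; the 440 variant below follows the same
 * pattern.
 */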
5794 /* tlbwe */
5795 static void gen_tlbwe_40x(DisasContext *ctx)
5797 #if defined(CONFIG_USER_ONLY)
5798 GEN_PRIV;
5799 #else
5800 CHK_SV;
5802 switch (rB(ctx->opcode)) {
5803 case 0:
5804 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
5805 cpu_gpr[rS(ctx->opcode)]);
5806 break;
5807 case 1:
5808 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
5809 cpu_gpr[rS(ctx->opcode)]);
5810 break;
5811 default:
5812 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5813 break;
5815 #endif /* defined(CONFIG_USER_ONLY) */
5818 /* TLB management - PowerPC 440 implementation */
5820 /* tlbre */
5821 static void gen_tlbre_440(DisasContext *ctx)
5823 #if defined(CONFIG_USER_ONLY)
5824 GEN_PRIV;
5825 #else
5826 CHK_SV;
5828 switch (rB(ctx->opcode)) {
5829 case 0:
5830 case 1:
5831 case 2:
5833 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5834 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
5835 t0, cpu_gpr[rA(ctx->opcode)]);
5836 tcg_temp_free_i32(t0);
5838 break;
5839 default:
5840 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5841 break;
5843 #endif /* defined(CONFIG_USER_ONLY) */
5846 /* tlbsx - tlbsx. */
5847 static void gen_tlbsx_440(DisasContext *ctx)
5849 #if defined(CONFIG_USER_ONLY)
5850 GEN_PRIV;
5851 #else
5852 TCGv t0;
5854 CHK_SV;
5855 t0 = tcg_temp_new();
5856 gen_addr_reg_index(ctx, t0);
5857 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5858 tcg_temp_free(t0);
5859 if (Rc(ctx->opcode)) {
5860 TCGLabel *l1 = gen_new_label();
5861 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5862 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5863 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5864 gen_set_label(l1);
5866 #endif /* defined(CONFIG_USER_ONLY) */
5869 /* tlbwe */
5870 static void gen_tlbwe_440(DisasContext *ctx)
5872 #if defined(CONFIG_USER_ONLY)
5873 GEN_PRIV;
5874 #else
5875 CHK_SV;
5876 switch (rB(ctx->opcode)) {
5877 case 0:
5878 case 1:
5879 case 2:
5881 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5882 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
5883 cpu_gpr[rS(ctx->opcode)]);
5884 tcg_temp_free_i32(t0);
5886 break;
5887 default:
5888 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5889 break;
5891 #endif /* defined(CONFIG_USER_ONLY) */
5894 /* TLB management - PowerPC BookE 2.06 implementation */
5896 /* tlbre */
5897 static void gen_tlbre_booke206(DisasContext *ctx)
5899 #if defined(CONFIG_USER_ONLY)
5900 GEN_PRIV;
5901 #else
5902 CHK_SV;
5903 gen_helper_booke206_tlbre(cpu_env);
5904 #endif /* defined(CONFIG_USER_ONLY) */
5907 /* tlbsx - tlbsx. */
5908 static void gen_tlbsx_booke206(DisasContext *ctx)
5910 #if defined(CONFIG_USER_ONLY)
5911 GEN_PRIV;
5912 #else
5913 TCGv t0;
5915 CHK_SV;
5916 if (rA(ctx->opcode)) {
5917 t0 = tcg_temp_new();
5918 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
5919 } else {
5920 t0 = tcg_const_tl(0);
5923 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
5924 gen_helper_booke206_tlbsx(cpu_env, t0);
5925 tcg_temp_free(t0);
5926 #endif /* defined(CONFIG_USER_ONLY) */
5929 /* tlbwe */
5930 static void gen_tlbwe_booke206(DisasContext *ctx)
5932 #if defined(CONFIG_USER_ONLY)
5933 GEN_PRIV;
5934 #else
5935 CHK_SV;
5936 gen_helper_booke206_tlbwe(cpu_env);
5937 #endif /* defined(CONFIG_USER_ONLY) */
5940 static void gen_tlbivax_booke206(DisasContext *ctx)
5942 #if defined(CONFIG_USER_ONLY)
5943 GEN_PRIV;
5944 #else
5945 TCGv t0;
5947 CHK_SV;
5948 t0 = tcg_temp_new();
5949 gen_addr_reg_index(ctx, t0);
5950 gen_helper_booke206_tlbivax(cpu_env, t0);
5951 tcg_temp_free(t0);
5952 #endif /* defined(CONFIG_USER_ONLY) */
5955 static void gen_tlbilx_booke206(DisasContext *ctx)
5957 #if defined(CONFIG_USER_ONLY)
5958 GEN_PRIV;
5959 #else
5960 TCGv t0;
5962 CHK_SV;
5963 t0 = tcg_temp_new();
5964 gen_addr_reg_index(ctx, t0);
5966 switch ((ctx->opcode >> 21) & 0x3) {
5967 case 0:
5968 gen_helper_booke206_tlbilx0(cpu_env, t0);
5969 break;
5970 case 1:
5971 gen_helper_booke206_tlbilx1(cpu_env, t0);
5972 break;
5973 case 3:
5974 gen_helper_booke206_tlbilx3(cpu_env, t0);
5975 break;
5976 default:
5977 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5978 break;
5981 tcg_temp_free(t0);
5982 #endif /* defined(CONFIG_USER_ONLY) */
5986 /* wrtee */
5987 static void gen_wrtee(DisasContext *ctx)
5989 #if defined(CONFIG_USER_ONLY)
5990 GEN_PRIV;
5991 #else
5992 TCGv t0;
5994 CHK_SV;
5995 t0 = tcg_temp_new();
5996 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
5997 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
5998 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
5999 tcg_temp_free(t0);
6000 /* Stop translation to have a chance to raise an exception
6001 * if we just set msr_ee to 1
6003 gen_stop_exception(ctx);
6004 #endif /* defined(CONFIG_USER_ONLY) */
6007 /* wrteei */
6008 static void gen_wrteei(DisasContext *ctx)
6010 #if defined(CONFIG_USER_ONLY)
6011 GEN_PRIV;
6012 #else
6013 CHK_SV;
6014 if (ctx->opcode & 0x00008000) {
6015 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6016 /* Stop translation to have a chance to raise an exception */
6017 gen_stop_exception(ctx);
6018 } else {
6019 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6021 #endif /* defined(CONFIG_USER_ONLY) */
6024 /* PowerPC 440 specific instructions */
6026 /* dlmzb */
6027 static void gen_dlmzb(DisasContext *ctx)
6029 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6030 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
6031 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
6032 tcg_temp_free_i32(t0);
6035 /* mbar replaces eieio on 440 */
6036 static void gen_mbar(DisasContext *ctx)
6038 /* interpreted as no-op */
6041 /* msync replaces sync on 440 */
6042 static void gen_msync_4xx(DisasContext *ctx)
6044 /* interpreted as no-op */
6047 /* icbt */
6048 static void gen_icbt_440(DisasContext *ctx)
6050 /* interpreted as no-op */
6051 /* XXX: specification says this is treated as a load by the MMU
6052 * but does not generate any exception
6056 /* Embedded.Processor Control */
6058 static void gen_msgclr(DisasContext *ctx)
6060 #if defined(CONFIG_USER_ONLY)
6061 GEN_PRIV;
6062 #else
6063 CHK_SV;
6064 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6065 #endif /* defined(CONFIG_USER_ONLY) */
6068 static void gen_msgsnd(DisasContext *ctx)
6070 #if defined(CONFIG_USER_ONLY)
6071 GEN_PRIV;
6072 #else
6073 CHK_SV;
6074 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
6075 #endif /* defined(CONFIG_USER_ONLY) */
6079 #if defined(TARGET_PPC64)
6080 static void gen_maddld(DisasContext *ctx)
6082 TCGv_i64 t1 = tcg_temp_new_i64();
6084 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6085 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
6086 tcg_temp_free_i64(t1);
6089 /* maddhd maddhdu */
6090 static void gen_maddhd_maddhdu(DisasContext *ctx)
6092 TCGv_i64 lo = tcg_temp_new_i64();
6093 TCGv_i64 hi = tcg_temp_new_i64();
6094 TCGv_i64 t1 = tcg_temp_new_i64();
6096 if (Rc(ctx->opcode)) {
6097 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
6098 cpu_gpr[rB(ctx->opcode)]);
6099 tcg_gen_movi_i64(t1, 0);
6100 } else {
6101 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
6102 cpu_gpr[rB(ctx->opcode)]);
6103 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
6105 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
6106 cpu_gpr[rC(ctx->opcode)], t1);
6107 tcg_temp_free_i64(lo);
6108 tcg_temp_free_i64(hi);
6109 tcg_temp_free_i64(t1);
6111 #endif /* defined(TARGET_PPC64) */
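/*
 * In gen_maddhd_maddhdu above, mulu2/muls2 produce the full 128-bit product
 * of rA and rB in lo:hi, and the add2 then adds rC, extended to 128 bits
 * with t1 as its zero or sign extension, so that rD receives the high
 * 64 bits of rA * rB + rC. The low opcode bit picked out by Rc() is what
 * selects the unsigned maddhdu form here.
 */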
6113 static void gen_tbegin(DisasContext *ctx)
6115 if (unlikely(!ctx->tm_enabled)) {
6116 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6117 return;
6119 gen_helper_tbegin(cpu_env);
6122 #define GEN_TM_NOOP(name) \
6123 static inline void gen_##name(DisasContext *ctx) \
6125 if (unlikely(!ctx->tm_enabled)) { \
6126 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \
6127 return; \
6129 /* Because tbegin always fails in QEMU, these user \
6130 * space instructions all have a simple implementation: \
6132 * CR[0] = 0b0 || MSR[TS] || 0b0 \
6133 * = 0b0 || 0b00 || 0b0 \
6134 */ \
6135 tcg_gen_movi_i32(cpu_crf[0], 0); \
6138 GEN_TM_NOOP(tend);
6139 GEN_TM_NOOP(tabort);
6140 GEN_TM_NOOP(tabortwc);
6141 GEN_TM_NOOP(tabortwci);
6142 GEN_TM_NOOP(tabortdc);
6143 GEN_TM_NOOP(tabortdci);
6144 GEN_TM_NOOP(tsr);
6146 static void gen_tcheck(DisasContext *ctx)
6148 if (unlikely(!ctx->tm_enabled)) {
6149 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6150 return;
6152 /* Because tbegin always fails, the tcheck implementation
6153 * is simple:
6155 * CR[CRF] = TDOOMED || MSR[TS] || 0b0
6156 * = 0b1 || 0b00 || 0b0
6158 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
6161 #if defined(CONFIG_USER_ONLY)
6162 #define GEN_TM_PRIV_NOOP(name) \
6163 static inline void gen_##name(DisasContext *ctx) \
6165 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \
6168 #else
6170 #define GEN_TM_PRIV_NOOP(name) \
6171 static inline void gen_##name(DisasContext *ctx) \
6173 CHK_SV; \
6174 if (unlikely(!ctx->tm_enabled)) { \
6175 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \
6176 return; \
6178 /* Because tbegin always fails, the implementation is \
6179 * simple: \
6181 * CR[0] = 0b0 || MSR[TS] || 0b0 \
6182  * = 0b0 || 0b00 || 0b0 \
6183 */ \
6184 tcg_gen_movi_i32(cpu_crf[0], 0); \
6187 #endif
6189 GEN_TM_PRIV_NOOP(treclaim);
6190 GEN_TM_PRIV_NOOP(trechkpt);
6192 #include "translate/fp-impl.inc.c"
6194 #include "translate/vmx-impl.inc.c"
6196 #include "translate/vsx-impl.inc.c"
6198 #include "translate/dfp-impl.inc.c"
6200 #include "translate/spe-impl.inc.c"
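/*
 * The opcode table below drives the decoder. As used throughout this file,
 * each GEN_HANDLER(name, opc1, opc2, opc3, inval, type) entry pairs a
 * gen_<name> callback with its primary/extended opcodes, a mask of bits
 * that must be zero for the encoding to be valid, and the PPC_* feature
 * flag that gates it; the _E variants add a PPC2_* flag, and the
 * 2-suffixed variants let the mnemonic string differ from the C
 * identifier (e.g. "addic.").
 */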
6202 static opcode_t opcodes[] = {
6203 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
6204 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
6205 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
6206 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER),
6207 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
6208 #if defined(TARGET_PPC64)
6209 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
6210 #endif
6211 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
6212 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
6213 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
6214 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6215 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6216 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6217 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6218 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
6219 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
6220 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
6221 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
6222 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
6223 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6224 #if defined(TARGET_PPC64)
6225 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
6226 #endif
6227 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
6228 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
6229 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6230 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6231 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6232 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
6233 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
6234 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
6235 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
6236 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6237 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6238 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6239 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6240 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
6241 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
6242 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
6243 #if defined(TARGET_PPC64)
6244 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
6245 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
6246 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
6247 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
6248 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
6249 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
6250 #endif
6251 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6252 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6253 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6254 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
6255 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
6256 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
6257 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
6258 #if defined(TARGET_PPC64)
6259 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
6260 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
6261 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
6262 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
6263 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
6264 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
6265 PPC_NONE, PPC2_ISA300),
6266 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
6267 PPC_NONE, PPC2_ISA300),
6268 #endif
6269 #if defined(TARGET_PPC64)
6270 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
6271 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
6272 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
6273 #endif
6274 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6275 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6276 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
6277 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
6278 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
6279 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
6280 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO),
6281 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
6282 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6283 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6284 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
6285 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6286 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6287 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
6288 #if defined(TARGET_PPC64)
6289 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
6290 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
6291 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
6292 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
6293 #endif
6294 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
6295 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
6296 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6297 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6298 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
6299 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
6300 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
6301 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
6302 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
6303 #if defined(TARGET_PPC64)
6304 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
6305 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6306 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6307 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6308 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6309 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
6310 #endif
6311 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW),
6312 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
6313 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6314 #if defined(TARGET_PPC64)
6315 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
6316 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
6317 #endif
6318 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
6319 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
6320 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
6321 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
6322 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
6323 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
6324 #if defined(TARGET_PPC64)
6325 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
6326 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
6327 #endif
6328 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
6329 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
6330 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
6331 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
6332 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
6333 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
6334 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
6335 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
6336 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
6337 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
6338 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC),
6339 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
6340 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
6341 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
6342 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
6343 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
6344 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
6345 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
6346 #if defined(TARGET_PPC64)
6347 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
6348 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
6349 PPC_SEGMENT_64B),
6350 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
6351 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
6352 PPC_SEGMENT_64B),
6353 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
6354 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
6355 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
6356 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B),
6357 #endif
6358 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
6359 /* XXX Those instructions will need to be handled differently for
6360 * different ISA versions */
6361 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE),
6362 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE),
6363 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
6364 #if defined(TARGET_PPC64)
6365 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI),
6366 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
6367 #endif
6368 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
6369 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
6370 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
6371 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
6372 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
6373 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
6374 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
6375 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
6376 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
6377 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
6378 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
6379 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
6380 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
6381 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
6382 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
6383 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
6384 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
6385 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
6386 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
6387 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
6388 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
6389 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
6390 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
6391 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
6392 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
6393 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
6394 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
6395 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
6396 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
6397 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
6398 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
6399 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
6400 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
6401 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
6402 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
6403 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
6404 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
6405 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
6406 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
6407 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
6408 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
6409 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
6410 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
6411 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
6412 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
6413 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
6414 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
6415 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
6416 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
6417 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
6418 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
6419 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
6420 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
6421 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
6422 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
6423 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
6424 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
6425 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
6426 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
6427 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
6428 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
6429 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
6430 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
6431 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
6432 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
6433 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
6434 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
6435 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
6436 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
6437 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
6438 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
6439 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
6440 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
6441 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
6442 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
6443 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
6444 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
6445 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
6446 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
6447 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
6448 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
6449 PPC_NONE, PPC2_BOOKE206),
6450 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
6451 PPC_NONE, PPC2_BOOKE206),
6452 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
6453 PPC_NONE, PPC2_BOOKE206),
6454 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
6455 PPC_NONE, PPC2_BOOKE206),
6456 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
6457 PPC_NONE, PPC2_BOOKE206),
6458 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001,
6459 PPC_NONE, PPC2_PRCNTL),
6460 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001,
6461 PPC_NONE, PPC2_PRCNTL),
6462 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
6463 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
6464 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
6465 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
6466 PPC_BOOKE, PPC2_BOOKE206),
6467 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE),
6468 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
6469 PPC_BOOKE, PPC2_BOOKE206),
6470 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
6471 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
6472 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
6473 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
6474 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
6475 #if defined(TARGET_PPC64)
6476 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
6477 PPC2_ISA300),
6478 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
6479 #endif
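/* The #undef/#define blocks below re-purpose the earlier helper macros
 * (GEN_INT_ARITH_ADD, GEN_LOGICAL1/2, GEN_LD, GEN_ST, GEN_CRLOGIC,
 * GEN_MAC_HANDLER, ...) so that the instructions those macros generated
 * earlier in this file get their entries in this same table.
 */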
6481 #undef GEN_INT_ARITH_ADD
6482 #undef GEN_INT_ARITH_ADD_CONST
6483 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
6484 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
6485 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
6486 add_ca, compute_ca, compute_ov) \
6487 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
6488 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
6489 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
6490 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
6491 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
6492 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
6493 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
6494 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
6495 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
6496 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
6497 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
6499 #undef GEN_INT_ARITH_DIVW
6500 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
6501 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
6502 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
6503 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
6504 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
6505 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
6506 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6507 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6508 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6509 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6510 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6511 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6513 #if defined(TARGET_PPC64)
6514 #undef GEN_INT_ARITH_DIVD
6515 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
6516 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6517 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
6518 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
6519 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
6520 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
6522 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6523 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6524 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6525 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6526 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6527 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6529 #undef GEN_INT_ARITH_MUL_HELPER
6530 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
6531 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6532 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
6533 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
6534 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
6535 #endif
6537 #undef GEN_INT_ARITH_SUBF
6538 #undef GEN_INT_ARITH_SUBF_CONST
6539 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
6540 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
6541 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
6542 add_ca, compute_ca, compute_ov) \
6543 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
6544 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
6545 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
6546 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
6547 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
6548 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
6549 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
6550 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
6551 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
6552 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
6553 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
6555 #undef GEN_LOGICAL1
6556 #undef GEN_LOGICAL2
6557 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
6558 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
6559 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
6560 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
6561 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
6562 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
6563 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
6564 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
6565 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
6566 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
6567 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
6568 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
6569 #if defined(TARGET_PPC64)
6570 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
6571 #endif
6573 #if defined(TARGET_PPC64)
6574 #undef GEN_PPC64_R2
6575 #undef GEN_PPC64_R4
6576 #define GEN_PPC64_R2(name, opc1, opc2) \
6577 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
6578 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
6579 PPC_64B)
6580 #define GEN_PPC64_R4(name, opc1, opc2) \
6581 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
6582 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
6583 PPC_64B), \
6584 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
6585 PPC_64B), \
6586 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
6587 PPC_64B)
6588 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
6589 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
6590 GEN_PPC64_R4(rldic, 0x1E, 0x04),
6591 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
6592 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
6593 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
6594 #endif
6596 #undef GEN_LD
6597 #undef GEN_LDU
6598 #undef GEN_LDUX
6599 #undef GEN_LDX_E
6600 #undef GEN_LDS
6601 #define GEN_LD(name, ldop, opc, type) \
6602 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
6603 #define GEN_LDU(name, ldop, opc, type) \
6604 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
6605 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
6606 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
6607 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \
6608 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
6609 #define GEN_LDS(name, ldop, op, type) \
6610 GEN_LD(name, ldop, op | 0x20, type) \
6611 GEN_LDU(name, ldop, op | 0x21, type) \
6612 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
6613 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
6615 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
6616 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
6617 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
6618 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
6619 #if defined(TARGET_PPC64)
6620 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
6621 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
6622 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B)
6623 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B)
6624 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
6626 /* HV/P7 and later only */
6627 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
6628 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
6629 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
6630 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
6631 #endif
6632 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
6633 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
6635 #undef GEN_ST
6636 #undef GEN_STU
6637 #undef GEN_STUX
6638 #undef GEN_STX_E
6639 #undef GEN_STS
6640 #define GEN_ST(name, stop, opc, type) \
6641 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
6642 #define GEN_STU(name, stop, opc, type) \
6643 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
6644 #define GEN_STUX(name, stop, opc2, opc3, type) \
6645 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
6646 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \
6647 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
6648 #define GEN_STS(name, stop, op, type) \
6649 GEN_ST(name, stop, op | 0x20, type) \
6650 GEN_STU(name, stop, op | 0x21, type) \
6651 GEN_STUX(name, stop, 0x17, op | 0x01, type) \
6652 GEN_STX(name, stop, 0x17, op | 0x00, type)
6654 GEN_STS(stb, st8, 0x06, PPC_INTEGER)
6655 GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
6656 GEN_STS(stw, st32, 0x04, PPC_INTEGER)
6657 #if defined(TARGET_PPC64)
6658 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B)
6659 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B)
6660 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
6661 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
6662 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
6663 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
6664 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
6665 #endif
6666 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
6667 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
6669 #undef GEN_CRLOGIC
6670 #define GEN_CRLOGIC(name, tcg_op, opc) \
6671 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
6672 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
6673 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
6674 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
6675 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
6676 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
6677 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
6678 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
6679 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
6681 #undef GEN_MAC_HANDLER
6682 #define GEN_MAC_HANDLER(name, opc2, opc3) \
6683 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
6684 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
6685 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
6686 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
6687 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
6688 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
6689 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
6690 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
6691 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
6692 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
6693 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
6694 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
6695 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
6696 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
6697 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
6698 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
6699 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
6700 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
6701 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
6702 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
6703 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
6704 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
6705 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
6706 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
6707 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
6708 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
6709 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
6710 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
6711 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
6712 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
6713 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
6714 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
6715 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
6716 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
6717 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
6718 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
6719 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
6720 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
6721 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
6722 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
6723 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
6724 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
6725 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
6727 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
6728 PPC_NONE, PPC2_TM),
6729 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \
6730 PPC_NONE, PPC2_TM),
6731 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
6732 PPC_NONE, PPC2_TM),
6733 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
6734 PPC_NONE, PPC2_TM),
6735 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
6736 PPC_NONE, PPC2_TM),
6737 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
6738 PPC_NONE, PPC2_TM),
6739 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
6740 PPC_NONE, PPC2_TM),
6741 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
6742 PPC_NONE, PPC2_TM),
6743 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
6744 PPC_NONE, PPC2_TM),
6745 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
6746 PPC_NONE, PPC2_TM),
6747 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
6748 PPC_NONE, PPC2_TM),
6750 #include "translate/fp-ops.inc.c"
6752 #include "translate/vmx-ops.inc.c"
6754 #include "translate/vsx-ops.inc.c"
6756 #include "translate/dfp-ops.inc.c"
6758 #include "translate/spe-ops.inc.c"
6759 };
6761 #include "helper_regs.h"
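/* translate_init.c is included here rather than compiled separately: it
 * holds the CPU family/model definitions and the code that builds each
 * CPU's opcode dispatch tables from the opcodes[] array above.
 */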
6762 #include "translate_init.c"
6764 /*****************************************************************************/
6765 /* Misc PowerPC helpers */
6766 void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
6767 int flags)
6768 {
6769 #define RGPL 4
6770 #define RFPL 4
6772 PowerPCCPU *cpu = POWERPC_CPU(cs);
6773 CPUPPCState *env = &cpu->env;
6774 int i;
6776 cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
6777 TARGET_FMT_lx " XER " TARGET_FMT_lx " CPU#%d\n",
6778 env->nip, env->lr, env->ctr, cpu_read_xer(env),
6779 cs->cpu_index);
6780 cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
6781 TARGET_FMT_lx " iidx %d didx %d\n",
6782 env->msr, env->spr[SPR_HID0],
6783 env->hflags, env->immu_idx, env->dmmu_idx);
6784 #if !defined(NO_TIMER_DUMP)
6785 cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
6786 #if !defined(CONFIG_USER_ONLY)
6787 " DECR %08" PRIu32
6788 #endif
6789 "\n",
6790 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
6791 #if !defined(CONFIG_USER_ONLY)
6792 , cpu_ppc_load_decr(env)
6793 #endif
6794 );
6795 #endif
6796 for (i = 0; i < 32; i++) {
6797 if ((i & (RGPL - 1)) == 0)
6798 cpu_fprintf(f, "GPR%02d", i);
6799 cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
6800 if ((i & (RGPL - 1)) == (RGPL - 1))
6801 cpu_fprintf(f, "\n");
6802 }
6803 cpu_fprintf(f, "CR ");
6804 for (i = 0; i < 8; i++)
6805 cpu_fprintf(f, "%01x", env->crf[i]);
6806 cpu_fprintf(f, " [");
6807 for (i = 0; i < 8; i++) {
6808 char a = '-';
6809 if (env->crf[i] & 0x08)
6810 a = 'L';
6811 else if (env->crf[i] & 0x04)
6812 a = 'G';
6813 else if (env->crf[i] & 0x02)
6814 a = 'E';
6815 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
6816 }
6817 cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
6818 env->reserve_addr);
6819 for (i = 0; i < 32; i++) {
6820 if ((i & (RFPL - 1)) == 0)
6821 cpu_fprintf(f, "FPR%02d", i);
6822 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
6823 if ((i & (RFPL - 1)) == (RFPL - 1))
6824 cpu_fprintf(f, "\n");
6825 }
6826 cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr);
6827 #if !defined(CONFIG_USER_ONLY)
6828 cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx
6829 " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n",
6830 env->spr[SPR_SRR0], env->spr[SPR_SRR1],
6831 env->spr[SPR_PVR], env->spr[SPR_VRSAVE]);
6833 cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx
6834 " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n",
6835 env->spr[SPR_SPRG0], env->spr[SPR_SPRG1],
6836 env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]);
6838 cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx
6839 " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n",
6840 env->spr[SPR_SPRG4], env->spr[SPR_SPRG5],
6841 env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]);
6843 #if defined(TARGET_PPC64)
6844 if (env->excp_model == POWERPC_EXCP_POWER7 ||
6845 env->excp_model == POWERPC_EXCP_POWER8) {
6846 cpu_fprintf(f, "HSRR0 " TARGET_FMT_lx " HSRR1 " TARGET_FMT_lx "\n",
6847 env->spr[SPR_HSRR0], env->spr[SPR_HSRR1]);
6848 }
6849 #endif
6850 if (env->excp_model == POWERPC_EXCP_BOOKE) {
6851 cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx
6852 " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n",
6853 env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1],
6854 env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]);
6856 cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx
6857 " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n",
6858 env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR],
6859 env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]);
6861 cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx
6862 " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n",
6863 env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR],
6864 env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]);
6866 cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx
6867 " EPR " TARGET_FMT_lx "\n",
6868 env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8],
6869 env->spr[SPR_BOOKE_EPR]);
6871 /* FSL-specific */
6872 cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx
6873 " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n",
6874 env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1],
6875 env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]);
6877 /*
6878 * IVORs are left out as they are large and do not change often --
6879 * they can be read with "p $ivor0", "p $ivor1", etc.
6880 */
6881 }
6883 #if defined(TARGET_PPC64)
6884 if (env->flags & POWERPC_FLAG_CFAR) {
6885 cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar);
6886 }
6887 #endif
6889 switch (env->mmu_model) {
6890 case POWERPC_MMU_32B:
6891 case POWERPC_MMU_601:
6892 case POWERPC_MMU_SOFT_6xx:
6893 case POWERPC_MMU_SOFT_74xx:
6894 #if defined(TARGET_PPC64)
6895 case POWERPC_MMU_64B:
6896 case POWERPC_MMU_2_03:
6897 case POWERPC_MMU_2_06:
6898 case POWERPC_MMU_2_06a:
6899 case POWERPC_MMU_2_07:
6900 case POWERPC_MMU_2_07a:
6901 #endif
6902 cpu_fprintf(f, " SDR1 " TARGET_FMT_lx " DAR " TARGET_FMT_lx
6903 " DSISR " TARGET_FMT_lx "\n", env->spr[SPR_SDR1],
6904 env->spr[SPR_DAR], env->spr[SPR_DSISR]);
6905 break;
6906 case POWERPC_MMU_BOOKE206:
6907 cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx
6908 " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n",
6909 env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1],
6910 env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]);
6912 cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx
6913 " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n",
6914 env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6],
6915 env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]);
6917 cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx
6918 " TLB1CFG " TARGET_FMT_lx "\n",
6919 env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG],
6920 env->spr[SPR_BOOKE_TLB1CFG]);
6921 break;
6922 default:
6923 break;
6924 }
6925 #endif
6927 #undef RGPL
6928 #undef RFPL
6929 }
6931 void ppc_cpu_dump_statistics(CPUState *cs, FILE*f,
6932 fprintf_function cpu_fprintf, int flags)
6933 {
6934 #if defined(DO_PPC_STATISTICS)
6935 PowerPCCPU *cpu = POWERPC_CPU(cs);
6936 opc_handler_t **t1, **t2, **t3, *handler;
6937 int op1, op2, op3;
6939 t1 = cpu->env.opcodes;
6940 for (op1 = 0; op1 < 64; op1++) {
6941 handler = t1[op1];
6942 if (is_indirect_opcode(handler)) {
6943 t2 = ind_table(handler);
6944 for (op2 = 0; op2 < 32; op2++) {
6945 handler = t2[op2];
6946 if (is_indirect_opcode(handler)) {
6947 t3 = ind_table(handler);
6948 for (op3 = 0; op3 < 32; op3++) {
6949 handler = t3[op3];
6950 if (handler->count == 0)
6951 continue;
6952 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
6953 "%016" PRIx64 " %" PRId64 "\n",
6954 op1, op2, op3, op1, (op3 << 5) | op2,
6955 handler->oname,
6956 handler->count, handler->count);
6957 }
6958 } else {
6959 if (handler->count == 0)
6960 continue;
6961 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
6962 "%016" PRIx64 " %" PRId64 "\n",
6963 op1, op2, op1, op2, handler->oname,
6964 handler->count, handler->count);
6965 }
6966 }
6967 } else {
6968 if (handler->count == 0)
6969 continue;
6970 cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
6971 " %" PRId64 "\n",
6972 op1, op1, handler->oname,
6973 handler->count, handler->count);
6974 }
6975 }
6976 #endif
6977 }
6979 /*****************************************************************************/
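/* Main translation loop: fetch guest instructions at ctx.nip, walk the
 * opcode tables (opc1, then opc2/opc3/opc4 for indirect entries) to find
 * a handler, let the handler emit TCG ops, and stop at a page boundary,
 * on an exception, when single-stepping, or when the TB's instruction
 * budget is exhausted.
 */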
6980 void gen_intermediate_code(CPUPPCState *env, struct TranslationBlock *tb)
6981 {
6982 PowerPCCPU *cpu = ppc_env_get_cpu(env);
6983 CPUState *cs = CPU(cpu);
6984 DisasContext ctx, *ctxp = &ctx;
6985 opc_handler_t **table, *handler;
6986 target_ulong pc_start;
6987 int num_insns;
6988 int max_insns;
6990 pc_start = tb->pc;
6991 ctx.nip = pc_start;
6992 ctx.tb = tb;
6993 ctx.exception = POWERPC_EXCP_NONE;
6994 ctx.spr_cb = env->spr_cb;
6995 ctx.pr = msr_pr;
6996 ctx.mem_idx = env->dmmu_idx;
6997 ctx.dr = msr_dr;
6998 #if !defined(CONFIG_USER_ONLY)
6999 ctx.hv = msr_hv || !env->has_hv_mode;
7000 #endif
7001 ctx.insns_flags = env->insns_flags;
7002 ctx.insns_flags2 = env->insns_flags2;
7003 ctx.access_type = -1;
7004 ctx.need_access_type = !(env->mmu_model & POWERPC_MMU_64B);
7005 ctx.le_mode = !!(env->hflags & (1 << MSR_LE));
7006 ctx.default_tcg_memop_mask = ctx.le_mode ? MO_LE : MO_BE;
7007 #if defined(TARGET_PPC64)
7008 ctx.sf_mode = msr_is_64bit(env, env->msr);
7009 ctx.has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
7010 #endif
7011 if (env->mmu_model == POWERPC_MMU_32B ||
7012 env->mmu_model == POWERPC_MMU_601 ||
7013 (env->mmu_model & POWERPC_MMU_64B))
7014 ctx.lazy_tlb_flush = true;
7016 ctx.fpu_enabled = !!msr_fp;
7017 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
7018 ctx.spe_enabled = !!msr_spe;
7019 else
7020 ctx.spe_enabled = false;
7021 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
7022 ctx.altivec_enabled = !!msr_vr;
7023 else
7024 ctx.altivec_enabled = false;
7025 if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) {
7026 ctx.vsx_enabled = !!msr_vsx;
7027 } else {
7028 ctx.vsx_enabled = false;
7029 }
7030 #if defined(TARGET_PPC64)
7031 if ((env->flags & POWERPC_FLAG_TM) && msr_tm) {
7032 ctx.tm_enabled = !!msr_tm;
7033 } else {
7034 ctx.tm_enabled = false;
7035 }
7036 #endif
7037 if ((env->flags & POWERPC_FLAG_SE) && msr_se)
7038 ctx.singlestep_enabled = CPU_SINGLE_STEP;
7039 else
7040 ctx.singlestep_enabled = 0;
7041 if ((env->flags & POWERPC_FLAG_BE) && msr_be)
7042 ctx.singlestep_enabled |= CPU_BRANCH_STEP;
7043 if (unlikely(cs->singlestep_enabled)) {
7044 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
7045 }
7046 #if defined (DO_SINGLE_STEP) && 0
7047 /* Single step trace mode */
7048 msr_se = 1;
7049 #endif
7050 num_insns = 0;
7051 max_insns = tb->cflags & CF_COUNT_MASK;
7052 if (max_insns == 0) {
7053 max_insns = CF_COUNT_MASK;
7054 }
7055 if (max_insns > TCG_MAX_INSNS) {
7056 max_insns = TCG_MAX_INSNS;
7057 }
7059 gen_tb_start(tb);
7060 tcg_clear_temp_count();
7061 /* Set env in case of segfault during code fetch */
7062 while (ctx.exception == POWERPC_EXCP_NONE && !tcg_op_buf_full()) {
7063 tcg_gen_insn_start(ctx.nip);
7064 num_insns++;
7066 if (unlikely(cpu_breakpoint_test(cs, ctx.nip, BP_ANY))) {
7067 gen_debug_exception(ctxp);
7068 /* The address covered by the breakpoint must be included in
7069 [tb->pc, tb->pc + tb->size) in order for it to be
7070 properly cleared -- thus we increment the PC here so that
7071 the logic setting tb->size below does the right thing. */
7072 ctx.nip += 4;
7073 break;
7074 }
7076 LOG_DISAS("----------------\n");
7077 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
7078 ctx.nip, ctx.mem_idx, (int)msr_ir);
7079 if (num_insns == max_insns && (tb->cflags & CF_LAST_IO))
7080 gen_io_start();
7081 if (unlikely(need_byteswap(&ctx))) {
7082 ctx.opcode = bswap32(cpu_ldl_code(env, ctx.nip));
7083 } else {
7084 ctx.opcode = cpu_ldl_code(env, ctx.nip);
7085 }
7086 LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
7087 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
7088 opc3(ctx.opcode), opc4(ctx.opcode),
7089 ctx.le_mode ? "little" : "big");
7090 ctx.nip += 4;
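/* Walk the dispatch tables: opc1 selects the top-level entry; indirect
 * entries are refined by opc2, opc3 and finally opc4. */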
7091 table = env->opcodes;
7092 handler = table[opc1(ctx.opcode)];
7093 if (is_indirect_opcode(handler)) {
7094 table = ind_table(handler);
7095 handler = table[opc2(ctx.opcode)];
7096 if (is_indirect_opcode(handler)) {
7097 table = ind_table(handler);
7098 handler = table[opc3(ctx.opcode)];
7099 if (is_indirect_opcode(handler)) {
7100 table = ind_table(handler);
7101 handler = table[opc4(ctx.opcode)];
7102 }
7103 }
7104 }
7105 /* Is opcode *REALLY* valid ? */
7106 if (unlikely(handler->handler == &gen_invalid)) {
7107 qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
7108 "%02x - %02x - %02x - %02x (%08x) "
7109 TARGET_FMT_lx " %d\n",
7110 opc1(ctx.opcode), opc2(ctx.opcode),
7111 opc3(ctx.opcode), opc4(ctx.opcode),
7112 ctx.opcode, ctx.nip - 4, (int)msr_ir);
7113 } else {
7114 uint32_t inval;
7116 if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE) && Rc(ctx.opcode))) {
7117 inval = handler->inval2;
7118 } else {
7119 inval = handler->inval1;
7120 }
7122 if (unlikely((ctx.opcode & inval) != 0)) {
7123 qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
7124 "%02x - %02x - %02x - %02x (%08x) "
7125 TARGET_FMT_lx "\n", ctx.opcode & inval,
7126 opc1(ctx.opcode), opc2(ctx.opcode),
7127 opc3(ctx.opcode), opc4(ctx.opcode),
7128 ctx.opcode, ctx.nip - 4);
7129 gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL);
7130 break;
7131 }
7132 }
7133 (*(handler->handler))(&ctx);
7134 #if defined(DO_PPC_STATISTICS)
7135 handler->count++;
7136 #endif
7137 /* Check trace mode exceptions */
7138 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
7139 (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
7140 ctx.exception != POWERPC_SYSCALL &&
7141 ctx.exception != POWERPC_EXCP_TRAP &&
7142 ctx.exception != POWERPC_EXCP_BRANCH)) {
7143 gen_exception_nip(ctxp, POWERPC_EXCP_TRACE, ctx.nip);
7144 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
7145 (cs->singlestep_enabled) ||
7146 singlestep ||
7147 num_insns >= max_insns)) {
7148 /* if we reach a page boundary or are single stepping, stop
7149 * generation
7150 */
7151 break;
7152 }
7153 if (tcg_check_temp_count()) {
7154 fprintf(stderr, "Opcode %02x %02x %02x %02x (%08x) leaked "
7155 "temporaries\n", opc1(ctx.opcode), opc2(ctx.opcode),
7156 opc3(ctx.opcode), opc4(ctx.opcode), ctx.opcode);
7157 exit(1);
7158 }
7159 }
7160 if (tb->cflags & CF_LAST_IO)
7161 gen_io_end();
7162 if (ctx.exception == POWERPC_EXCP_NONE) {
7163 gen_goto_tb(&ctx, 0, ctx.nip);
7164 } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
7165 if (unlikely(cs->singlestep_enabled)) {
7166 gen_debug_exception(ctxp);
7167 }
7168 /* Generate the return instruction */
7169 tcg_gen_exit_tb(0);
7170 }
7171 gen_tb_end(tb, num_insns);
7173 tb->size = ctx.nip - pc_start;
7174 tb->icount = num_insns;
7176 #if defined(DEBUG_DISAS)
7177 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
7178 && qemu_log_in_addr_range(pc_start)) {
7179 int flags;
7180 flags = env->bfd_mach;
7181 flags |= ctx.le_mode << 16;
7182 qemu_log("IN: %s\n", lookup_symbol(pc_start));
7183 log_target_disas(cs, pc_start, ctx.nip - pc_start, flags);
7184 qemu_log("\n");
7185 }
7186 #endif
7187 }
7189 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
7190 target_ulong *data)
7191 {
7192 env->nip = data[0];
7193 }