qemu/ar7.git: target-ppc/translate.c (blob 59e9552d2b2a876e6b6a71a09d1b899e602e1984)
1 /*
2 * PowerPC emulation for qemu: main translation routines.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 * Copyright (C) 2011 Freescale Semiconductor, Inc.
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>. */
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "internal.h"
24 #include "disas/disas.h"
25 #include "exec/exec-all.h"
26 #include "tcg-op.h"
27 #include "qemu/host-utils.h"
28 #include "exec/cpu_ldst.h"
30 #include "exec/helper-proto.h"
31 #include "exec/helper-gen.h"
33 #include "trace-tcg.h"
34 #include "exec/log.h"
37 #define CPU_SINGLE_STEP 0x1
38 #define CPU_BRANCH_STEP 0x2
39 #define GDBSTUB_SINGLE_STEP 0x4
41 /* Include definitions for instruction classes and implementation flags */
42 //#define PPC_DEBUG_DISAS
43 //#define DO_PPC_STATISTICS
45 #ifdef PPC_DEBUG_DISAS
46 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
47 #else
48 # define LOG_DISAS(...) do { } while (0)
49 #endif
50 /*****************************************************************************/
51 /* Code translation helpers */
53 /* global register indexes */
54 static TCGv_env cpu_env;
55 static char cpu_reg_names[10*3 + 22*4 /* GPR */
56 + 10*4 + 22*5 /* SPE GPRh */
57 + 10*4 + 22*5 /* FPR */
58 + 2*(10*6 + 22*7) /* AVRh, AVRl */
59 + 10*5 + 22*6 /* VSR */
60 + 8*5 /* CRF */];
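/* The buffer above is sized for NUL-terminated names: "r0".."r9" need 3 bytes
 * and "r10".."r31" need 4 (hence 10*3 + 22*4 for the GPRs), and the same
 * pattern with one extra character per name covers the SPE high halves, the
 * FPRs, the AVR halves, the VSRs and the eight "crfN" fields registered in
 * ppc_translate_init() below. */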
61 static TCGv cpu_gpr[32];
62 static TCGv cpu_gprh[32];
63 static TCGv_i64 cpu_fpr[32];
64 static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
65 static TCGv_i64 cpu_vsr[32];
66 static TCGv_i32 cpu_crf[8];
67 static TCGv cpu_nip;
68 static TCGv cpu_msr;
69 static TCGv cpu_ctr;
70 static TCGv cpu_lr;
71 #if defined(TARGET_PPC64)
72 static TCGv cpu_cfar;
73 #endif
74 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca;
75 static TCGv cpu_reserve;
76 static TCGv cpu_fpscr;
77 static TCGv_i32 cpu_access_type;
79 #include "exec/gen-icount.h"
81 void ppc_translate_init(void)
83 int i;
84 char* p;
85 size_t cpu_reg_names_size;
86 static int done_init = 0;
88 if (done_init)
89 return;
91 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
92 tcg_ctx.tcg_env = cpu_env;
94 p = cpu_reg_names;
95 cpu_reg_names_size = sizeof(cpu_reg_names);
97 for (i = 0; i < 8; i++) {
98 snprintf(p, cpu_reg_names_size, "crf%d", i);
99 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
100 offsetof(CPUPPCState, crf[i]), p);
101 p += 5;
102 cpu_reg_names_size -= 5;
105 for (i = 0; i < 32; i++) {
106 snprintf(p, cpu_reg_names_size, "r%d", i);
107 cpu_gpr[i] = tcg_global_mem_new(cpu_env,
108 offsetof(CPUPPCState, gpr[i]), p);
109 p += (i < 10) ? 3 : 4;
110 cpu_reg_names_size -= (i < 10) ? 3 : 4;
111 snprintf(p, cpu_reg_names_size, "r%dH", i);
112 cpu_gprh[i] = tcg_global_mem_new(cpu_env,
113 offsetof(CPUPPCState, gprh[i]), p);
114 p += (i < 10) ? 4 : 5;
115 cpu_reg_names_size -= (i < 10) ? 4 : 5;
117 snprintf(p, cpu_reg_names_size, "fp%d", i);
118 cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
119 offsetof(CPUPPCState, fpr[i]), p);
120 p += (i < 10) ? 4 : 5;
121 cpu_reg_names_size -= (i < 10) ? 4 : 5;
123 snprintf(p, cpu_reg_names_size, "avr%dH", i);
124 #ifdef HOST_WORDS_BIGENDIAN
125 cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env,
126 offsetof(CPUPPCState, avr[i].u64[0]), p);
127 #else
128 cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env,
129 offsetof(CPUPPCState, avr[i].u64[1]), p);
130 #endif
131 p += (i < 10) ? 6 : 7;
132 cpu_reg_names_size -= (i < 10) ? 6 : 7;
134 snprintf(p, cpu_reg_names_size, "avr%dL", i);
135 #ifdef HOST_WORDS_BIGENDIAN
136 cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env,
137 offsetof(CPUPPCState, avr[i].u64[1]), p);
138 #else
139 cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env,
140 offsetof(CPUPPCState, avr[i].u64[0]), p);
141 #endif
142 p += (i < 10) ? 6 : 7;
143 cpu_reg_names_size -= (i < 10) ? 6 : 7;
144 snprintf(p, cpu_reg_names_size, "vsr%d", i);
145 cpu_vsr[i] = tcg_global_mem_new_i64(cpu_env,
146 offsetof(CPUPPCState, vsr[i]), p);
147 p += (i < 10) ? 5 : 6;
148 cpu_reg_names_size -= (i < 10) ? 5 : 6;
151 cpu_nip = tcg_global_mem_new(cpu_env,
152 offsetof(CPUPPCState, nip), "nip");
154 cpu_msr = tcg_global_mem_new(cpu_env,
155 offsetof(CPUPPCState, msr), "msr");
157 cpu_ctr = tcg_global_mem_new(cpu_env,
158 offsetof(CPUPPCState, ctr), "ctr");
160 cpu_lr = tcg_global_mem_new(cpu_env,
161 offsetof(CPUPPCState, lr), "lr");
163 #if defined(TARGET_PPC64)
164 cpu_cfar = tcg_global_mem_new(cpu_env,
165 offsetof(CPUPPCState, cfar), "cfar");
166 #endif
168 cpu_xer = tcg_global_mem_new(cpu_env,
169 offsetof(CPUPPCState, xer), "xer");
170 cpu_so = tcg_global_mem_new(cpu_env,
171 offsetof(CPUPPCState, so), "SO");
172 cpu_ov = tcg_global_mem_new(cpu_env,
173 offsetof(CPUPPCState, ov), "OV");
174 cpu_ca = tcg_global_mem_new(cpu_env,
175 offsetof(CPUPPCState, ca), "CA");
177 cpu_reserve = tcg_global_mem_new(cpu_env,
178 offsetof(CPUPPCState, reserve_addr),
179 "reserve_addr");
181 cpu_fpscr = tcg_global_mem_new(cpu_env,
182 offsetof(CPUPPCState, fpscr), "fpscr");
184 cpu_access_type = tcg_global_mem_new_i32(cpu_env,
185 offsetof(CPUPPCState, access_type), "access_type");
187 done_init = 1;
190 /* internal defines */
191 struct DisasContext {
192 struct TranslationBlock *tb;
193 target_ulong nip;
194 uint32_t opcode;
195 uint32_t exception;
196 /* Routine used to access memory */
197 bool pr, hv, dr, le_mode;
198 bool lazy_tlb_flush;
199 bool need_access_type;
200 int mem_idx;
201 int access_type;
202 /* Translation flags */
203 TCGMemOp default_tcg_memop_mask;
204 #if defined(TARGET_PPC64)
205 bool sf_mode;
206 bool has_cfar;
207 #endif
208 bool fpu_enabled;
209 bool altivec_enabled;
210 bool vsx_enabled;
211 bool spe_enabled;
212 bool tm_enabled;
213 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
214 int singlestep_enabled;
215 uint64_t insns_flags;
216 uint64_t insns_flags2;
219 /* Return true iff byteswap is needed in a scalar memop */
220 static inline bool need_byteswap(const DisasContext *ctx)
222 #if defined(TARGET_WORDS_BIGENDIAN)
223 return ctx->le_mode;
224 #else
225 return !ctx->le_mode;
226 #endif
229 /* True when active word size < size of target_long. */
230 #ifdef TARGET_PPC64
231 # define NARROW_MODE(C) (!(C)->sf_mode)
232 #else
233 # define NARROW_MODE(C) 0
234 #endif
236 struct opc_handler_t {
237 /* invalid bits for instruction 1 (Rc(opcode) == 0) */
238 uint32_t inval1;
239 /* invalid bits for instruction 2 (Rc(opcode) == 1) */
240 uint32_t inval2;
241 /* instruction type */
242 uint64_t type;
243 /* extended instruction type */
244 uint64_t type2;
245 /* handler */
246 void (*handler)(DisasContext *ctx);
247 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
248 const char *oname;
249 #endif
250 #if defined(DO_PPC_STATISTICS)
251 uint64_t count;
252 #endif
255 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
257 if (ctx->need_access_type && ctx->access_type != access_type) {
258 tcg_gen_movi_i32(cpu_access_type, access_type);
259 ctx->access_type = access_type;
263 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
265 if (NARROW_MODE(ctx)) {
266 nip = (uint32_t)nip;
268 tcg_gen_movi_tl(cpu_nip, nip);
271 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
273 TCGv_i32 t0, t1;
275 /* These are all synchronous exceptions; we set the PC back to
276 * the faulting instruction. */
278 if (ctx->exception == POWERPC_EXCP_NONE) {
279 gen_update_nip(ctx, ctx->nip - 4);
281 t0 = tcg_const_i32(excp);
282 t1 = tcg_const_i32(error);
283 gen_helper_raise_exception_err(cpu_env, t0, t1);
284 tcg_temp_free_i32(t0);
285 tcg_temp_free_i32(t1);
286 ctx->exception = (excp);
289 static void gen_exception(DisasContext *ctx, uint32_t excp)
291 TCGv_i32 t0;
293 /* These are all synchronous exceptions; we set the PC back to
294 * the faulting instruction. */
296 if (ctx->exception == POWERPC_EXCP_NONE) {
297 gen_update_nip(ctx, ctx->nip - 4);
299 t0 = tcg_const_i32(excp);
300 gen_helper_raise_exception(cpu_env, t0);
301 tcg_temp_free_i32(t0);
302 ctx->exception = (excp);
305 static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
306 target_ulong nip)
308 TCGv_i32 t0;
310 gen_update_nip(ctx, nip);
311 t0 = tcg_const_i32(excp);
312 gen_helper_raise_exception(cpu_env, t0);
313 tcg_temp_free_i32(t0);
314 ctx->exception = (excp);
317 static void gen_debug_exception(DisasContext *ctx)
319 TCGv_i32 t0;
321 /* These are all synchronous exceptions; we set the PC back to
322 * the faulting instruction. */
324 if ((ctx->exception != POWERPC_EXCP_BRANCH) &&
325 (ctx->exception != POWERPC_EXCP_SYNC)) {
326 gen_update_nip(ctx, ctx->nip);
328 t0 = tcg_const_i32(EXCP_DEBUG);
329 gen_helper_raise_exception(cpu_env, t0);
330 tcg_temp_free_i32(t0);
333 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
335 /* Will be converted to program check if needed */
336 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
339 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
341 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
344 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
346 /* Will be converted to program check if needed */
347 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
350 /* Stop translation */
351 static inline void gen_stop_exception(DisasContext *ctx)
353 gen_update_nip(ctx, ctx->nip);
354 ctx->exception = POWERPC_EXCP_STOP;
357 #ifndef CONFIG_USER_ONLY
358 /* No need to update nip here, as execution flow will change */
359 static inline void gen_sync_exception(DisasContext *ctx)
361 ctx->exception = POWERPC_EXCP_SYNC;
363 #endif
365 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
366 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
368 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \
369 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
371 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
372 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
374 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \
375 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
377 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \
378 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)
380 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
381 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
383 typedef struct opcode_t {
384 unsigned char opc1, opc2, opc3, opc4;
385 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
386 unsigned char pad[4];
387 #endif
388 opc_handler_t handler;
389 const char *oname;
390 } opcode_t;
392 /* Helpers for priv. check */
393 #define GEN_PRIV \
394 do { \
395 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \
396 } while (0)
398 #if defined(CONFIG_USER_ONLY)
399 #define CHK_HV GEN_PRIV
400 #define CHK_SV GEN_PRIV
401 #define CHK_HVRM GEN_PRIV
402 #else
403 #define CHK_HV \
404 do { \
405 if (unlikely(ctx->pr || !ctx->hv)) { \
406 GEN_PRIV; \
408 } while (0)
409 #define CHK_SV \
410 do { \
411 if (unlikely(ctx->pr)) { \
412 GEN_PRIV; \
414 } while (0)
415 #define CHK_HVRM \
416 do { \
417 if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
418 GEN_PRIV; \
420 } while (0)
421 #endif
423 #define CHK_NONE
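/* Summary of the checks above: in user-only builds every privileged or
 * hypervisor instruction simply raises the privilege exception. Otherwise,
 * CHK_SV traps when running in problem state (ctx->pr), CHK_HV additionally
 * requires hypervisor state (ctx->hv), and CHK_HVRM further requires data
 * relocation to be off (real mode, !ctx->dr). */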
426 /*****************************************************************************/
427 /*** Instruction decoding ***/
428 #define EXTRACT_HELPER(name, shift, nb) \
429 static inline uint32_t name(uint32_t opcode) \
431 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
434 #define EXTRACT_SHELPER(name, shift, nb) \
435 static inline int32_t name(uint32_t opcode) \
437 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
440 #define EXTRACT_HELPER_SPLIT(name, shift1, nb1, shift2, nb2) \
441 static inline uint32_t name(uint32_t opcode) \
443 return (((opcode >> (shift1)) & ((1 << (nb1)) - 1)) << nb2) | \
444 ((opcode >> (shift2)) & ((1 << (nb2)) - 1)); \
447 #define EXTRACT_HELPER_DXFORM(name, \
448 d0_bits, shift_op_d0, shift_d0, \
449 d1_bits, shift_op_d1, shift_d1, \
450 d2_bits, shift_op_d2, shift_d2) \
451 static inline int16_t name(uint32_t opcode) \
453 return \
454 (((opcode >> (shift_op_d0)) & ((1 << (d0_bits)) - 1)) << (shift_d0)) | \
455 (((opcode >> (shift_op_d1)) & ((1 << (d1_bits)) - 1)) << (shift_d1)) | \
456 (((opcode >> (shift_op_d2)) & ((1 << (d2_bits)) - 1)) << (shift_d2)); \
460 /* Opcode part 1 */
461 EXTRACT_HELPER(opc1, 26, 6);
462 /* Opcode part 2 */
463 EXTRACT_HELPER(opc2, 1, 5);
464 /* Opcode part 3 */
465 EXTRACT_HELPER(opc3, 6, 5);
466 /* Opcode part 4 */
467 EXTRACT_HELPER(opc4, 16, 5);
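/* Example of the helpers above: EXTRACT_HELPER(opc1, 26, 6) expands to a
 * function returning (opcode >> 26) & 0x3F, i.e. the 6-bit primary opcode.
 * EXTRACT_SHELPER additionally sign-extends through the int16_t cast, and
 * EXTRACT_HELPER_SPLIT concatenates two discontiguous fields, with the first
 * field forming the high bits and the second the low bits. */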
468 /* Update Cr0 flags */
469 EXTRACT_HELPER(Rc, 0, 1);
470 /* Update Cr6 flags (Altivec) */
471 EXTRACT_HELPER(Rc21, 10, 1);
472 /* Destination */
473 EXTRACT_HELPER(rD, 21, 5);
474 /* Source */
475 EXTRACT_HELPER(rS, 21, 5);
476 /* First operand */
477 EXTRACT_HELPER(rA, 16, 5);
478 /* Second operand */
479 EXTRACT_HELPER(rB, 11, 5);
480 /* Third operand */
481 EXTRACT_HELPER(rC, 6, 5);
482 /*** Get CRn ***/
483 EXTRACT_HELPER(crfD, 23, 3);
484 EXTRACT_HELPER(crfS, 18, 3);
485 EXTRACT_HELPER(crbD, 21, 5);
486 EXTRACT_HELPER(crbA, 16, 5);
487 EXTRACT_HELPER(crbB, 11, 5);
488 /* SPR / TBL */
489 EXTRACT_HELPER(_SPR, 11, 10);
490 static inline uint32_t SPR(uint32_t opcode)
492 uint32_t sprn = _SPR(opcode);
494 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
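/* The SPR number is encoded in the instruction with its two 5-bit halves
 * swapped, so SPR() swaps them back. For example LR is SPR 8: the encoded
 * field is 0b01000_00000 = 0x100, and
 * ((0x100 >> 5) & 0x1F) | ((0x100 & 0x1F) << 5) recovers 8. */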
496 /*** Get constants ***/
497 /* 16 bits signed immediate value */
498 EXTRACT_SHELPER(SIMM, 0, 16);
499 /* 16 bits unsigned immediate value */
500 EXTRACT_HELPER(UIMM, 0, 16);
501 /* 5 bits signed immediate value */
502 EXTRACT_HELPER(SIMM5, 16, 5);
503 /* 5 bits unsigned immediate value */
504 EXTRACT_HELPER(UIMM5, 16, 5);
505 /* 4 bits unsigned immediate value */
506 EXTRACT_HELPER(UIMM4, 16, 4);
507 /* Bit count */
508 EXTRACT_HELPER(NB, 11, 5);
509 /* Shift count */
510 EXTRACT_HELPER(SH, 11, 5);
511 /* Vector shift count */
512 EXTRACT_HELPER(VSH, 6, 4);
513 /* Mask start */
514 EXTRACT_HELPER(MB, 6, 5);
515 /* Mask end */
516 EXTRACT_HELPER(ME, 1, 5);
517 /* Trap operand */
518 EXTRACT_HELPER(TO, 21, 5);
520 EXTRACT_HELPER(CRM, 12, 8);
522 #ifndef CONFIG_USER_ONLY
523 EXTRACT_HELPER(SR, 16, 4);
524 #endif
526 /* mtfsf/mtfsfi */
527 EXTRACT_HELPER(FPBF, 23, 3);
528 EXTRACT_HELPER(FPIMM, 12, 4);
529 EXTRACT_HELPER(FPL, 25, 1);
530 EXTRACT_HELPER(FPFLM, 17, 8);
531 EXTRACT_HELPER(FPW, 16, 1);
533 /* addpcis */
534 EXTRACT_HELPER_DXFORM(DX, 10, 6, 6, 5, 16, 1, 1, 0, 0)
535 #if defined(TARGET_PPC64)
536 /* darn */
537 EXTRACT_HELPER(L, 16, 2);
538 #endif
540 /*** Jump target decoding ***/
541 /* Immediate address */
542 static inline target_ulong LI(uint32_t opcode)
544 return (opcode >> 0) & 0x03FFFFFC;
547 static inline uint32_t BD(uint32_t opcode)
549 return (opcode >> 0) & 0xFFFC;
552 EXTRACT_HELPER(BO, 21, 5);
553 EXTRACT_HELPER(BI, 16, 5);
554 /* Absolute/relative address */
555 EXTRACT_HELPER(AA, 1, 1);
556 /* Link */
557 EXTRACT_HELPER(LK, 0, 1);
559 /* DFP Z22-form */
560 EXTRACT_HELPER(DCM, 10, 6)
562 /* DFP Z23-form */
563 EXTRACT_HELPER(RMC, 9, 2)
565 EXTRACT_HELPER_SPLIT(xT, 0, 1, 21, 5);
566 EXTRACT_HELPER_SPLIT(xS, 0, 1, 21, 5);
567 EXTRACT_HELPER_SPLIT(xA, 2, 1, 16, 5);
568 EXTRACT_HELPER_SPLIT(xB, 1, 1, 11, 5);
569 EXTRACT_HELPER_SPLIT(xC, 3, 1, 6, 5);
570 EXTRACT_HELPER(DM, 8, 2);
571 EXTRACT_HELPER(UIM, 16, 2);
572 EXTRACT_HELPER(SHW, 8, 2);
573 EXTRACT_HELPER(SP, 19, 2);
574 EXTRACT_HELPER(IMM8, 11, 8);
576 /*****************************************************************************/
577 /* PowerPC instructions table */
579 #if defined(DO_PPC_STATISTICS)
580 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
582 .opc1 = op1, \
583 .opc2 = op2, \
584 .opc3 = op3, \
585 .opc4 = 0xff, \
586 .handler = { \
587 .inval1 = invl, \
588 .type = _typ, \
589 .type2 = _typ2, \
590 .handler = &gen_##name, \
591 .oname = stringify(name), \
592 }, \
593 .oname = stringify(name), \
595 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
597 .opc1 = op1, \
598 .opc2 = op2, \
599 .opc3 = op3, \
600 .opc4 = 0xff, \
601 .handler = { \
602 .inval1 = invl1, \
603 .inval2 = invl2, \
604 .type = _typ, \
605 .type2 = _typ2, \
606 .handler = &gen_##name, \
607 .oname = stringify(name), \
608 }, \
609 .oname = stringify(name), \
611 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
613 .opc1 = op1, \
614 .opc2 = op2, \
615 .opc3 = op3, \
616 .opc4 = 0xff, \
617 .handler = { \
618 .inval1 = invl, \
619 .type = _typ, \
620 .type2 = _typ2, \
621 .handler = &gen_##name, \
622 .oname = onam, \
623 }, \
624 .oname = onam, \
626 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \
628 .opc1 = op1, \
629 .opc2 = op2, \
630 .opc3 = op3, \
631 .opc4 = op4, \
632 .handler = { \
633 .inval1 = invl, \
634 .type = _typ, \
635 .type2 = _typ2, \
636 .handler = &gen_##name, \
637 .oname = stringify(name), \
638 }, \
639 .oname = stringify(name), \
641 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \
643 .opc1 = op1, \
644 .opc2 = op2, \
645 .opc3 = op3, \
646 .opc4 = op4, \
647 .handler = { \
648 .inval1 = invl, \
649 .type = _typ, \
650 .type2 = _typ2, \
651 .handler = &gen_##name, \
652 .oname = onam, \
653 }, \
654 .oname = onam, \
656 #else
657 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
659 .opc1 = op1, \
660 .opc2 = op2, \
661 .opc3 = op3, \
662 .opc4 = 0xff, \
663 .handler = { \
664 .inval1 = invl, \
665 .type = _typ, \
666 .type2 = _typ2, \
667 .handler = &gen_##name, \
668 }, \
669 .oname = stringify(name), \
671 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
673 .opc1 = op1, \
674 .opc2 = op2, \
675 .opc3 = op3, \
676 .opc4 = 0xff, \
677 .handler = { \
678 .inval1 = invl1, \
679 .inval2 = invl2, \
680 .type = _typ, \
681 .type2 = _typ2, \
682 .handler = &gen_##name, \
683 }, \
684 .oname = stringify(name), \
686 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
688 .opc1 = op1, \
689 .opc2 = op2, \
690 .opc3 = op3, \
691 .opc4 = 0xff, \
692 .handler = { \
693 .inval1 = invl, \
694 .type = _typ, \
695 .type2 = _typ2, \
696 .handler = &gen_##name, \
697 }, \
698 .oname = onam, \
700 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \
702 .opc1 = op1, \
703 .opc2 = op2, \
704 .opc3 = op3, \
705 .opc4 = op4, \
706 .handler = { \
707 .inval1 = invl, \
708 .type = _typ, \
709 .type2 = _typ2, \
710 .handler = &gen_##name, \
711 }, \
712 .oname = stringify(name), \
714 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \
716 .opc1 = op1, \
717 .opc2 = op2, \
718 .opc3 = op3, \
719 .opc4 = op4, \
720 .handler = { \
721 .inval1 = invl, \
722 .type = _typ, \
723 .type2 = _typ2, \
724 .handler = &gen_##name, \
725 }, \
726 .oname = onam, \
728 #endif
730 /* SPR load/store helpers */
731 static inline void gen_load_spr(TCGv t, int reg)
733 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
736 static inline void gen_store_spr(int reg, TCGv t)
738 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
741 /* Invalid instruction */
742 static void gen_invalid(DisasContext *ctx)
744 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
747 static opc_handler_t invalid_handler = {
748 .inval1 = 0xFFFFFFFF,
749 .inval2 = 0xFFFFFFFF,
750 .type = PPC_NONE,
751 .type2 = PPC_NONE,
752 .handler = gen_invalid,
755 /*** Integer comparison ***/
757 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
759 TCGv t0 = tcg_temp_new();
760 TCGv_i32 t1 = tcg_temp_new_i32();
762 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
764 tcg_gen_setcond_tl((s ? TCG_COND_LT: TCG_COND_LTU), t0, arg0, arg1);
765 tcg_gen_trunc_tl_i32(t1, t0);
766 tcg_gen_shli_i32(t1, t1, CRF_LT);
767 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);
769 tcg_gen_setcond_tl((s ? TCG_COND_GT: TCG_COND_GTU), t0, arg0, arg1);
770 tcg_gen_trunc_tl_i32(t1, t0);
771 tcg_gen_shli_i32(t1, t1, CRF_GT);
772 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);
774 tcg_gen_setcond_tl(TCG_COND_EQ, t0, arg0, arg1);
775 tcg_gen_trunc_tl_i32(t1, t0);
776 tcg_gen_shli_i32(t1, t1, CRF_EQ);
777 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);
779 tcg_temp_free(t0);
780 tcg_temp_free_i32(t1);
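/* gen_op_cmp above assembles the 4-bit CR field directly: XER[SO] is copied
 * in first (landing in the CRF_SO position, since cpu_so holds 0 or 1), then
 * the LT, GT and EQ results are each computed with a setcond and OR-ed in at
 * their CRF_* bit positions. The signed/unsigned flavour only changes the
 * setcond conditions used. */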
783 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
785 TCGv t0 = tcg_const_tl(arg1);
786 gen_op_cmp(arg0, t0, s, crf);
787 tcg_temp_free(t0);
790 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
792 TCGv t0, t1;
793 t0 = tcg_temp_new();
794 t1 = tcg_temp_new();
795 if (s) {
796 tcg_gen_ext32s_tl(t0, arg0);
797 tcg_gen_ext32s_tl(t1, arg1);
798 } else {
799 tcg_gen_ext32u_tl(t0, arg0);
800 tcg_gen_ext32u_tl(t1, arg1);
802 gen_op_cmp(t0, t1, s, crf);
803 tcg_temp_free(t1);
804 tcg_temp_free(t0);
807 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
809 TCGv t0 = tcg_const_tl(arg1);
810 gen_op_cmp32(arg0, t0, s, crf);
811 tcg_temp_free(t0);
814 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
816 if (NARROW_MODE(ctx)) {
817 gen_op_cmpi32(reg, 0, 1, 0);
818 } else {
819 gen_op_cmpi(reg, 0, 1, 0);
823 /* cmp */
824 static void gen_cmp(DisasContext *ctx)
826 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
827 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
828 1, crfD(ctx->opcode));
829 } else {
830 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
831 1, crfD(ctx->opcode));
835 /* cmpi */
836 static void gen_cmpi(DisasContext *ctx)
838 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
839 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
840 1, crfD(ctx->opcode));
841 } else {
842 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
843 1, crfD(ctx->opcode));
847 /* cmpl */
848 static void gen_cmpl(DisasContext *ctx)
850 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
851 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
852 0, crfD(ctx->opcode));
853 } else {
854 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
855 0, crfD(ctx->opcode));
859 /* cmpli */
860 static void gen_cmpli(DisasContext *ctx)
862 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
863 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
864 0, crfD(ctx->opcode));
865 } else {
866 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
867 0, crfD(ctx->opcode));
871 /* cmprb - range comparison: isupper, isalpha, islower */
872 static void gen_cmprb(DisasContext *ctx)
874 TCGv_i32 src1 = tcg_temp_new_i32();
875 TCGv_i32 src2 = tcg_temp_new_i32();
876 TCGv_i32 src2lo = tcg_temp_new_i32();
877 TCGv_i32 src2hi = tcg_temp_new_i32();
878 TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];
880 tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
881 tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);
883 tcg_gen_andi_i32(src1, src1, 0xFF);
884 tcg_gen_ext8u_i32(src2lo, src2);
885 tcg_gen_shri_i32(src2, src2, 8);
886 tcg_gen_ext8u_i32(src2hi, src2);
888 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
889 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
890 tcg_gen_and_i32(crf, src2lo, src2hi);
892 if (ctx->opcode & 0x00200000) {
893 tcg_gen_shri_i32(src2, src2, 8);
894 tcg_gen_ext8u_i32(src2lo, src2);
895 tcg_gen_shri_i32(src2, src2, 8);
896 tcg_gen_ext8u_i32(src2hi, src2);
897 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
898 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
899 tcg_gen_and_i32(src2lo, src2lo, src2hi);
900 tcg_gen_or_i32(crf, crf, src2lo);
902 tcg_gen_shli_i32(crf, crf, CRF_GT);
903 tcg_temp_free_i32(src1);
904 tcg_temp_free_i32(src2);
905 tcg_temp_free_i32(src2lo);
906 tcg_temp_free_i32(src2hi);
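/* gen_cmprb above compares the low byte of rA against one or two inclusive
 * byte ranges taken from rB: bytes 0-1 give the first [lo, hi] pair and, when
 * the L bit (0x00200000) is set, bytes 2-3 give a second pair whose result is
 * OR-ed in. The in-range flag ends up in the GT bit of the target CR field;
 * the other bits of the field are left zero. */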
909 #if defined(TARGET_PPC64)
910 /* cmpeqb */
911 static void gen_cmpeqb(DisasContext *ctx)
913 gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
914 cpu_gpr[rB(ctx->opcode)]);
916 #endif
918 /* isel (PowerPC 2.03 specification) */
919 static void gen_isel(DisasContext *ctx)
921 uint32_t bi = rC(ctx->opcode);
922 uint32_t mask = 0x08 >> (bi & 0x03);
923 TCGv t0 = tcg_temp_new();
924 TCGv zr;
926 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
927 tcg_gen_andi_tl(t0, t0, mask);
929 zr = tcg_const_tl(0);
930 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
931 rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
932 cpu_gpr[rB(ctx->opcode)]);
933 tcg_temp_free(zr);
934 tcg_temp_free(t0);
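/* isel above selects between rA and rB on CR bit BI (the rC field): the
 * 4-bit CR field bi >> 2 is masked with 0x08 >> (bi & 3) to isolate that bit,
 * and a movcond picks rA (or the constant 0 when rA is register 0) if the
 * bit is set, rB otherwise. */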
937 /* cmpb: PowerPC 2.05 specification */
938 static void gen_cmpb(DisasContext *ctx)
940 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
941 cpu_gpr[rB(ctx->opcode)]);
944 /*** Integer arithmetic ***/
946 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
947 TCGv arg1, TCGv arg2, int sub)
949 TCGv t0 = tcg_temp_new();
951 tcg_gen_xor_tl(cpu_ov, arg0, arg2);
952 tcg_gen_xor_tl(t0, arg1, arg2);
953 if (sub) {
954 tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
955 } else {
956 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
958 tcg_temp_free(t0);
959 if (NARROW_MODE(ctx)) {
960 tcg_gen_ext32s_tl(cpu_ov, cpu_ov);
962 tcg_gen_shri_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1);
963 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
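/* The XOR trick above implements the usual signed-overflow rule: for an
 * addition (sub == 0), overflow occurs when both inputs have the same sign
 * and the result's sign differs, i.e. (res ^ arg2) & ~(arg1 ^ arg2) has its
 * sign bit set; for a subtraction res = arg2 - arg1 the inputs must differ
 * in sign instead, hence the plain AND. The sign bit (bit 31 in narrow mode,
 * after the ext32s) is then shifted down into OV and accumulated into SO. */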
966 /* Common add function */
967 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
968 TCGv arg2, bool add_ca, bool compute_ca,
969 bool compute_ov, bool compute_rc0)
971 TCGv t0 = ret;
973 if (compute_ca || compute_ov) {
974 t0 = tcg_temp_new();
977 if (compute_ca) {
978 if (NARROW_MODE(ctx)) {
979 /* Caution: a non-obvious corner case of the spec is that we
980 must produce the *entire* 64-bit addition, but produce the
981 carry into bit 32. */
982 TCGv t1 = tcg_temp_new();
983 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */
984 tcg_gen_add_tl(t0, arg1, arg2);
985 if (add_ca) {
986 tcg_gen_add_tl(t0, t0, cpu_ca);
988 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */
989 tcg_temp_free(t1);
990 tcg_gen_shri_tl(cpu_ca, cpu_ca, 32); /* extract bit 32 */
991 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
992 } else {
993 TCGv zero = tcg_const_tl(0);
994 if (add_ca) {
995 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, cpu_ca, zero);
996 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, arg2, zero);
997 } else {
998 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, arg2, zero);
1000 tcg_temp_free(zero);
1002 } else {
1003 tcg_gen_add_tl(t0, arg1, arg2);
1004 if (add_ca) {
1005 tcg_gen_add_tl(t0, t0, cpu_ca);
1009 if (compute_ov) {
1010 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1012 if (unlikely(compute_rc0)) {
1013 gen_set_Rc0(ctx, t0);
1016 if (!TCGV_EQUAL(t0, ret)) {
1017 tcg_gen_mov_tl(ret, t0);
1018 tcg_temp_free(t0);
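/* A plain-C sketch of the narrow-mode carry computation above (illustrative
 * only, assuming a 64-bit target_ulong):
 *     sum = a + b (+ ca);
 *     ca  = ((sum ^ (a ^ b)) >> 32) & 1;
 * a ^ b is the sum without carry propagation, so XOR-ing it with the real sum
 * leaves a 1 exactly where a carry came into that bit position; bit 32 of
 * that value is the carry out of the low 32-bit word. */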
1021 /* Add functions with two operands */
1022 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
1023 static void glue(gen_, name)(DisasContext *ctx) \
1025 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
1026 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1027 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1029 /* Add functions with one operand and one immediate */
1030 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
1031 add_ca, compute_ca, compute_ov) \
1032 static void glue(gen_, name)(DisasContext *ctx) \
1034 TCGv t0 = tcg_const_tl(const_val); \
1035 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
1036 cpu_gpr[rA(ctx->opcode)], t0, \
1037 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1038 tcg_temp_free(t0); \
1041 /* add add. addo addo. */
1042 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
1043 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
1044 /* addc addc. addco addco. */
1045 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
1046 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
1047 /* adde adde. addeo addeo. */
1048 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
1049 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
1050 /* addme addme. addmeo addmeo. */
1051 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
1052 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
1053 /* addze addze. addzeo addzeo. */
1054 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
1055 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
1056 /* addi */
1057 static void gen_addi(DisasContext *ctx)
1059 target_long simm = SIMM(ctx->opcode);
1061 if (rA(ctx->opcode) == 0) {
1062 /* li case */
1063 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
1064 } else {
1065 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
1066 cpu_gpr[rA(ctx->opcode)], simm);
1069 /* addic addic.*/
1070 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
1072 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
1073 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1074 c, 0, 1, 0, compute_rc0);
1075 tcg_temp_free(c);
1078 static void gen_addic(DisasContext *ctx)
1080 gen_op_addic(ctx, 0);
1083 static void gen_addic_(DisasContext *ctx)
1085 gen_op_addic(ctx, 1);
1088 /* addis */
1089 static void gen_addis(DisasContext *ctx)
1091 target_long simm = SIMM(ctx->opcode);
1093 if (rA(ctx->opcode) == 0) {
1094 /* lis case */
1095 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
1096 } else {
1097 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
1098 cpu_gpr[rA(ctx->opcode)], simm << 16);
1102 /* addpcis */
1103 static void gen_addpcis(DisasContext *ctx)
1105 target_long d = DX(ctx->opcode);
1107 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->nip + (d << 16));
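/* Note on addpcis: by this point ctx->nip already holds the address of the
 * next instruction (the exception helpers above subtract 4 to get back to the
 * faulting one), which matches the next-instruction address the instruction
 * is defined against, so no further adjustment is needed here. */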
1110 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
1111 TCGv arg2, int sign, int compute_ov)
1113 TCGv_i32 t0 = tcg_temp_new_i32();
1114 TCGv_i32 t1 = tcg_temp_new_i32();
1115 TCGv_i32 t2 = tcg_temp_new_i32();
1116 TCGv_i32 t3 = tcg_temp_new_i32();
1118 tcg_gen_trunc_tl_i32(t0, arg1);
1119 tcg_gen_trunc_tl_i32(t1, arg2);
1120 if (sign) {
1121 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1122 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1123 tcg_gen_and_i32(t2, t2, t3);
1124 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1125 tcg_gen_or_i32(t2, t2, t3);
1126 tcg_gen_movi_i32(t3, 0);
1127 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1128 tcg_gen_div_i32(t3, t0, t1);
1129 tcg_gen_extu_i32_tl(ret, t3);
1130 } else {
1131 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
1132 tcg_gen_movi_i32(t3, 0);
1133 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1134 tcg_gen_divu_i32(t3, t0, t1);
1135 tcg_gen_extu_i32_tl(ret, t3);
1137 if (compute_ov) {
1138 tcg_gen_extu_i32_tl(cpu_ov, t2);
1139 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1141 tcg_temp_free_i32(t0);
1142 tcg_temp_free_i32(t1);
1143 tcg_temp_free_i32(t2);
1144 tcg_temp_free_i32(t3);
1146 if (unlikely(Rc(ctx->opcode) != 0))
1147 gen_set_Rc0(ctx, ret);
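/* gen_op_arith_divw guards the host division: t2 flags the cases the ISA
 * leaves undefined (division by zero, and INT_MIN / -1 for the signed form),
 * and the movcond forces the divisor to 1 in those cases so the generated
 * div/divu can never trap. When compute_ov is set, t2 also becomes the OV
 * bit and is accumulated into SO. */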
1149 /* Div functions */
1150 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
1151 static void glue(gen_, name)(DisasContext *ctx) \
1153 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
1154 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1155 sign, compute_ov); \
1157 /* divwu divwu. divwuo divwuo. */
1158 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
1159 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
1160 /* divw divw. divwo divwo. */
1161 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
1162 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
1164 /* div[wd]eu[o][.] */
1165 #define GEN_DIVE(name, hlpr, compute_ov) \
1166 static void gen_##name(DisasContext *ctx) \
1168 TCGv_i32 t0 = tcg_const_i32(compute_ov); \
1169 gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \
1170 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
1171 tcg_temp_free_i32(t0); \
1172 if (unlikely(Rc(ctx->opcode) != 0)) { \
1173 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
1177 GEN_DIVE(divweu, divweu, 0);
1178 GEN_DIVE(divweuo, divweu, 1);
1179 GEN_DIVE(divwe, divwe, 0);
1180 GEN_DIVE(divweo, divwe, 1);
1182 #if defined(TARGET_PPC64)
1183 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
1184 TCGv arg2, int sign, int compute_ov)
1186 TCGv_i64 t0 = tcg_temp_new_i64();
1187 TCGv_i64 t1 = tcg_temp_new_i64();
1188 TCGv_i64 t2 = tcg_temp_new_i64();
1189 TCGv_i64 t3 = tcg_temp_new_i64();
1191 tcg_gen_mov_i64(t0, arg1);
1192 tcg_gen_mov_i64(t1, arg2);
1193 if (sign) {
1194 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1195 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1196 tcg_gen_and_i64(t2, t2, t3);
1197 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1198 tcg_gen_or_i64(t2, t2, t3);
1199 tcg_gen_movi_i64(t3, 0);
1200 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1201 tcg_gen_div_i64(ret, t0, t1);
1202 } else {
1203 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
1204 tcg_gen_movi_i64(t3, 0);
1205 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1206 tcg_gen_divu_i64(ret, t0, t1);
1208 if (compute_ov) {
1209 tcg_gen_mov_tl(cpu_ov, t2);
1210 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1212 tcg_temp_free_i64(t0);
1213 tcg_temp_free_i64(t1);
1214 tcg_temp_free_i64(t2);
1215 tcg_temp_free_i64(t3);
1217 if (unlikely(Rc(ctx->opcode) != 0))
1218 gen_set_Rc0(ctx, ret);
1221 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
1222 static void glue(gen_, name)(DisasContext *ctx) \
1224 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
1225 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1226 sign, compute_ov); \
1228 /* divdu divdu. divduo divduo. */
1229 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1230 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1231 /* divd divd. divdo divdo. */
1232 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1233 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1235 GEN_DIVE(divdeu, divdeu, 0);
1236 GEN_DIVE(divdeuo, divdeu, 1);
1237 GEN_DIVE(divde, divde, 0);
1238 GEN_DIVE(divdeo, divde, 1);
1239 #endif
1241 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
1242 TCGv arg2, int sign)
1244 TCGv_i32 t0 = tcg_temp_new_i32();
1245 TCGv_i32 t1 = tcg_temp_new_i32();
1247 tcg_gen_trunc_tl_i32(t0, arg1);
1248 tcg_gen_trunc_tl_i32(t1, arg2);
1249 if (sign) {
1250 TCGv_i32 t2 = tcg_temp_new_i32();
1251 TCGv_i32 t3 = tcg_temp_new_i32();
1252 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1253 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1254 tcg_gen_and_i32(t2, t2, t3);
1255 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1256 tcg_gen_or_i32(t2, t2, t3);
1257 tcg_gen_movi_i32(t3, 0);
1258 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1259 tcg_gen_rem_i32(t3, t0, t1);
1260 tcg_gen_ext_i32_tl(ret, t3);
1261 tcg_temp_free_i32(t2);
1262 tcg_temp_free_i32(t3);
1263 } else {
1264 TCGv_i32 t2 = tcg_const_i32(1);
1265 TCGv_i32 t3 = tcg_const_i32(0);
1266 tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
1267 tcg_gen_remu_i32(t3, t0, t1);
1268 tcg_gen_extu_i32_tl(ret, t3);
1269 tcg_temp_free_i32(t2);
1270 tcg_temp_free_i32(t3);
1272 tcg_temp_free_i32(t0);
1273 tcg_temp_free_i32(t1);
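/* The remainder helper uses the same trick as the divide helpers: when the
 * divisor is zero (or the operands are INT_MIN and -1 in the signed case) the
 * divisor is forced to 1, so the host rem/remu cannot trap and the result is
 * 0, which also happens to be the expected remainder for INT_MIN % -1. */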
1276 #define GEN_INT_ARITH_MODW(name, opc3, sign) \
1277 static void glue(gen_, name)(DisasContext *ctx) \
1279 gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \
1280 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1281 sign); \
1284 GEN_INT_ARITH_MODW(moduw, 0x08, 0);
1285 GEN_INT_ARITH_MODW(modsw, 0x18, 1);
1287 #if defined(TARGET_PPC64)
1288 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
1289 TCGv arg2, int sign)
1291 TCGv_i64 t0 = tcg_temp_new_i64();
1292 TCGv_i64 t1 = tcg_temp_new_i64();
1294 tcg_gen_mov_i64(t0, arg1);
1295 tcg_gen_mov_i64(t1, arg2);
1296 if (sign) {
1297 TCGv_i64 t2 = tcg_temp_new_i64();
1298 TCGv_i64 t3 = tcg_temp_new_i64();
1299 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1300 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1301 tcg_gen_and_i64(t2, t2, t3);
1302 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1303 tcg_gen_or_i64(t2, t2, t3);
1304 tcg_gen_movi_i64(t3, 0);
1305 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1306 tcg_gen_rem_i64(ret, t0, t1);
1307 tcg_temp_free_i64(t2);
1308 tcg_temp_free_i64(t3);
1309 } else {
1310 TCGv_i64 t2 = tcg_const_i64(1);
1311 TCGv_i64 t3 = tcg_const_i64(0);
1312 tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
1313 tcg_gen_remu_i64(ret, t0, t1);
1314 tcg_temp_free_i64(t2);
1315 tcg_temp_free_i64(t3);
1317 tcg_temp_free_i64(t0);
1318 tcg_temp_free_i64(t1);
1321 #define GEN_INT_ARITH_MODD(name, opc3, sign) \
1322 static void glue(gen_, name)(DisasContext *ctx) \
1324 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \
1325 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1326 sign); \
1329 GEN_INT_ARITH_MODD(modud, 0x08, 0);
1330 GEN_INT_ARITH_MODD(modsd, 0x18, 1);
1331 #endif
1333 /* mulhw mulhw. */
1334 static void gen_mulhw(DisasContext *ctx)
1336 TCGv_i32 t0 = tcg_temp_new_i32();
1337 TCGv_i32 t1 = tcg_temp_new_i32();
1339 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1340 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1341 tcg_gen_muls2_i32(t0, t1, t0, t1);
1342 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1343 tcg_temp_free_i32(t0);
1344 tcg_temp_free_i32(t1);
1345 if (unlikely(Rc(ctx->opcode) != 0))
1346 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1349 /* mulhwu mulhwu. */
1350 static void gen_mulhwu(DisasContext *ctx)
1352 TCGv_i32 t0 = tcg_temp_new_i32();
1353 TCGv_i32 t1 = tcg_temp_new_i32();
1355 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1356 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1357 tcg_gen_mulu2_i32(t0, t1, t0, t1);
1358 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1359 tcg_temp_free_i32(t0);
1360 tcg_temp_free_i32(t1);
1361 if (unlikely(Rc(ctx->opcode) != 0))
1362 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1365 /* mullw mullw. */
1366 static void gen_mullw(DisasContext *ctx)
1368 #if defined(TARGET_PPC64)
1369 TCGv_i64 t0, t1;
1370 t0 = tcg_temp_new_i64();
1371 t1 = tcg_temp_new_i64();
1372 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
1373 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
1374 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
1375 tcg_temp_free(t0);
1376 tcg_temp_free(t1);
1377 #else
1378 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1379 cpu_gpr[rB(ctx->opcode)]);
1380 #endif
1381 if (unlikely(Rc(ctx->opcode) != 0))
1382 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1385 /* mullwo mullwo. */
1386 static void gen_mullwo(DisasContext *ctx)
1388 TCGv_i32 t0 = tcg_temp_new_i32();
1389 TCGv_i32 t1 = tcg_temp_new_i32();
1391 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1392 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1393 tcg_gen_muls2_i32(t0, t1, t0, t1);
1394 #if defined(TARGET_PPC64)
1395 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
1396 #else
1397 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
1398 #endif
1400 tcg_gen_sari_i32(t0, t0, 31);
1401 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
1402 tcg_gen_extu_i32_tl(cpu_ov, t0);
1403 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1405 tcg_temp_free_i32(t0);
1406 tcg_temp_free_i32(t1);
1407 if (unlikely(Rc(ctx->opcode) != 0))
1408 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
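/* gen_mullwo detects overflow from the full 64-bit signed product produced
 * by muls2_i32: the result fits in 32 signed bits exactly when the high word
 * equals the sign-extension of the low word, so OV is set when
 * sari(lo, 31) != hi. On 64-bit targets the whole 64-bit product is written
 * to rD via the concat; on 32-bit targets only the low word is. */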
1411 /* mulli */
1412 static void gen_mulli(DisasContext *ctx)
1414 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1415 SIMM(ctx->opcode));
1418 #if defined(TARGET_PPC64)
1419 /* mulhd mulhd. */
1420 static void gen_mulhd(DisasContext *ctx)
1422 TCGv lo = tcg_temp_new();
1423 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
1424 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1425 tcg_temp_free(lo);
1426 if (unlikely(Rc(ctx->opcode) != 0)) {
1427 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1431 /* mulhdu mulhdu. */
1432 static void gen_mulhdu(DisasContext *ctx)
1434 TCGv lo = tcg_temp_new();
1435 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
1436 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1437 tcg_temp_free(lo);
1438 if (unlikely(Rc(ctx->opcode) != 0)) {
1439 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1443 /* mulld mulld. */
1444 static void gen_mulld(DisasContext *ctx)
1446 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1447 cpu_gpr[rB(ctx->opcode)]);
1448 if (unlikely(Rc(ctx->opcode) != 0))
1449 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1452 /* mulldo mulldo. */
1453 static void gen_mulldo(DisasContext *ctx)
1455 TCGv_i64 t0 = tcg_temp_new_i64();
1456 TCGv_i64 t1 = tcg_temp_new_i64();
1458 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
1459 cpu_gpr[rB(ctx->opcode)]);
1460 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);
1462 tcg_gen_sari_i64(t0, t0, 63);
1463 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
1464 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1466 tcg_temp_free_i64(t0);
1467 tcg_temp_free_i64(t1);
1469 if (unlikely(Rc(ctx->opcode) != 0)) {
1470 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1473 #endif
1475 /* Common subf function */
1476 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1477 TCGv arg2, bool add_ca, bool compute_ca,
1478 bool compute_ov, bool compute_rc0)
1480 TCGv t0 = ret;
1482 if (compute_ca || compute_ov) {
1483 t0 = tcg_temp_new();
1486 if (compute_ca) {
1487 /* dest = ~arg1 + arg2 [+ ca]. */
1488 if (NARROW_MODE(ctx)) {
1489 /* Caution: a non-obvious corner case of the spec is that we
1490 must produce the *entire* 64-bit addition, but produce the
1491 carry into bit 32. */
1492 TCGv inv1 = tcg_temp_new();
1493 TCGv t1 = tcg_temp_new();
1494 tcg_gen_not_tl(inv1, arg1);
1495 if (add_ca) {
1496 tcg_gen_add_tl(t0, arg2, cpu_ca);
1497 } else {
1498 tcg_gen_addi_tl(t0, arg2, 1);
1500 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */
1501 tcg_gen_add_tl(t0, t0, inv1);
1502 tcg_temp_free(inv1);
1503 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */
1504 tcg_temp_free(t1);
1505 tcg_gen_shri_tl(cpu_ca, cpu_ca, 32); /* extract bit 32 */
1506 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
1507 } else if (add_ca) {
1508 TCGv zero, inv1 = tcg_temp_new();
1509 tcg_gen_not_tl(inv1, arg1);
1510 zero = tcg_const_tl(0);
1511 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
1512 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
1513 tcg_temp_free(zero);
1514 tcg_temp_free(inv1);
1515 } else {
1516 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
1517 tcg_gen_sub_tl(t0, arg2, arg1);
1519 } else if (add_ca) {
1520 /* Since we're ignoring carry-out, we can simplify the
1521 standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. */
1522 tcg_gen_sub_tl(t0, arg2, arg1);
1523 tcg_gen_add_tl(t0, t0, cpu_ca);
1524 tcg_gen_subi_tl(t0, t0, 1);
1525 } else {
1526 tcg_gen_sub_tl(t0, arg2, arg1);
1529 if (compute_ov) {
1530 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1532 if (unlikely(compute_rc0)) {
1533 gen_set_Rc0(ctx, t0);
1536 if (!TCGV_EQUAL(t0, ret)) {
1537 tcg_gen_mov_tl(ret, t0);
1538 tcg_temp_free(t0);
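/* gen_op_arith_subf follows the architected formula dest = ~arg1 + arg2
 * (+ CA, or + 1 when no carry-in is used). For subtraction CA is a
 * "no borrow" flag: the carry out of ~arg1 + arg2 + 1 is 1 exactly when
 * arg2 >= arg1 unsigned, which is why the simple carry path can use a single
 * setcond GEU and the paths that do not need CA can use a plain subtract. */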
1541 /* Sub functions with two operands */
1542 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
1543 static void glue(gen_, name)(DisasContext *ctx) \
1545 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1546 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1547 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1549 /* Sub functions with one operand and one immediate */
1550 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
1551 add_ca, compute_ca, compute_ov) \
1552 static void glue(gen_, name)(DisasContext *ctx) \
1554 TCGv t0 = tcg_const_tl(const_val); \
1555 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1556 cpu_gpr[rA(ctx->opcode)], t0, \
1557 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1558 tcg_temp_free(t0); \
1560 /* subf subf. subfo subfo. */
1561 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
1562 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
1563 /* subfc subfc. subfco subfco. */
1564 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
1565 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
1566 /* subfe subfe. subfeo subfeo. */
1567 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
1568 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
1569 /* subfme subfme. subfmeo subfmeo. */
1570 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
1571 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
1572 /* subfze subfze. subfzeo subfzeo. */
1573 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
1574 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
1576 /* subfic */
1577 static void gen_subfic(DisasContext *ctx)
1579 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
1580 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1581 c, 0, 1, 0, 0);
1582 tcg_temp_free(c);
1585 /* neg neg. nego nego. */
1586 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
1588 TCGv zero = tcg_const_tl(0);
1589 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1590 zero, 0, 0, compute_ov, Rc(ctx->opcode));
1591 tcg_temp_free(zero);
1594 static void gen_neg(DisasContext *ctx)
1596 gen_op_arith_neg(ctx, 0);
1599 static void gen_nego(DisasContext *ctx)
1601 gen_op_arith_neg(ctx, 1);
1604 /*** Integer logical ***/
1605 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
1606 static void glue(gen_, name)(DisasContext *ctx) \
1608 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
1609 cpu_gpr[rB(ctx->opcode)]); \
1610 if (unlikely(Rc(ctx->opcode) != 0)) \
1611 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1614 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
1615 static void glue(gen_, name)(DisasContext *ctx) \
1617 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
1618 if (unlikely(Rc(ctx->opcode) != 0)) \
1619 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1622 /* and & and. */
1623 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
1624 /* andc & andc. */
1625 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
1627 /* andi. */
1628 static void gen_andi_(DisasContext *ctx)
1630 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
1631 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1634 /* andis. */
1635 static void gen_andis_(DisasContext *ctx)
1637 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
1638 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1641 /* cntlzw */
1642 static void gen_cntlzw(DisasContext *ctx)
1644 gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1645 if (unlikely(Rc(ctx->opcode) != 0))
1646 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1649 /* cnttzw */
1650 static void gen_cnttzw(DisasContext *ctx)
1652 gen_helper_cnttzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1653 if (unlikely(Rc(ctx->opcode) != 0)) {
1654 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1658 /* eqv & eqv. */
1659 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
1660 /* extsb & extsb. */
1661 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
1662 /* extsh & extsh. */
1663 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
1664 /* nand & nand. */
1665 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
1666 /* nor & nor. */
1667 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
1669 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
1670 static void gen_pause(DisasContext *ctx)
1672 TCGv_i32 t0 = tcg_const_i32(0);
1673 tcg_gen_st_i32(t0, cpu_env,
1674 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
1675 tcg_temp_free_i32(t0);
1677 /* Stop translation, this gives other CPUs a chance to run */
1678 gen_exception_nip(ctx, EXCP_HLT, ctx->nip);
1680 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
1682 /* or & or. */
1683 static void gen_or(DisasContext *ctx)
1685 int rs, ra, rb;
1687 rs = rS(ctx->opcode);
1688 ra = rA(ctx->opcode);
1689 rb = rB(ctx->opcode);
1690 /* Optimisation for mr. ri case */
1691 if (rs != ra || rs != rb) {
1692 if (rs != rb)
1693 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
1694 else
1695 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
1696 if (unlikely(Rc(ctx->opcode) != 0))
1697 gen_set_Rc0(ctx, cpu_gpr[ra]);
1698 } else if (unlikely(Rc(ctx->opcode) != 0)) {
1699 gen_set_Rc0(ctx, cpu_gpr[rs]);
1700 #if defined(TARGET_PPC64)
1701 } else if (rs != 0) { /* 0 is nop */
1702 int prio = 0;
1704 switch (rs) {
1705 case 1:
1706 /* Set process priority to low */
1707 prio = 2;
1708 break;
1709 case 6:
1710 /* Set process priority to medium-low */
1711 prio = 3;
1712 break;
1713 case 2:
1714 /* Set process priority to normal */
1715 prio = 4;
1716 break;
1717 #if !defined(CONFIG_USER_ONLY)
1718 case 31:
1719 if (!ctx->pr) {
1720 /* Set process priority to very low */
1721 prio = 1;
1723 break;
1724 case 5:
1725 if (!ctx->pr) {
1726 /* Set process priority to medium-high */
1727 prio = 5;
1729 break;
1730 case 3:
1731 if (!ctx->pr) {
1732 /* Set process priority to high */
1733 prio = 6;
1735 break;
1736 case 7:
1737 if (ctx->hv && !ctx->pr) {
1738 /* Set process priority to very high */
1739 prio = 7;
1741 break;
1742 #endif
1743 default:
1744 break;
1746 if (prio) {
1747 TCGv t0 = tcg_temp_new();
1748 gen_load_spr(t0, SPR_PPR);
1749 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
1750 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
1751 gen_store_spr(SPR_PPR, t0);
1752 tcg_temp_free(t0);
1754 #if !defined(CONFIG_USER_ONLY)
1755 /* Pause out of TCG, otherwise spin loops with smt_low eat too much
1756 * CPU and the kernel hangs. This applies to all encodings other
1757 * than no-op, e.g., miso(rs=26), yield(27), mdoio(29), mdoom(30),
1758 * and all currently undefined. */
1760 gen_pause(ctx);
1761 #endif
1762 #endif
1765 /* orc & orc. */
1766 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
1768 /* xor & xor. */
1769 static void gen_xor(DisasContext *ctx)
1771 /* Optimisation for "set to zero" case */
1772 if (rS(ctx->opcode) != rB(ctx->opcode))
1773 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1774 else
1775 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1776 if (unlikely(Rc(ctx->opcode) != 0))
1777 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1780 /* ori */
1781 static void gen_ori(DisasContext *ctx)
1783 target_ulong uimm = UIMM(ctx->opcode);
1785 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1786 return;
1788 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1791 /* oris */
1792 static void gen_oris(DisasContext *ctx)
1794 target_ulong uimm = UIMM(ctx->opcode);
1796 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1797 /* NOP */
1798 return;
1800 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1803 /* xori */
1804 static void gen_xori(DisasContext *ctx)
1806 target_ulong uimm = UIMM(ctx->opcode);
1808 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1809 /* NOP */
1810 return;
1812 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1815 /* xoris */
1816 static void gen_xoris(DisasContext *ctx)
1818 target_ulong uimm = UIMM(ctx->opcode);
1820 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1821 /* NOP */
1822 return;
1824 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1827 /* popcntb : PowerPC 2.03 specification */
1828 static void gen_popcntb(DisasContext *ctx)
1830 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1833 static void gen_popcntw(DisasContext *ctx)
1835 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1838 #if defined(TARGET_PPC64)
1839 /* popcntd: PowerPC 2.06 specification */
1840 static void gen_popcntd(DisasContext *ctx)
1842 gen_helper_popcntd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1844 #endif
1846 /* prtyw: PowerPC 2.05 specification */
1847 static void gen_prtyw(DisasContext *ctx)
1849 TCGv ra = cpu_gpr[rA(ctx->opcode)];
1850 TCGv rs = cpu_gpr[rS(ctx->opcode)];
1851 TCGv t0 = tcg_temp_new();
1852 tcg_gen_shri_tl(t0, rs, 16);
1853 tcg_gen_xor_tl(ra, rs, t0);
1854 tcg_gen_shri_tl(t0, ra, 8);
1855 tcg_gen_xor_tl(ra, ra, t0);
1856 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
1857 tcg_temp_free(t0);
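/* gen_prtyw XOR-folds each 32-bit word onto itself (by 16, then by 8) so
 * that bit 0 of every byte in the word ends up XOR-ed into bit 0 of that
 * word, then keeps only those bits with the 0x100000001 mask (which
 * truncates to 1 on 32-bit targets). This yields the per-word parity of the
 * bytes' low-order bits that prtyw is defined to compute. */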
1860 #if defined(TARGET_PPC64)
1861 /* prtyd: PowerPC 2.05 specification */
1862 static void gen_prtyd(DisasContext *ctx)
1864 TCGv ra = cpu_gpr[rA(ctx->opcode)];
1865 TCGv rs = cpu_gpr[rS(ctx->opcode)];
1866 TCGv t0 = tcg_temp_new();
1867 tcg_gen_shri_tl(t0, rs, 32);
1868 tcg_gen_xor_tl(ra, rs, t0);
1869 tcg_gen_shri_tl(t0, ra, 16);
1870 tcg_gen_xor_tl(ra, ra, t0);
1871 tcg_gen_shri_tl(t0, ra, 8);
1872 tcg_gen_xor_tl(ra, ra, t0);
1873 tcg_gen_andi_tl(ra, ra, 1);
1874 tcg_temp_free(t0);
1876 #endif
1878 #if defined(TARGET_PPC64)
1879 /* bpermd */
1880 static void gen_bpermd(DisasContext *ctx)
1882 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
1883 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1885 #endif
1887 #if defined(TARGET_PPC64)
1888 /* extsw & extsw. */
1889 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
1891 /* cntlzd */
1892 static void gen_cntlzd(DisasContext *ctx)
1894 gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1895 if (unlikely(Rc(ctx->opcode) != 0))
1896 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1899 /* cnttzd */
1900 static void gen_cnttzd(DisasContext *ctx)
1902 gen_helper_cnttzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1903 if (unlikely(Rc(ctx->opcode) != 0)) {
1904 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1908 /* darn */
1909 static void gen_darn(DisasContext *ctx)
1911 int l = L(ctx->opcode);
1913 if (l == 0) {
1914 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
1915 } else if (l <= 2) {
1916 /* Return 64-bit random for both CRN and RRN */
1917 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
1918 } else {
1919 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
1922 #endif
1924 /*** Integer rotate ***/
1926 /* rlwimi & rlwimi. */
1927 static void gen_rlwimi(DisasContext *ctx)
1929 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
1930 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
1931 uint32_t sh = SH(ctx->opcode);
1932 uint32_t mb = MB(ctx->opcode);
1933 uint32_t me = ME(ctx->opcode);
1935 if (sh == (31-me) && mb <= me) {
1936 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
1937 } else {
1938 target_ulong mask;
1939 TCGv t1;
1941 #if defined(TARGET_PPC64)
1942 mb += 32;
1943 me += 32;
1944 #endif
1945 mask = MASK(mb, me);
1947 t1 = tcg_temp_new();
1948 if (mask <= 0xffffffffu) {
1949 TCGv_i32 t0 = tcg_temp_new_i32();
1950 tcg_gen_trunc_tl_i32(t0, t_rs);
1951 tcg_gen_rotli_i32(t0, t0, sh);
1952 tcg_gen_extu_i32_tl(t1, t0);
1953 tcg_temp_free_i32(t0);
1954 } else {
1955 #if defined(TARGET_PPC64)
1956 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
1957 tcg_gen_rotli_i64(t1, t1, sh);
1958 #else
1959 g_assert_not_reached();
1960 #endif
1963 tcg_gen_andi_tl(t1, t1, mask);
1964 tcg_gen_andi_tl(t_ra, t_ra, ~mask);
1965 tcg_gen_or_tl(t_ra, t_ra, t1);
1966 tcg_temp_free(t1);
1968 if (unlikely(Rc(ctx->opcode) != 0)) {
1969 gen_set_Rc0(ctx, t_ra);
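/* In gen_rlwimi the first branch handles the case where the rotate-and-mask
 * degenerates into a simple bit-field insert (deposit). The general path
 * rotates the low 32 bits of rS - on 64-bit targets the word is first
 * duplicated into the high half so a wrapping mask (mb > me) still picks up
 * the rotated bits - and then merges the result into rA under MASK(mb, me). */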
1973 /* rlwinm & rlwinm. */
1974 static void gen_rlwinm(DisasContext *ctx)
1976 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
1977 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
1978 uint32_t sh = SH(ctx->opcode);
1979 uint32_t mb = MB(ctx->opcode);
1980 uint32_t me = ME(ctx->opcode);
1982 if (mb == 0 && me == (31 - sh)) {
1983 tcg_gen_shli_tl(t_ra, t_rs, sh);
1984 tcg_gen_ext32u_tl(t_ra, t_ra);
1985 } else if (sh != 0 && me == 31 && sh == (32 - mb)) {
1986 tcg_gen_ext32u_tl(t_ra, t_rs);
1987 tcg_gen_shri_tl(t_ra, t_ra, mb);
1988 } else {
1989 target_ulong mask;
1990 #if defined(TARGET_PPC64)
1991 mb += 32;
1992 me += 32;
1993 #endif
1994 mask = MASK(mb, me);
1996 if (mask <= 0xffffffffu) {
1997 TCGv_i32 t0 = tcg_temp_new_i32();
1998 tcg_gen_trunc_tl_i32(t0, t_rs);
1999 tcg_gen_rotli_i32(t0, t0, sh);
2000 tcg_gen_andi_i32(t0, t0, mask);
2001 tcg_gen_extu_i32_tl(t_ra, t0);
2002 tcg_temp_free_i32(t0);
2003 } else {
2004 #if defined(TARGET_PPC64)
2005 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2006 tcg_gen_rotli_i64(t_ra, t_ra, sh);
2007 tcg_gen_andi_i64(t_ra, t_ra, mask);
2008 #else
2009 g_assert_not_reached();
2010 #endif
2013 if (unlikely(Rc(ctx->opcode) != 0)) {
2014 gen_set_Rc0(ctx, t_ra);
2018 /* rlwnm & rlwnm. */
2019 static void gen_rlwnm(DisasContext *ctx)
2021 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2022 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2023 TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2024 uint32_t mb = MB(ctx->opcode);
2025 uint32_t me = ME(ctx->opcode);
2026 target_ulong mask;
2028 #if defined(TARGET_PPC64)
2029 mb += 32;
2030 me += 32;
2031 #endif
2032 mask = MASK(mb, me);
2034 if (mask <= 0xffffffffu) {
2035 TCGv_i32 t0 = tcg_temp_new_i32();
2036 TCGv_i32 t1 = tcg_temp_new_i32();
2037 tcg_gen_trunc_tl_i32(t0, t_rb);
2038 tcg_gen_trunc_tl_i32(t1, t_rs);
2039 tcg_gen_andi_i32(t0, t0, 0x1f);
2040 tcg_gen_rotl_i32(t1, t1, t0);
2041 tcg_gen_extu_i32_tl(t_ra, t1);
2042 tcg_temp_free_i32(t0);
2043 tcg_temp_free_i32(t1);
2044 } else {
2045 #if defined(TARGET_PPC64)
2046 TCGv_i64 t0 = tcg_temp_new_i64();
2047 tcg_gen_andi_i64(t0, t_rb, 0x1f);
2048 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2049 tcg_gen_rotl_i64(t_ra, t_ra, t0);
2050 tcg_temp_free_i64(t0);
2051 #else
2052 g_assert_not_reached();
2053 #endif
2056 tcg_gen_andi_tl(t_ra, t_ra, mask);
2058 if (unlikely(Rc(ctx->opcode) != 0)) {
2059 gen_set_Rc0(ctx, t_ra);
2063 #if defined(TARGET_PPC64)
2064 #define GEN_PPC64_R2(name, opc1, opc2) \
2065 static void glue(gen_, name##0)(DisasContext *ctx) \
2067 gen_##name(ctx, 0); \
2070 static void glue(gen_, name##1)(DisasContext *ctx) \
2072 gen_##name(ctx, 1); \
2074 #define GEN_PPC64_R4(name, opc1, opc2) \
2075 static void glue(gen_, name##0)(DisasContext *ctx) \
2077 gen_##name(ctx, 0, 0); \
2080 static void glue(gen_, name##1)(DisasContext *ctx) \
2082 gen_##name(ctx, 0, 1); \
2085 static void glue(gen_, name##2)(DisasContext *ctx) \
2087 gen_##name(ctx, 1, 0); \
2090 static void glue(gen_, name##3)(DisasContext *ctx) \
2092 gen_##name(ctx, 1, 1); \
2095 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
2097 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2098 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2100 if (sh != 0 && mb == 0 && me == (63 - sh)) {
2101 tcg_gen_shli_tl(t_ra, t_rs, sh);
2102 } else if (sh != 0 && me == 63 && sh == (64 - mb)) {
2103 tcg_gen_shri_tl(t_ra, t_rs, mb);
2104 } else {
2105 tcg_gen_rotli_tl(t_ra, t_rs, sh);
2106 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2108 if (unlikely(Rc(ctx->opcode) != 0)) {
2109 gen_set_Rc0(ctx, t_ra);
2113 /* rldicl - rldicl. */
2114 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
2116 uint32_t sh, mb;
2118 sh = SH(ctx->opcode) | (shn << 5);
2119 mb = MB(ctx->opcode) | (mbn << 5);
2120 gen_rldinm(ctx, mb, 63, sh);
2122 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
2124 /* rldicr - rldicr. */
2125 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
2127 uint32_t sh, me;
2129 sh = SH(ctx->opcode) | (shn << 5);
2130 me = MB(ctx->opcode) | (men << 5);
2131 gen_rldinm(ctx, 0, me, sh);
2133 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2135 /* rldic - rldic. */
2136 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
2138 uint32_t sh, mb;
2140 sh = SH(ctx->opcode) | (shn << 5);
2141 mb = MB(ctx->opcode) | (mbn << 5);
2142 gen_rldinm(ctx, mb, 63 - sh, sh);
2144 GEN_PPC64_R4(rldic, 0x1E, 0x04);
2146 static void gen_rldnm(DisasContext *ctx, int mb, int me)
2148 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2149 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2150 TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2151 TCGv t0;
2153 t0 = tcg_temp_new();
2154 tcg_gen_andi_tl(t0, t_rb, 0x3f);
2155 tcg_gen_rotl_tl(t_ra, t_rs, t0);
2156 tcg_temp_free(t0);
2158 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2159 if (unlikely(Rc(ctx->opcode) != 0)) {
2160 gen_set_Rc0(ctx, t_ra);
2164 /* rldcl - rldcl. */
2165 static inline void gen_rldcl(DisasContext *ctx, int mbn)
2167 uint32_t mb;
2169 mb = MB(ctx->opcode) | (mbn << 5);
2170 gen_rldnm(ctx, mb, 63);
2172 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2174 /* rldcr - rldcr. */
2175 static inline void gen_rldcr(DisasContext *ctx, int men)
2177 uint32_t me;
2179 me = MB(ctx->opcode) | (men << 5);
2180 gen_rldnm(ctx, 0, me);
2182 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2184 /* rldimi - rldimi. */
2185 static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
2187 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2188 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2189 uint32_t sh = SH(ctx->opcode) | (shn << 5);
2190 uint32_t mb = MB(ctx->opcode) | (mbn << 5);
2191 uint32_t me = 63 - sh;
2193 if (mb <= me) {
2194 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2195 } else {
2196 target_ulong mask = MASK(mb, me);
2197 TCGv t1 = tcg_temp_new();
2199 tcg_gen_rotli_tl(t1, t_rs, sh);
2200 tcg_gen_andi_tl(t1, t1, mask);
2201 tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2202 tcg_gen_or_tl(t_ra, t_ra, t1);
2203 tcg_temp_free(t1);
2205 if (unlikely(Rc(ctx->opcode) != 0)) {
2206 gen_set_Rc0(ctx, t_ra);
2209 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
2210 #endif
2212 /*** Integer shift ***/
2214 /* slw & slw. */
2215 static void gen_slw(DisasContext *ctx)
2217 TCGv t0, t1;
2219 t0 = tcg_temp_new();
2220 /* AND rS with a mask that is 0 when rB >= 0x20 */
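/* (shifting bit 5 of rB into the sign position and arithmetically
 * shifting it back yields an all-ones mask exactly when bit 0x20 of the
 * shift amount is set, and the andc below then forces the source to zero) */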
2221 #if defined(TARGET_PPC64)
2222 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2223 tcg_gen_sari_tl(t0, t0, 0x3f);
2224 #else
2225 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2226 tcg_gen_sari_tl(t0, t0, 0x1f);
2227 #endif
2228 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2229 t1 = tcg_temp_new();
2230 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2231 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2232 tcg_temp_free(t1);
2233 tcg_temp_free(t0);
2234 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2235 if (unlikely(Rc(ctx->opcode) != 0))
2236 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2239 /* sraw & sraw. */
2240 static void gen_sraw(DisasContext *ctx)
2242 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
2243 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2244 if (unlikely(Rc(ctx->opcode) != 0))
2245 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2248 /* srawi & srawi. */
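/* CA must be set iff the 32-bit source value is negative and at least
 * one 1 bit is shifted out; the mask/sign/and sequence below computes
 * exactly that.
 */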
2249 static void gen_srawi(DisasContext *ctx)
2251 int sh = SH(ctx->opcode);
2252 TCGv dst = cpu_gpr[rA(ctx->opcode)];
2253 TCGv src = cpu_gpr[rS(ctx->opcode)];
2254 if (sh == 0) {
2255 tcg_gen_ext32s_tl(dst, src);
2256 tcg_gen_movi_tl(cpu_ca, 0);
2257 } else {
2258 TCGv t0;
2259 tcg_gen_ext32s_tl(dst, src);
2260 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
2261 t0 = tcg_temp_new();
2262 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
2263 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2264 tcg_temp_free(t0);
2265 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2266 tcg_gen_sari_tl(dst, dst, sh);
2268 if (unlikely(Rc(ctx->opcode) != 0)) {
2269 gen_set_Rc0(ctx, dst);
2273 /* srw & srw. */
2274 static void gen_srw(DisasContext *ctx)
2276 TCGv t0, t1;
2278 t0 = tcg_temp_new();
2279 /* AND rS with a mask that is 0 when rB >= 0x20 */
2280 #if defined(TARGET_PPC64)
2281 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2282 tcg_gen_sari_tl(t0, t0, 0x3f);
2283 #else
2284 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2285 tcg_gen_sari_tl(t0, t0, 0x1f);
2286 #endif
2287 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2288 tcg_gen_ext32u_tl(t0, t0);
2289 t1 = tcg_temp_new();
2290 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2291 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2292 tcg_temp_free(t1);
2293 tcg_temp_free(t0);
2294 if (unlikely(Rc(ctx->opcode) != 0))
2295 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2298 #if defined(TARGET_PPC64)
2299 /* sld & sld. */
2300 static void gen_sld(DisasContext *ctx)
2302 TCGv t0, t1;
2304 t0 = tcg_temp_new();
2305 /* AND rS with a mask that is 0 when rB >= 0x40 */
2306 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2307 tcg_gen_sari_tl(t0, t0, 0x3f);
2308 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2309 t1 = tcg_temp_new();
2310 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2311 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2312 tcg_temp_free(t1);
2313 tcg_temp_free(t0);
2314 if (unlikely(Rc(ctx->opcode) != 0))
2315 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2318 /* srad & srad. */
2319 static void gen_srad(DisasContext *ctx)
2321 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
2322 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2323 if (unlikely(Rc(ctx->opcode) != 0))
2324 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2326 /* sradi & sradi. */
2327 static inline void gen_sradi(DisasContext *ctx, int n)
2329 int sh = SH(ctx->opcode) + (n << 5);
2330 TCGv dst = cpu_gpr[rA(ctx->opcode)];
2331 TCGv src = cpu_gpr[rS(ctx->opcode)];
2332 if (sh == 0) {
2333 tcg_gen_mov_tl(dst, src);
2334 tcg_gen_movi_tl(cpu_ca, 0);
2335 } else {
2336 TCGv t0;
2337 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
2338 t0 = tcg_temp_new();
2339 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
2340 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2341 tcg_temp_free(t0);
2342 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2343 tcg_gen_sari_tl(dst, src, sh);
2345 if (unlikely(Rc(ctx->opcode) != 0)) {
2346 gen_set_Rc0(ctx, dst);
2350 static void gen_sradi0(DisasContext *ctx)
2352 gen_sradi(ctx, 0);
2355 static void gen_sradi1(DisasContext *ctx)
2357 gen_sradi(ctx, 1);
2360 /* extswsli & extswsli. */
2361 static inline void gen_extswsli(DisasContext *ctx, int n)
2363 int sh = SH(ctx->opcode) + (n << 5);
2364 TCGv dst = cpu_gpr[rA(ctx->opcode)];
2365 TCGv src = cpu_gpr[rS(ctx->opcode)];
2367 tcg_gen_ext32s_tl(dst, src);
2368 tcg_gen_shli_tl(dst, dst, sh);
2369 if (unlikely(Rc(ctx->opcode) != 0)) {
2370 gen_set_Rc0(ctx, dst);
2374 static void gen_extswsli0(DisasContext *ctx)
2376 gen_extswsli(ctx, 0);
2379 static void gen_extswsli1(DisasContext *ctx)
2381 gen_extswsli(ctx, 1);
2384 /* srd & srd. */
2385 static void gen_srd(DisasContext *ctx)
2387 TCGv t0, t1;
2389 t0 = tcg_temp_new();
2390 /* AND rS with a mask that is 0 when rB >= 0x40 */
2391 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2392 tcg_gen_sari_tl(t0, t0, 0x3f);
2393 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2394 t1 = tcg_temp_new();
2395 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2396 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2397 tcg_temp_free(t1);
2398 tcg_temp_free(t0);
2399 if (unlikely(Rc(ctx->opcode) != 0))
2400 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2402 #endif
2404 /*** Addressing modes ***/
2405 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
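/* maskl clears the low bits of the displacement for encodings (e.g.
 * DS-form with maskl = 0x03, DQ-form with maskl = 0x0F) where those
 * bits are reused by the instruction format.
 */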
2406 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2407 target_long maskl)
2409 target_long simm = SIMM(ctx->opcode);
2411 simm &= ~maskl;
2412 if (rA(ctx->opcode) == 0) {
2413 if (NARROW_MODE(ctx)) {
2414 simm = (uint32_t)simm;
2416 tcg_gen_movi_tl(EA, simm);
2417 } else if (likely(simm != 0)) {
2418 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2419 if (NARROW_MODE(ctx)) {
2420 tcg_gen_ext32u_tl(EA, EA);
2422 } else {
2423 if (NARROW_MODE(ctx)) {
2424 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2425 } else {
2426 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2431 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
2433 if (rA(ctx->opcode) == 0) {
2434 if (NARROW_MODE(ctx)) {
2435 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2436 } else {
2437 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2439 } else {
2440 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2441 if (NARROW_MODE(ctx)) {
2442 tcg_gen_ext32u_tl(EA, EA);
2447 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
2449 if (rA(ctx->opcode) == 0) {
2450 tcg_gen_movi_tl(EA, 0);
2451 } else if (NARROW_MODE(ctx)) {
2452 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2453 } else {
2454 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2458 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
2459 target_long val)
2461 tcg_gen_addi_tl(ret, arg1, val);
2462 if (NARROW_MODE(ctx)) {
2463 tcg_gen_ext32u_tl(ret, ret);
2467 static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask)
2469 TCGLabel *l1 = gen_new_label();
2470 TCGv t0 = tcg_temp_new();
2471 TCGv_i32 t1, t2;
2472 tcg_gen_andi_tl(t0, EA, mask);
2473 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2474 t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
2475 t2 = tcg_const_i32(ctx->opcode & 0x03FF0000);
2476 gen_update_nip(ctx, ctx->nip - 4);
2477 gen_helper_raise_exception_err(cpu_env, t1, t2);
2478 tcg_temp_free_i32(t1);
2479 tcg_temp_free_i32(t2);
2480 gen_set_label(l1);
2481 tcg_temp_free(t0);
2484 static inline void gen_align_no_le(DisasContext *ctx)
2486 gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
2487 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
2490 /*** Integer load ***/
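/* DEF_MEMOP applies the guest's current default byte order to a memop,
 * while BSWAP_MEMOP flips it; the latter is what the byte-reverse
 * load/store forms (lhbrx, lwbrx, ...) use.
 */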
2491 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
2492 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
2494 #define GEN_QEMU_LOAD_TL(ldop, op) \
2495 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \
2496 TCGv val, \
2497 TCGv addr) \
2499 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \
2502 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB))
2503 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
2504 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
2505 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
2506 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
2508 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
2509 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
2511 #define GEN_QEMU_LOAD_64(ldop, op) \
2512 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \
2513 TCGv_i64 val, \
2514 TCGv addr) \
2516 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \
2519 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB))
2520 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
2521 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
2522 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
2523 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q))
2525 #if defined(TARGET_PPC64)
2526 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q))
2527 #endif
2529 #define GEN_QEMU_STORE_TL(stop, op) \
2530 static void glue(gen_qemu_, stop)(DisasContext *ctx, \
2531 TCGv val, \
2532 TCGv addr) \
2534 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \
2537 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB))
2538 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
2539 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
2541 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
2542 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
2544 #define GEN_QEMU_STORE_64(stop, op) \
2545 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \
2546 TCGv_i64 val, \
2547 TCGv addr) \
2549 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \
2552 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB))
2553 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
2554 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
2555 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q))
2557 #if defined(TARGET_PPC64)
2558 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q))
2559 #endif
2561 #define GEN_LD(name, ldop, opc, type) \
2562 static void glue(gen_, name)(DisasContext *ctx) \
2564 TCGv EA; \
2565 gen_set_access_type(ctx, ACCESS_INT); \
2566 EA = tcg_temp_new(); \
2567 gen_addr_imm_index(ctx, EA, 0); \
2568 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2569 tcg_temp_free(EA); \
2572 #define GEN_LDU(name, ldop, opc, type) \
2573 static void glue(gen_, name##u)(DisasContext *ctx) \
2575 TCGv EA; \
2576 if (unlikely(rA(ctx->opcode) == 0 || \
2577 rA(ctx->opcode) == rD(ctx->opcode))) { \
2578 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2579 return; \
2581 gen_set_access_type(ctx, ACCESS_INT); \
2582 EA = tcg_temp_new(); \
2583 if (type == PPC_64B) \
2584 gen_addr_imm_index(ctx, EA, 0x03); \
2585 else \
2586 gen_addr_imm_index(ctx, EA, 0); \
2587 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2588 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2589 tcg_temp_free(EA); \
2592 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
2593 static void glue(gen_, name##ux)(DisasContext *ctx) \
2595 TCGv EA; \
2596 if (unlikely(rA(ctx->opcode) == 0 || \
2597 rA(ctx->opcode) == rD(ctx->opcode))) { \
2598 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2599 return; \
2601 gen_set_access_type(ctx, ACCESS_INT); \
2602 EA = tcg_temp_new(); \
2603 gen_addr_reg_index(ctx, EA); \
2604 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2605 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2606 tcg_temp_free(EA); \
2609 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \
2610 static void glue(gen_, name##x)(DisasContext *ctx) \
2612 TCGv EA; \
2613 chk; \
2614 gen_set_access_type(ctx, ACCESS_INT); \
2615 EA = tcg_temp_new(); \
2616 gen_addr_reg_index(ctx, EA); \
2617 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2618 tcg_temp_free(EA); \
2621 #define GEN_LDX(name, ldop, opc2, opc3, type) \
2622 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
2624 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \
2625 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
2627 #define GEN_LDS(name, ldop, op, type) \
2628 GEN_LD(name, ldop, op | 0x20, type); \
2629 GEN_LDU(name, ldop, op | 0x21, type); \
2630 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
2631 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
2633 /* lbz lbzu lbzux lbzx */
2634 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
2635 /* lha lhau lhaux lhax */
2636 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
2637 /* lhz lhzu lhzux lhzx */
2638 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
2639 /* lwz lwzu lwzux lwzx */
2640 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
2641 #if defined(TARGET_PPC64)
2642 /* lwaux */
2643 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
2644 /* lwax */
2645 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
2646 /* ldux */
2647 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B);
2648 /* ldx */
2649 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B);
2651 /* CI load/store variants */
2652 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
2653 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
2654 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
2655 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
2657 static void gen_ld(DisasContext *ctx)
2659 TCGv EA;
2660 if (Rc(ctx->opcode)) {
2661 if (unlikely(rA(ctx->opcode) == 0 ||
2662 rA(ctx->opcode) == rD(ctx->opcode))) {
2663 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2664 return;
2667 gen_set_access_type(ctx, ACCESS_INT);
2668 EA = tcg_temp_new();
2669 gen_addr_imm_index(ctx, EA, 0x03);
2670 if (ctx->opcode & 0x02) {
2671 /* lwa (lwau is undefined) */
2672 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2673 } else {
2674 /* ld - ldu */
2675 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2677 if (Rc(ctx->opcode))
2678 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2679 tcg_temp_free(EA);
2682 /* lq */
2683 static void gen_lq(DisasContext *ctx)
2685 int ra, rd;
2686 TCGv EA;
2688 /* lq is a legal user mode instruction starting in ISA 2.07 */
2689 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
2690 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
2692 if (!legal_in_user_mode && ctx->pr) {
2693 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2694 return;
2697 if (!le_is_supported && ctx->le_mode) {
2698 gen_align_no_le(ctx);
2699 return;
2701 ra = rA(ctx->opcode);
2702 rd = rD(ctx->opcode);
2703 if (unlikely((rd & 1) || rd == ra)) {
2704 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2705 return;
2708 gen_set_access_type(ctx, ACCESS_INT);
2709 EA = tcg_temp_new();
2710 gen_addr_imm_index(ctx, EA, 0x0F);
2712 /* We only need to swap high and low halves. gen_qemu_ld64_i64 does
2713 the necessary 64-bit byteswap already. */
2714 if (unlikely(ctx->le_mode)) {
2715 gen_qemu_ld64_i64(ctx, cpu_gpr[rd + 1], EA);
2716 gen_addr_add(ctx, EA, EA, 8);
2717 gen_qemu_ld64_i64(ctx, cpu_gpr[rd], EA);
2718 } else {
2719 gen_qemu_ld64_i64(ctx, cpu_gpr[rd], EA);
2720 gen_addr_add(ctx, EA, EA, 8);
2721 gen_qemu_ld64_i64(ctx, cpu_gpr[rd + 1], EA);
2723 tcg_temp_free(EA);
2725 #endif
2727 /*** Integer store ***/
2728 #define GEN_ST(name, stop, opc, type) \
2729 static void glue(gen_, name)(DisasContext *ctx) \
2731 TCGv EA; \
2732 gen_set_access_type(ctx, ACCESS_INT); \
2733 EA = tcg_temp_new(); \
2734 gen_addr_imm_index(ctx, EA, 0); \
2735 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2736 tcg_temp_free(EA); \
2739 #define GEN_STU(name, stop, opc, type) \
2740 static void glue(gen_, stop##u)(DisasContext *ctx) \
2742 TCGv EA; \
2743 if (unlikely(rA(ctx->opcode) == 0)) { \
2744 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2745 return; \
2747 gen_set_access_type(ctx, ACCESS_INT); \
2748 EA = tcg_temp_new(); \
2749 if (type == PPC_64B) \
2750 gen_addr_imm_index(ctx, EA, 0x03); \
2751 else \
2752 gen_addr_imm_index(ctx, EA, 0); \
2753 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2754 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2755 tcg_temp_free(EA); \
2758 #define GEN_STUX(name, stop, opc2, opc3, type) \
2759 static void glue(gen_, name##ux)(DisasContext *ctx) \
2761 TCGv EA; \
2762 if (unlikely(rA(ctx->opcode) == 0)) { \
2763 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2764 return; \
2766 gen_set_access_type(ctx, ACCESS_INT); \
2767 EA = tcg_temp_new(); \
2768 gen_addr_reg_index(ctx, EA); \
2769 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2770 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2771 tcg_temp_free(EA); \
2774 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \
2775 static void glue(gen_, name##x)(DisasContext *ctx) \
2777 TCGv EA; \
2778 chk; \
2779 gen_set_access_type(ctx, ACCESS_INT); \
2780 EA = tcg_temp_new(); \
2781 gen_addr_reg_index(ctx, EA); \
2782 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2783 tcg_temp_free(EA); \
2785 #define GEN_STX(name, stop, opc2, opc3, type) \
2786 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
2788 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \
2789 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
2791 #define GEN_STS(name, stop, op, type) \
2792 GEN_ST(name, stop, op | 0x20, type); \
2793 GEN_STU(name, stop, op | 0x21, type); \
2794 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
2795 GEN_STX(name, stop, 0x17, op | 0x00, type)
2797 /* stb stbu stbux stbx */
2798 GEN_STS(stb, st8, 0x06, PPC_INTEGER);
2799 /* sth sthu sthux sthx */
2800 GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
2801 /* stw stwu stwux stwx */
2802 GEN_STS(stw, st32, 0x04, PPC_INTEGER);
2803 #if defined(TARGET_PPC64)
2804 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B);
2805 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B);
2806 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
2807 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
2808 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
2809 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
2811 static void gen_std(DisasContext *ctx)
2813 int rs;
2814 TCGv EA;
2816 rs = rS(ctx->opcode);
2817 if ((ctx->opcode & 0x3) == 0x2) { /* stq */
2818 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
2819 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
2821 if (!(ctx->insns_flags & PPC_64BX)) {
2822 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2825 if (!legal_in_user_mode && ctx->pr) {
2826 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2827 return;
2830 if (!le_is_supported && ctx->le_mode) {
2831 gen_align_no_le(ctx);
2832 return;
2835 if (unlikely(rs & 1)) {
2836 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2837 return;
2839 gen_set_access_type(ctx, ACCESS_INT);
2840 EA = tcg_temp_new();
2841 gen_addr_imm_index(ctx, EA, 0x03);
2843 /* We only need to swap high and low halves. gen_qemu_st64_i64 does
2844 the necessary 64-bit byteswap already. */
2845 if (unlikely(ctx->le_mode)) {
2846 gen_qemu_st64_i64(ctx, cpu_gpr[rs + 1], EA);
2847 gen_addr_add(ctx, EA, EA, 8);
2848 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA);
2849 } else {
2850 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA);
2851 gen_addr_add(ctx, EA, EA, 8);
2852 gen_qemu_st64_i64(ctx, cpu_gpr[rs + 1], EA);
2854 tcg_temp_free(EA);
2855 } else {
2856 /* std / stdu */
2857 if (Rc(ctx->opcode)) {
2858 if (unlikely(rA(ctx->opcode) == 0)) {
2859 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2860 return;
2863 gen_set_access_type(ctx, ACCESS_INT);
2864 EA = tcg_temp_new();
2865 gen_addr_imm_index(ctx, EA, 0x03);
2866 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA);
2867 if (Rc(ctx->opcode))
2868 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2869 tcg_temp_free(EA);
2872 #endif
2873 /*** Integer load and store with byte reverse ***/
2875 /* lhbrx */
2876 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
2878 /* lwbrx */
2879 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
2881 #if defined(TARGET_PPC64)
2882 /* ldbrx */
2883 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
2884 /* stdbrx */
2885 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
2886 #endif /* TARGET_PPC64 */
2888 /* sthbrx */
2889 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
2890 /* stwbrx */
2891 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
2893 /*** Integer load and store multiple ***/
2895 /* lmw */
2896 static void gen_lmw(DisasContext *ctx)
2898 TCGv t0;
2899 TCGv_i32 t1;
2901 if (ctx->le_mode) {
2902 gen_align_no_le(ctx);
2903 return;
2905 gen_set_access_type(ctx, ACCESS_INT);
2906 t0 = tcg_temp_new();
2907 t1 = tcg_const_i32(rD(ctx->opcode));
2908 gen_addr_imm_index(ctx, t0, 0);
2909 gen_helper_lmw(cpu_env, t0, t1);
2910 tcg_temp_free(t0);
2911 tcg_temp_free_i32(t1);
2914 /* stmw */
2915 static void gen_stmw(DisasContext *ctx)
2917 TCGv t0;
2918 TCGv_i32 t1;
2920 if (ctx->le_mode) {
2921 gen_align_no_le(ctx);
2922 return;
2924 gen_set_access_type(ctx, ACCESS_INT);
2925 t0 = tcg_temp_new();
2926 t1 = tcg_const_i32(rS(ctx->opcode));
2927 gen_addr_imm_index(ctx, t0, 0);
2928 gen_helper_stmw(cpu_env, t0, t1);
2929 tcg_temp_free(t0);
2930 tcg_temp_free_i32(t1);
2933 /*** Integer load and store strings ***/
2935 /* lswi */
2936 /* PowerPC32 specification says we must generate an exception if
2937 * rA is in the range of registers to be loaded.
2938 * On the other hand, IBM says this is valid, but rA won't be loaded.
2939 * For now, I'll follow the spec...
2941 static void gen_lswi(DisasContext *ctx)
2943 TCGv t0;
2944 TCGv_i32 t1, t2;
2945 int nb = NB(ctx->opcode);
2946 int start = rD(ctx->opcode);
2947 int ra = rA(ctx->opcode);
2948 int nr;
2950 if (ctx->le_mode) {
2951 gen_align_no_le(ctx);
2952 return;
2954 if (nb == 0)
2955 nb = 32;
2956 nr = (nb + 3) / 4;
2957 if (unlikely(lsw_reg_in_range(start, nr, ra))) {
2958 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
2959 return;
2961 gen_set_access_type(ctx, ACCESS_INT);
2962 t0 = tcg_temp_new();
2963 gen_addr_register(ctx, t0);
2964 t1 = tcg_const_i32(nb);
2965 t2 = tcg_const_i32(start);
2966 gen_helper_lsw(cpu_env, t0, t1, t2);
2967 tcg_temp_free(t0);
2968 tcg_temp_free_i32(t1);
2969 tcg_temp_free_i32(t2);
2972 /* lswx */
2973 static void gen_lswx(DisasContext *ctx)
2975 TCGv t0;
2976 TCGv_i32 t1, t2, t3;
2978 if (ctx->le_mode) {
2979 gen_align_no_le(ctx);
2980 return;
2982 gen_set_access_type(ctx, ACCESS_INT);
2983 t0 = tcg_temp_new();
2984 gen_addr_reg_index(ctx, t0);
2985 t1 = tcg_const_i32(rD(ctx->opcode));
2986 t2 = tcg_const_i32(rA(ctx->opcode));
2987 t3 = tcg_const_i32(rB(ctx->opcode));
2988 gen_helper_lswx(cpu_env, t0, t1, t2, t3);
2989 tcg_temp_free(t0);
2990 tcg_temp_free_i32(t1);
2991 tcg_temp_free_i32(t2);
2992 tcg_temp_free_i32(t3);
2995 /* stswi */
2996 static void gen_stswi(DisasContext *ctx)
2998 TCGv t0;
2999 TCGv_i32 t1, t2;
3000 int nb = NB(ctx->opcode);
3002 if (ctx->le_mode) {
3003 gen_align_no_le(ctx);
3004 return;
3006 gen_set_access_type(ctx, ACCESS_INT);
3007 t0 = tcg_temp_new();
3008 gen_addr_register(ctx, t0);
3009 if (nb == 0)
3010 nb = 32;
3011 t1 = tcg_const_i32(nb);
3012 t2 = tcg_const_i32(rS(ctx->opcode));
3013 gen_helper_stsw(cpu_env, t0, t1, t2);
3014 tcg_temp_free(t0);
3015 tcg_temp_free_i32(t1);
3016 tcg_temp_free_i32(t2);
3019 /* stswx */
3020 static void gen_stswx(DisasContext *ctx)
3022 TCGv t0;
3023 TCGv_i32 t1, t2;
3025 if (ctx->le_mode) {
3026 gen_align_no_le(ctx);
3027 return;
3029 gen_set_access_type(ctx, ACCESS_INT);
3030 t0 = tcg_temp_new();
3031 gen_addr_reg_index(ctx, t0);
3032 t1 = tcg_temp_new_i32();
3033 tcg_gen_trunc_tl_i32(t1, cpu_xer);
3034 tcg_gen_andi_i32(t1, t1, 0x7F);
3035 t2 = tcg_const_i32(rS(ctx->opcode));
3036 gen_helper_stsw(cpu_env, t0, t1, t2);
3037 tcg_temp_free(t0);
3038 tcg_temp_free_i32(t1);
3039 tcg_temp_free_i32(t2);
3042 /*** Memory synchronisation ***/
3043 /* eieio */
3044 static void gen_eieio(DisasContext *ctx)
3048 #if !defined(CONFIG_USER_ONLY)
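/* TLB flushes are performed lazily: code that needs one only sets
 * tlb_need_flush, and isync / sync / ptesync call this to perform the
 * actual (local or global) flush when the flag is set.
 */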
3049 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
3051 TCGv_i32 t;
3052 TCGLabel *l;
3054 if (!ctx->lazy_tlb_flush) {
3055 return;
3057 l = gen_new_label();
3058 t = tcg_temp_new_i32();
3059 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
3060 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
3061 if (global) {
3062 gen_helper_check_tlb_flush_global(cpu_env);
3063 } else {
3064 gen_helper_check_tlb_flush_local(cpu_env);
3066 gen_set_label(l);
3067 tcg_temp_free_i32(t);
3069 #else
3070 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
3071 #endif
3073 /* isync */
3074 static void gen_isync(DisasContext *ctx)
3077 * We need to check for a pending TLB flush. This can only happen in
3078 * kernel mode, however, so check MSR_PR.
3080 if (!ctx->pr) {
3081 gen_check_tlb_flush(ctx, false);
3083 gen_stop_exception(ctx);
3086 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE))
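/* MEMOP_GET_SIZE yields the access size in bytes encoded in a TCG memop.
 * The LARX macro below implements the load-and-reserve pattern: compute
 * the EA, check alignment, load the value, then record the reservation
 * address in cpu_reserve and the loaded value in reserve_val.
 */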
3088 #define LARX(name, memop) \
3089 static void gen_##name(DisasContext *ctx) \
3091 TCGv t0; \
3092 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; \
3093 int len = MEMOP_GET_SIZE(memop); \
3094 gen_set_access_type(ctx, ACCESS_RES); \
3095 t0 = tcg_temp_local_new(); \
3096 gen_addr_reg_index(ctx, t0); \
3097 if ((len) > 1) { \
3098 gen_check_align(ctx, t0, (len)-1); \
3100 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop); \
3101 tcg_gen_mov_tl(cpu_reserve, t0); \
3102 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUPPCState, reserve_val)); \
3103 tcg_temp_free(t0); \
3106 /* lbarx, lharx, lwarx */
3107 LARX(lbarx, DEF_MEMOP(MO_UB))
3108 LARX(lharx, DEF_MEMOP(MO_UW))
3109 LARX(lwarx, DEF_MEMOP(MO_UL))
3111 #if defined(CONFIG_USER_ONLY)
3112 static void gen_conditional_store(DisasContext *ctx, TCGv EA,
3113 int reg, int memop)
3115 TCGv t0 = tcg_temp_new();
3117 tcg_gen_st_tl(EA, cpu_env, offsetof(CPUPPCState, reserve_ea));
3118 tcg_gen_movi_tl(t0, (MEMOP_GET_SIZE(memop) << 5) | reg);
3119 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, reserve_info));
3120 tcg_temp_free(t0);
3121 gen_exception_err(ctx, POWERPC_EXCP_STCX, 0);
3123 #else
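/* Store conditional, softmmu case: CR0 is seeded with SO, and the EQ bit
 * is set and the store performed only when the EA matches the current
 * reservation; the reservation is then cleared either way. Note that the
 * check and the store are not done as a single atomic operation here.
 */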
3124 static void gen_conditional_store(DisasContext *ctx, TCGv EA,
3125 int reg, int memop)
3127 TCGLabel *l1;
3129 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3130 l1 = gen_new_label();
3131 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, l1);
3132 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3133 tcg_gen_qemu_st_tl(cpu_gpr[reg], EA, ctx->mem_idx, memop);
3134 gen_set_label(l1);
3135 tcg_gen_movi_tl(cpu_reserve, -1);
3137 #endif
3139 #define STCX(name, memop) \
3140 static void gen_##name(DisasContext *ctx) \
3142 TCGv t0; \
3143 int len = MEMOP_GET_SIZE(memop); \
3144 gen_set_access_type(ctx, ACCESS_RES); \
3145 t0 = tcg_temp_local_new(); \
3146 gen_addr_reg_index(ctx, t0); \
3147 if (len > 1) { \
3148 gen_check_align(ctx, t0, (len) - 1); \
3150 gen_conditional_store(ctx, t0, rS(ctx->opcode), memop); \
3151 tcg_temp_free(t0); \
3154 STCX(stbcx_, DEF_MEMOP(MO_UB))
3155 STCX(sthcx_, DEF_MEMOP(MO_UW))
3156 STCX(stwcx_, DEF_MEMOP(MO_UL))
3158 #if defined(TARGET_PPC64)
3159 /* ldarx */
3160 LARX(ldarx, DEF_MEMOP(MO_Q))
3161 /* stdcx. */
3162 STCX(stdcx_, DEF_MEMOP(MO_Q))
3164 /* lqarx */
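/* lqarx loads the 16-byte quantity as two doubleword accesses; in
 * little-endian mode the two halves are placed in the register pair in
 * swapped order so rd/rd+1 end up with the architected contents.
 */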
3165 static void gen_lqarx(DisasContext *ctx)
3167 TCGv EA;
3168 int rd = rD(ctx->opcode);
3169 TCGv gpr1, gpr2;
3171 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
3172 (rd == rB(ctx->opcode)))) {
3173 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3174 return;
3177 gen_set_access_type(ctx, ACCESS_RES);
3178 EA = tcg_temp_local_new();
3179 gen_addr_reg_index(ctx, EA);
3180 gen_check_align(ctx, EA, 15);
3181 if (unlikely(ctx->le_mode)) {
3182 gpr1 = cpu_gpr[rd+1];
3183 gpr2 = cpu_gpr[rd];
3184 } else {
3185 gpr1 = cpu_gpr[rd];
3186 gpr2 = cpu_gpr[rd+1];
3188 tcg_gen_qemu_ld_i64(gpr1, EA, ctx->mem_idx, DEF_MEMOP(MO_Q));
3189 tcg_gen_mov_tl(cpu_reserve, EA);
3190 gen_addr_add(ctx, EA, EA, 8);
3191 tcg_gen_qemu_ld_i64(gpr2, EA, ctx->mem_idx, DEF_MEMOP(MO_Q));
3193 tcg_gen_st_tl(gpr1, cpu_env, offsetof(CPUPPCState, reserve_val));
3194 tcg_gen_st_tl(gpr2, cpu_env, offsetof(CPUPPCState, reserve_val2));
3195 tcg_temp_free(EA);
3198 /* stqcx. */
3199 static void gen_stqcx_(DisasContext *ctx)
3201 TCGv EA;
3202 int reg = rS(ctx->opcode);
3203 int len = 16;
3204 #if !defined(CONFIG_USER_ONLY)
3205 TCGLabel *l1;
3206 TCGv gpr1, gpr2;
3207 #endif
3209 if (unlikely((rD(ctx->opcode) & 1))) {
3210 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3211 return;
3213 gen_set_access_type(ctx, ACCESS_RES);
3214 EA = tcg_temp_local_new();
3215 gen_addr_reg_index(ctx, EA);
3216 if (len > 1) {
3217 gen_check_align(ctx, EA, (len) - 1);
3220 #if defined(CONFIG_USER_ONLY)
3221 gen_conditional_store(ctx, EA, reg, 16);
3222 #else
3223 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3224 l1 = gen_new_label();
3225 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, l1);
3226 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3228 if (unlikely(ctx->le_mode)) {
3229 gpr1 = cpu_gpr[reg + 1];
3230 gpr2 = cpu_gpr[reg];
3231 } else {
3232 gpr1 = cpu_gpr[reg];
3233 gpr2 = cpu_gpr[reg + 1];
3235 tcg_gen_qemu_st_tl(gpr1, EA, ctx->mem_idx, DEF_MEMOP(MO_Q));
3236 gen_addr_add(ctx, EA, EA, 8);
3237 tcg_gen_qemu_st_tl(gpr2, EA, ctx->mem_idx, DEF_MEMOP(MO_Q));
3239 gen_set_label(l1);
3240 tcg_gen_movi_tl(cpu_reserve, -1);
3241 #endif
3242 tcg_temp_free(EA);
3245 #endif /* defined(TARGET_PPC64) */
3247 /* sync */
3248 static void gen_sync(DisasContext *ctx)
3250 uint32_t l = (ctx->opcode >> 21) & 3;
3253 * We may need to check for a pending TLB flush.
3255 * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32.
3257 * Additionally, this can only happen in kernel mode, so
3258 * check MSR_PR as well.
3260 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
3261 gen_check_tlb_flush(ctx, true);
3265 /* wait */
3266 static void gen_wait(DisasContext *ctx)
3268 TCGv_i32 t0 = tcg_const_i32(1);
3269 tcg_gen_st_i32(t0, cpu_env,
3270 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
3271 tcg_temp_free_i32(t0);
3272 /* Stop translation, as the CPU is supposed to sleep from now on */
3273 gen_exception_nip(ctx, EXCP_HLT, ctx->nip);
3276 #if defined(TARGET_PPC64)
3277 static void gen_doze(DisasContext *ctx)
3279 #if defined(CONFIG_USER_ONLY)
3280 GEN_PRIV;
3281 #else
3282 TCGv_i32 t;
3284 CHK_HV;
3285 t = tcg_const_i32(PPC_PM_DOZE);
3286 gen_helper_pminsn(cpu_env, t);
3287 tcg_temp_free_i32(t);
3288 gen_stop_exception(ctx);
3289 #endif /* defined(CONFIG_USER_ONLY) */
3292 static void gen_nap(DisasContext *ctx)
3294 #if defined(CONFIG_USER_ONLY)
3295 GEN_PRIV;
3296 #else
3297 TCGv_i32 t;
3299 CHK_HV;
3300 t = tcg_const_i32(PPC_PM_NAP);
3301 gen_helper_pminsn(cpu_env, t);
3302 tcg_temp_free_i32(t);
3303 gen_stop_exception(ctx);
3304 #endif /* defined(CONFIG_USER_ONLY) */
3307 static void gen_sleep(DisasContext *ctx)
3309 #if defined(CONFIG_USER_ONLY)
3310 GEN_PRIV;
3311 #else
3312 TCGv_i32 t;
3314 CHK_HV;
3315 t = tcg_const_i32(PPC_PM_SLEEP);
3316 gen_helper_pminsn(cpu_env, t);
3317 tcg_temp_free_i32(t);
3318 gen_stop_exception(ctx);
3319 #endif /* defined(CONFIG_USER_ONLY) */
3322 static void gen_rvwinkle(DisasContext *ctx)
3324 #if defined(CONFIG_USER_ONLY)
3325 GEN_PRIV;
3326 #else
3327 TCGv_i32 t;
3329 CHK_HV;
3330 t = tcg_const_i32(PPC_PM_RVWINKLE);
3331 gen_helper_pminsn(cpu_env, t);
3332 tcg_temp_free_i32(t);
3333 gen_stop_exception(ctx);
3334 #endif /* defined(CONFIG_USER_ONLY) */
3336 #endif /* #if defined(TARGET_PPC64) */
3338 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
3340 #if defined(TARGET_PPC64)
3341 if (ctx->has_cfar)
3342 tcg_gen_movi_tl(cpu_cfar, nip);
3343 #endif
3346 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
3348 if (unlikely(ctx->singlestep_enabled)) {
3349 return false;
3352 #ifndef CONFIG_USER_ONLY
3353 return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
3354 #else
3355 return true;
3356 #endif
3359 /*** Branch ***/
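/* gen_goto_tb chains directly to the target TB when the destination is
 * on the same page and we are not single-stepping; otherwise it stores
 * the destination into nip and exits, raising trace/debug exceptions as
 * required by the single-step flags.
 */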
3360 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
3362 if (NARROW_MODE(ctx)) {
3363 dest = (uint32_t) dest;
3365 if (use_goto_tb(ctx, dest)) {
3366 tcg_gen_goto_tb(n);
3367 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3368 tcg_gen_exit_tb((uintptr_t)ctx->tb + n);
3369 } else {
3370 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3371 if (unlikely(ctx->singlestep_enabled)) {
3372 if ((ctx->singlestep_enabled &
3373 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
3374 (ctx->exception == POWERPC_EXCP_BRANCH ||
3375 ctx->exception == POWERPC_EXCP_TRACE)) {
3376 gen_exception_nip(ctx, POWERPC_EXCP_TRACE, dest);
3378 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3379 gen_debug_exception(ctx);
3382 tcg_gen_exit_tb(0);
3386 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
3388 if (NARROW_MODE(ctx)) {
3389 nip = (uint32_t)nip;
3391 tcg_gen_movi_tl(cpu_lr, nip);
3394 /* b ba bl bla */
3395 static void gen_b(DisasContext *ctx)
3397 target_ulong li, target;
3399 ctx->exception = POWERPC_EXCP_BRANCH;
3400 /* sign extend LI */
3401 li = LI(ctx->opcode);
3402 li = (li ^ 0x02000000) - 0x02000000;
3403 if (likely(AA(ctx->opcode) == 0)) {
3404 target = ctx->nip + li - 4;
3405 } else {
3406 target = li;
3408 if (LK(ctx->opcode)) {
3409 gen_setlr(ctx, ctx->nip);
3411 gen_update_cfar(ctx, ctx->nip - 4);
3412 gen_goto_tb(ctx, 0, target);
3415 #define BCOND_IM 0
3416 #define BCOND_LR 1
3417 #define BCOND_CTR 2
3418 #define BCOND_TAR 3
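/* BO field decoding as used below: if bit 0x04 is clear, CTR is
 * decremented and tested (branch when CTR == 0 if 0x02 is set, when
 * CTR != 0 if clear); if bit 0x10 is clear, CR bit BI is tested (branch
 * if the bit is set when 0x08 is set, if clear otherwise); with both
 * 0x04 and 0x10 set the branch is unconditional.
 */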
3420 static inline void gen_bcond(DisasContext *ctx, int type)
3422 uint32_t bo = BO(ctx->opcode);
3423 TCGLabel *l1;
3424 TCGv target;
3426 ctx->exception = POWERPC_EXCP_BRANCH;
3427 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
3428 target = tcg_temp_local_new();
3429 if (type == BCOND_CTR)
3430 tcg_gen_mov_tl(target, cpu_ctr);
3431 else if (type == BCOND_TAR)
3432 gen_load_spr(target, SPR_TAR);
3433 else
3434 tcg_gen_mov_tl(target, cpu_lr);
3435 } else {
3436 TCGV_UNUSED(target);
3438 if (LK(ctx->opcode))
3439 gen_setlr(ctx, ctx->nip);
3440 l1 = gen_new_label();
3441 if ((bo & 0x4) == 0) {
3442 /* Decrement and test CTR */
3443 TCGv temp = tcg_temp_new();
3444 if (unlikely(type == BCOND_CTR)) {
3445 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3446 return;
3448 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3449 if (NARROW_MODE(ctx)) {
3450 tcg_gen_ext32u_tl(temp, cpu_ctr);
3451 } else {
3452 tcg_gen_mov_tl(temp, cpu_ctr);
3454 if (bo & 0x2) {
3455 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3456 } else {
3457 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3459 tcg_temp_free(temp);
3461 if ((bo & 0x10) == 0) {
3462 /* Test CR */
3463 uint32_t bi = BI(ctx->opcode);
3464 uint32_t mask = 0x08 >> (bi & 0x03);
3465 TCGv_i32 temp = tcg_temp_new_i32();
3467 if (bo & 0x8) {
3468 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3469 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3470 } else {
3471 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3472 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3474 tcg_temp_free_i32(temp);
3476 gen_update_cfar(ctx, ctx->nip - 4);
3477 if (type == BCOND_IM) {
3478 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3479 if (likely(AA(ctx->opcode) == 0)) {
3480 gen_goto_tb(ctx, 0, ctx->nip + li - 4);
3481 } else {
3482 gen_goto_tb(ctx, 0, li);
3484 if ((bo & 0x14) != 0x14) {
3485 gen_set_label(l1);
3486 gen_goto_tb(ctx, 1, ctx->nip);
3488 } else {
3489 if (NARROW_MODE(ctx)) {
3490 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3491 } else {
3492 tcg_gen_andi_tl(cpu_nip, target, ~3);
3494 tcg_gen_exit_tb(0);
3495 if ((bo & 0x14) != 0x14) {
3496 gen_set_label(l1);
3497 gen_update_nip(ctx, ctx->nip);
3498 tcg_gen_exit_tb(0);
3501 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
3502 tcg_temp_free(target);
3506 static void gen_bc(DisasContext *ctx)
3508 gen_bcond(ctx, BCOND_IM);
3511 static void gen_bcctr(DisasContext *ctx)
3513 gen_bcond(ctx, BCOND_CTR);
3516 static void gen_bclr(DisasContext *ctx)
3518 gen_bcond(ctx, BCOND_LR);
3521 static void gen_bctar(DisasContext *ctx)
3523 gen_bcond(ctx, BCOND_TAR);
3526 /*** Condition register logical ***/
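/* Each CR logical op extracts the two source bits by shifting their CR
 * fields so the bits line up with the destination position, applies the
 * operation, and merges the single result bit back into the destination
 * CR field.
 */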
3527 #define GEN_CRLOGIC(name, tcg_op, opc) \
3528 static void glue(gen_, name)(DisasContext *ctx) \
3530 uint8_t bitmask; \
3531 int sh; \
3532 TCGv_i32 t0, t1; \
3533 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3534 t0 = tcg_temp_new_i32(); \
3535 if (sh > 0) \
3536 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
3537 else if (sh < 0) \
3538 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
3539 else \
3540 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
3541 t1 = tcg_temp_new_i32(); \
3542 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3543 if (sh > 0) \
3544 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
3545 else if (sh < 0) \
3546 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
3547 else \
3548 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
3549 tcg_op(t0, t0, t1); \
3550 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \
3551 tcg_gen_andi_i32(t0, t0, bitmask); \
3552 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3553 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
3554 tcg_temp_free_i32(t0); \
3555 tcg_temp_free_i32(t1); \
3558 /* crand */
3559 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3560 /* crandc */
3561 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3562 /* creqv */
3563 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3564 /* crnand */
3565 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3566 /* crnor */
3567 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3568 /* cror */
3569 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3570 /* crorc */
3571 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3572 /* crxor */
3573 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
3575 /* mcrf */
3576 static void gen_mcrf(DisasContext *ctx)
3578 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3581 /*** System linkage ***/
3583 /* rfi (supervisor only) */
3584 static void gen_rfi(DisasContext *ctx)
3586 #if defined(CONFIG_USER_ONLY)
3587 GEN_PRIV;
3588 #else
3589 /* This instruction doesn't exist anymore on 64-bit server
3590 * processors compliant with arch 2.x
3592 if (ctx->insns_flags & PPC_SEGMENT_64B) {
3593 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3594 return;
3596 /* Restore CPU state */
3597 CHK_SV;
3598 gen_update_cfar(ctx, ctx->nip - 4);
3599 gen_helper_rfi(cpu_env);
3600 gen_sync_exception(ctx);
3601 #endif
3604 #if defined(TARGET_PPC64)
3605 static void gen_rfid(DisasContext *ctx)
3607 #if defined(CONFIG_USER_ONLY)
3608 GEN_PRIV;
3609 #else
3610 /* Restore CPU state */
3611 CHK_SV;
3612 gen_update_cfar(ctx, ctx->nip - 4);
3613 gen_helper_rfid(cpu_env);
3614 gen_sync_exception(ctx);
3615 #endif
3618 static void gen_hrfid(DisasContext *ctx)
3620 #if defined(CONFIG_USER_ONLY)
3621 GEN_PRIV;
3622 #else
3623 /* Restore CPU state */
3624 CHK_HV;
3625 gen_helper_hrfid(cpu_env);
3626 gen_sync_exception(ctx);
3627 #endif
3629 #endif
3631 /* sc */
3632 #if defined(CONFIG_USER_ONLY)
3633 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3634 #else
3635 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3636 #endif
3637 static void gen_sc(DisasContext *ctx)
3639 uint32_t lev;
3641 lev = (ctx->opcode >> 5) & 0x7F;
3642 gen_exception_err(ctx, POWERPC_SYSCALL, lev);
3645 /*** Trap ***/
3647 /* Check for unconditional traps (always or never) */
3648 static bool check_unconditional_trap(DisasContext *ctx)
3650 /* Trap never */
3651 if (TO(ctx->opcode) == 0) {
3652 return true;
3654 /* Trap always */
3655 if (TO(ctx->opcode) == 31) {
3656 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
3657 return true;
3659 return false;
3662 /* tw */
3663 static void gen_tw(DisasContext *ctx)
3665 TCGv_i32 t0;
3667 if (check_unconditional_trap(ctx)) {
3668 return;
3670 t0 = tcg_const_i32(TO(ctx->opcode));
3671 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
3672 t0);
3673 tcg_temp_free_i32(t0);
3676 /* twi */
3677 static void gen_twi(DisasContext *ctx)
3679 TCGv t0;
3680 TCGv_i32 t1;
3682 if (check_unconditional_trap(ctx)) {
3683 return;
3685 t0 = tcg_const_tl(SIMM(ctx->opcode));
3686 t1 = tcg_const_i32(TO(ctx->opcode));
3687 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
3688 tcg_temp_free(t0);
3689 tcg_temp_free_i32(t1);
3692 #if defined(TARGET_PPC64)
3693 /* td */
3694 static void gen_td(DisasContext *ctx)
3696 TCGv_i32 t0;
3698 if (check_unconditional_trap(ctx)) {
3699 return;
3701 t0 = tcg_const_i32(TO(ctx->opcode));
3702 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
3703 t0);
3704 tcg_temp_free_i32(t0);
3707 /* tdi */
3708 static void gen_tdi(DisasContext *ctx)
3710 TCGv t0;
3711 TCGv_i32 t1;
3713 if (check_unconditional_trap(ctx)) {
3714 return;
3716 t0 = tcg_const_tl(SIMM(ctx->opcode));
3717 t1 = tcg_const_i32(TO(ctx->opcode));
3718 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
3719 tcg_temp_free(t0);
3720 tcg_temp_free_i32(t1);
3722 #endif
3724 /*** Processor control ***/
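/* XER is kept split across TCG globals: cpu_xer holds the bulk of the
 * register while SO, OV and CA live in their own variables; reads
 * recombine them into the architected layout and writes split them out
 * again.
 */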
3726 static void gen_read_xer(TCGv dst)
3728 TCGv t0 = tcg_temp_new();
3729 TCGv t1 = tcg_temp_new();
3730 TCGv t2 = tcg_temp_new();
3731 tcg_gen_mov_tl(dst, cpu_xer);
3732 tcg_gen_shli_tl(t0, cpu_so, XER_SO);
3733 tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
3734 tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
3735 tcg_gen_or_tl(t0, t0, t1);
3736 tcg_gen_or_tl(dst, dst, t2);
3737 tcg_gen_or_tl(dst, dst, t0);
3738 tcg_temp_free(t0);
3739 tcg_temp_free(t1);
3740 tcg_temp_free(t2);
3743 static void gen_write_xer(TCGv src)
3745 tcg_gen_andi_tl(cpu_xer, src,
3746 ~((1u << XER_SO) | (1u << XER_OV) | (1u << XER_CA)));
3747 tcg_gen_shri_tl(cpu_so, src, XER_SO);
3748 tcg_gen_shri_tl(cpu_ov, src, XER_OV);
3749 tcg_gen_shri_tl(cpu_ca, src, XER_CA);
3750 tcg_gen_andi_tl(cpu_so, cpu_so, 1);
3751 tcg_gen_andi_tl(cpu_ov, cpu_ov, 1);
3752 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
3755 /* mcrxr */
3756 static void gen_mcrxr(DisasContext *ctx)
3758 TCGv_i32 t0 = tcg_temp_new_i32();
3759 TCGv_i32 t1 = tcg_temp_new_i32();
3760 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
3762 tcg_gen_trunc_tl_i32(t0, cpu_so);
3763 tcg_gen_trunc_tl_i32(t1, cpu_ov);
3764 tcg_gen_trunc_tl_i32(dst, cpu_ca);
3765 tcg_gen_shli_i32(t0, t0, 3);
3766 tcg_gen_shli_i32(t1, t1, 2);
3767 tcg_gen_shli_i32(dst, dst, 1);
3768 tcg_gen_or_i32(dst, dst, t0);
3769 tcg_gen_or_i32(dst, dst, t1);
3770 tcg_temp_free_i32(t0);
3771 tcg_temp_free_i32(t1);
3773 tcg_gen_movi_tl(cpu_so, 0);
3774 tcg_gen_movi_tl(cpu_ov, 0);
3775 tcg_gen_movi_tl(cpu_ca, 0);
3778 /* mfcr mfocrf */
3779 static void gen_mfcr(DisasContext *ctx)
3781 uint32_t crm, crn;
3783 if (likely(ctx->opcode & 0x00100000)) {
3784 crm = CRM(ctx->opcode);
3785 if (likely(crm && ((crm & (crm - 1)) == 0))) {
3786 crn = ctz32 (crm);
3787 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
3788 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
3789 cpu_gpr[rD(ctx->opcode)], crn * 4);
3791 } else {
3792 TCGv_i32 t0 = tcg_temp_new_i32();
3793 tcg_gen_mov_i32(t0, cpu_crf[0]);
3794 tcg_gen_shli_i32(t0, t0, 4);
3795 tcg_gen_or_i32(t0, t0, cpu_crf[1]);
3796 tcg_gen_shli_i32(t0, t0, 4);
3797 tcg_gen_or_i32(t0, t0, cpu_crf[2]);
3798 tcg_gen_shli_i32(t0, t0, 4);
3799 tcg_gen_or_i32(t0, t0, cpu_crf[3]);
3800 tcg_gen_shli_i32(t0, t0, 4);
3801 tcg_gen_or_i32(t0, t0, cpu_crf[4]);
3802 tcg_gen_shli_i32(t0, t0, 4);
3803 tcg_gen_or_i32(t0, t0, cpu_crf[5]);
3804 tcg_gen_shli_i32(t0, t0, 4);
3805 tcg_gen_or_i32(t0, t0, cpu_crf[6]);
3806 tcg_gen_shli_i32(t0, t0, 4);
3807 tcg_gen_or_i32(t0, t0, cpu_crf[7]);
3808 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
3809 tcg_temp_free_i32(t0);
3813 /* mfmsr */
3814 static void gen_mfmsr(DisasContext *ctx)
3816 CHK_SV;
3817 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
3820 static void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
3822 #if 0
3823 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
3824 printf("ERROR: trying to access SPR %d!\n", sprn);
3825 #endif
3827 #define SPR_NOACCESS (&spr_noaccess)
3829 /* mfspr */
3830 static inline void gen_op_mfspr(DisasContext *ctx)
3832 void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
3833 uint32_t sprn = SPR(ctx->opcode);
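/* Select the SPR read callback matching the current privilege level:
 * problem state uses uea_read, hypervisor state hea_read, and supervisor
 * state oea_read (mtspr below mirrors this with the corresponding write
 * callbacks). */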
3835 #if defined(CONFIG_USER_ONLY)
3836 read_cb = ctx->spr_cb[sprn].uea_read;
3837 #else
3838 if (ctx->pr) {
3839 read_cb = ctx->spr_cb[sprn].uea_read;
3840 } else if (ctx->hv) {
3841 read_cb = ctx->spr_cb[sprn].hea_read;
3842 } else {
3843 read_cb = ctx->spr_cb[sprn].oea_read;
3845 #endif
3846 if (likely(read_cb != NULL)) {
3847 if (likely(read_cb != SPR_NOACCESS)) {
3848 (*read_cb)(ctx, rD(ctx->opcode), sprn);
3849 } else {
3850 /* Privilege exception */
3851 /* This is a hack to avoid warnings when running Linux:
3852 * this OS breaks the PowerPC virtualisation model,
3853 * allowing userland applications to read the PVR
3855 if (sprn != SPR_PVR) {
3856 fprintf(stderr, "Trying to read privileged spr %d (0x%03x) at "
3857 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
3858 if (qemu_log_separate()) {
3859 qemu_log("Trying to read privileged spr %d (0x%03x) at "
3860 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
3863 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
3865 } else {
3866 /* ISA 2.07 defines these as no-ops */
3867 if ((ctx->insns_flags2 & PPC2_ISA207S) &&
3868 (sprn >= 808 && sprn <= 811)) {
3869 /* This is a nop */
3870 return;
3872 /* Not defined */
3873 fprintf(stderr, "Trying to read invalid spr %d (0x%03x) at "
3874 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
3875 if (qemu_log_separate()) {
3876 qemu_log("Trying to read invalid spr %d (0x%03x) at "
3877 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
3880 /* The behaviour depends on MSR:PR and SPR# bit 0x10;
3881 * it can generate a priv, a hv emu or a no-op
3883 if (sprn & 0x10) {
3884 if (ctx->pr) {
3885 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
3887 } else {
3888 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
3889 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
3895 static void gen_mfspr(DisasContext *ctx)
3897 gen_op_mfspr(ctx);
3900 /* mftb */
3901 static void gen_mftb(DisasContext *ctx)
3903 gen_op_mfspr(ctx);
3906 /* mtcrf mtocrf */
3907 static void gen_mtcrf(DisasContext *ctx)
3909 uint32_t crm, crn;
3911 crm = CRM(ctx->opcode);
3912 if (likely((ctx->opcode & 0x00100000))) {
3913 if (crm && ((crm & (crm - 1)) == 0)) {
3914 TCGv_i32 temp = tcg_temp_new_i32();
3915 crn = ctz32 (crm);
3916 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3917 tcg_gen_shri_i32(temp, temp, crn * 4);
3918 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
3919 tcg_temp_free_i32(temp);
3921 } else {
3922 TCGv_i32 temp = tcg_temp_new_i32();
3923 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3924 for (crn = 0 ; crn < 8 ; crn++) {
3925 if (crm & (1 << crn)) {
3926 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
3927 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
3930 tcg_temp_free_i32(temp);
3934 /* mtmsr */
3935 #if defined(TARGET_PPC64)
3936 static void gen_mtmsrd(DisasContext *ctx)
3938 CHK_SV;
3940 #if !defined(CONFIG_USER_ONLY)
3941 if (ctx->opcode & 0x00010000) {
3942 /* Special form that does not need any synchronisation */
3943 TCGv t0 = tcg_temp_new();
3944 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3945 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
3946 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3947 tcg_temp_free(t0);
3948 } else {
3949 /* XXX: we need to update nip before the store:
3950 * if we enter power saving mode, we will exit the loop
3951 * directly from ppc_store_msr
3953 gen_update_nip(ctx, ctx->nip);
3954 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]);
3955 /* Must stop the translation as machine state (may have) changed */
3956 /* Note that mtmsr is not always defined as context-synchronizing */
3957 gen_stop_exception(ctx);
3959 #endif /* !defined(CONFIG_USER_ONLY) */
3961 #endif /* defined(TARGET_PPC64) */
3963 static void gen_mtmsr(DisasContext *ctx)
3965 CHK_SV;
3967 #if !defined(CONFIG_USER_ONLY)
3968 if (ctx->opcode & 0x00010000) {
3969 /* Special form that does not need any synchronisation */
3970 TCGv t0 = tcg_temp_new();
3971 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3972 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
3973 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3974 tcg_temp_free(t0);
3975 } else {
3976 TCGv msr = tcg_temp_new();
3978 /* XXX: we need to update nip before the store:
3979 * if we enter power saving mode, we will exit the loop
3980 * directly from ppc_store_msr
3982 gen_update_nip(ctx, ctx->nip);
3983 #if defined(TARGET_PPC64)
3984 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
3985 #else
3986 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
3987 #endif
3988 gen_helper_store_msr(cpu_env, msr);
3989 tcg_temp_free(msr);
3990 /* Must stop the translation as machine state (may have) changed */
3991 /* Note that mtmsr is not always defined as context-synchronizing */
3992 gen_stop_exception(ctx);
3994 #endif
3997 /* mtspr */
3998 static void gen_mtspr(DisasContext *ctx)
4000 void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
4001 uint32_t sprn = SPR(ctx->opcode);
4003 #if defined(CONFIG_USER_ONLY)
4004 write_cb = ctx->spr_cb[sprn].uea_write;
4005 #else
4006 if (ctx->pr) {
4007 write_cb = ctx->spr_cb[sprn].uea_write;
4008 } else if (ctx->hv) {
4009 write_cb = ctx->spr_cb[sprn].hea_write;
4010 } else {
4011 write_cb = ctx->spr_cb[sprn].oea_write;
4013 #endif
4014 if (likely(write_cb != NULL)) {
4015 if (likely(write_cb != SPR_NOACCESS)) {
4016 (*write_cb)(ctx, sprn, rS(ctx->opcode));
4017 } else {
4018 /* Privilege exception */
4019 fprintf(stderr, "Trying to write privileged spr %d (0x%03x) at "
4020 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
4021 if (qemu_log_separate()) {
4022 qemu_log("Trying to write privileged spr %d (0x%03x) at "
4023 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
4025 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4027 } else {
4028 /* ISA 2.07 defines these as no-ops */
4029 if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4030 (sprn >= 808 && sprn <= 811)) {
4031 /* This is a nop */
4032 return;
4035 /* Not defined */
4036 if (qemu_log_separate()) {
4037 qemu_log("Trying to write invalid spr %d (0x%03x) at "
4038 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
4040 fprintf(stderr, "Trying to write invalid spr %d (0x%03x) at "
4041 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
4044 /* The behaviour depends on MSR:PR and SPR# bit 0x10;
4045 * it can generate a priv, a hv emu or a no-op
4047 if (sprn & 0x10) {
4048 if (ctx->pr) {
4049 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
4051 } else {
4052 if (ctx->pr || sprn == 0) {
4053 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
4059 #if defined(TARGET_PPC64)
4060 /* setb */
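/* setb: rD is set to -1 if the LT bit of the selected CR field is set,
 * to 1 if LT is clear but GT is set, and to 0 otherwise; this is done
 * with unsigned comparisons against the 4-bit CR field value.
 */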
4061 static void gen_setb(DisasContext *ctx)
4063 TCGv_i32 t0 = tcg_temp_new_i32();
4064 TCGv_i32 t8 = tcg_temp_new_i32();
4065 TCGv_i32 tm1 = tcg_temp_new_i32();
4066 int crf = crfS(ctx->opcode);
4068 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
4069 tcg_gen_movi_i32(t8, 8);
4070 tcg_gen_movi_i32(tm1, -1);
4071 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
4072 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4074 tcg_temp_free_i32(t0);
4075 tcg_temp_free_i32(t8);
4076 tcg_temp_free_i32(tm1);
4078 #endif
4080 /*** Cache management ***/
4082 /* dcbf */
4083 static void gen_dcbf(DisasContext *ctx)
4085 /* XXX: specification says this is treated as a load by the MMU */
4086 TCGv t0;
4087 gen_set_access_type(ctx, ACCESS_CACHE);
4088 t0 = tcg_temp_new();
4089 gen_addr_reg_index(ctx, t0);
4090 gen_qemu_ld8u(ctx, t0, t0);
4091 tcg_temp_free(t0);
4094 /* dcbi (Supervisor only) */
4095 static void gen_dcbi(DisasContext *ctx)
4097 #if defined(CONFIG_USER_ONLY)
4098 GEN_PRIV;
4099 #else
4100 TCGv EA, val;
4102 CHK_SV;
4103 EA = tcg_temp_new();
4104 gen_set_access_type(ctx, ACCESS_CACHE);
4105 gen_addr_reg_index(ctx, EA);
4106 val = tcg_temp_new();
4107 /* XXX: specification says this should be treated as a store by the MMU */
4108 gen_qemu_ld8u(ctx, val, EA);
4109 gen_qemu_st8(ctx, val, EA);
4110 tcg_temp_free(val);
4111 tcg_temp_free(EA);
4112 #endif /* defined(CONFIG_USER_ONLY) */
4115 /* dcbst */
4116 static void gen_dcbst(DisasContext *ctx)
4118 /* XXX: specification says this is treated as a load by the MMU */
4119 TCGv t0;
4120 gen_set_access_type(ctx, ACCESS_CACHE);
4121 t0 = tcg_temp_new();
4122 gen_addr_reg_index(ctx, t0);
4123 gen_qemu_ld8u(ctx, t0, t0);
4124 tcg_temp_free(t0);
4127 /* dcbt */
4128 static void gen_dcbt(DisasContext *ctx)
4130 /* interpreted as no-op */
4131 /* XXX: specification says this is treated as a load by the MMU
4132 * but does not generate any exception
4136 /* dcbtst */
4137 static void gen_dcbtst(DisasContext *ctx)
4139 /* interpreted as no-op */
4140 /* XXX: specification says this is treated as a load by the MMU
4141 * but does not generate any exception
4145 /* dcbtls */
4146 static void gen_dcbtls(DisasContext *ctx)
4148 /* Always fails locking the cache */
4149 TCGv t0 = tcg_temp_new();
4150 gen_load_spr(t0, SPR_Exxx_L1CSR0);
4151 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
4152 gen_store_spr(SPR_Exxx_L1CSR0, t0);
4153 tcg_temp_free(t0);
4156 /* dcbz */
4157 static void gen_dcbz(DisasContext *ctx)
4159 TCGv tcgv_addr;
4160 TCGv_i32 tcgv_op;
4162 gen_set_access_type(ctx, ACCESS_CACHE);
4163 tcgv_addr = tcg_temp_new();
4164 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
4165 gen_addr_reg_index(ctx, tcgv_addr);
4166 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
4167 tcg_temp_free(tcgv_addr);
4168 tcg_temp_free_i32(tcgv_op);
4171 /* dst / dstt */
4172 static void gen_dst(DisasContext *ctx)
4174 if (rA(ctx->opcode) == 0) {
4175 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4176 } else {
4177 /* interpreted as no-op */
4181 /* dstst / dststt */
4182 static void gen_dstst(DisasContext *ctx)
4184 if (rA(ctx->opcode) == 0) {
4185 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4186 } else {
4187 /* interpreted as no-op */
4192 /* dss / dssall */
4193 static void gen_dss(DisasContext *ctx)
4195 /* interpreted as no-op */
4198 /* icbi */
4199 static void gen_icbi(DisasContext *ctx)
4201 TCGv t0;
4202 gen_set_access_type(ctx, ACCESS_CACHE);
4203 t0 = tcg_temp_new();
4204 gen_addr_reg_index(ctx, t0);
4205 gen_helper_icbi(cpu_env, t0);
4206 tcg_temp_free(t0);
4209 /* Optional: */
4210 /* dcba */
4211 static void gen_dcba(DisasContext *ctx)
4213 /* interpreted as no-op */
4214 /* XXX: specification says this is treated as a store by the MMU
4215 * but does not generate any exception
4219 /*** Segment register manipulation ***/
4220 /* Supervisor only: */
4222 /* mfsr */
4223 static void gen_mfsr(DisasContext *ctx)
4225 #if defined(CONFIG_USER_ONLY)
4226 GEN_PRIV;
4227 #else
4228 TCGv t0;
4230 CHK_SV;
4231 t0 = tcg_const_tl(SR(ctx->opcode));
4232 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4233 tcg_temp_free(t0);
4234 #endif /* defined(CONFIG_USER_ONLY) */
4237 /* mfsrin */
4238 static void gen_mfsrin(DisasContext *ctx)
4240 #if defined(CONFIG_USER_ONLY)
4241 GEN_PRIV;
4242 #else
4243 TCGv t0;
4245 CHK_SV;
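    /* The segment register number is taken from bits 28..31 of rB, i.e. the
     * top nibble of the low 32 bits. */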
4246 t0 = tcg_temp_new();
4247 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4248 tcg_gen_andi_tl(t0, t0, 0xF);
4249 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4250 tcg_temp_free(t0);
4251 #endif /* defined(CONFIG_USER_ONLY) */
4254 /* mtsr */
4255 static void gen_mtsr(DisasContext *ctx)
4257 #if defined(CONFIG_USER_ONLY)
4258 GEN_PRIV;
4259 #else
4260 TCGv t0;
4262 CHK_SV;
4263 t0 = tcg_const_tl(SR(ctx->opcode));
4264 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
4265 tcg_temp_free(t0);
4266 #endif /* defined(CONFIG_USER_ONLY) */
4269 /* mtsrin */
4270 static void gen_mtsrin(DisasContext *ctx)
4272 #if defined(CONFIG_USER_ONLY)
4273 GEN_PRIV;
4274 #else
4275 TCGv t0;
4276 CHK_SV;
4278 t0 = tcg_temp_new();
4279 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4280 tcg_gen_andi_tl(t0, t0, 0xF);
4281 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
4282 tcg_temp_free(t0);
4283 #endif /* defined(CONFIG_USER_ONLY) */
4286 #if defined(TARGET_PPC64)
4287 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
4289 /* mfsr */
4290 static void gen_mfsr_64b(DisasContext *ctx)
4292 #if defined(CONFIG_USER_ONLY)
4293 GEN_PRIV;
4294 #else
4295 TCGv t0;
4297 CHK_SV;
4298 t0 = tcg_const_tl(SR(ctx->opcode));
4299 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4300 tcg_temp_free(t0);
4301 #endif /* defined(CONFIG_USER_ONLY) */
4304 /* mfsrin */
4305 static void gen_mfsrin_64b(DisasContext *ctx)
4307 #if defined(CONFIG_USER_ONLY)
4308 GEN_PRIV;
4309 #else
4310 TCGv t0;
4312 CHK_SV;
4313 t0 = tcg_temp_new();
4314 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4315 tcg_gen_andi_tl(t0, t0, 0xF);
4316 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4317 tcg_temp_free(t0);
4318 #endif /* defined(CONFIG_USER_ONLY) */
4321 /* mtsr */
4322 static void gen_mtsr_64b(DisasContext *ctx)
4324 #if defined(CONFIG_USER_ONLY)
4325 GEN_PRIV;
4326 #else
4327 TCGv t0;
4329 CHK_SV;
4330 t0 = tcg_const_tl(SR(ctx->opcode));
4331 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
4332 tcg_temp_free(t0);
4333 #endif /* defined(CONFIG_USER_ONLY) */
4336 /* mtsrin */
4337 static void gen_mtsrin_64b(DisasContext *ctx)
4339 #if defined(CONFIG_USER_ONLY)
4340 GEN_PRIV;
4341 #else
4342 TCGv t0;
4344 CHK_SV;
4345 t0 = tcg_temp_new();
4346 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4347 tcg_gen_andi_tl(t0, t0, 0xF);
4348 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
4349 tcg_temp_free(t0);
4350 #endif /* defined(CONFIG_USER_ONLY) */
4353 /* slbmte */
4354 static void gen_slbmte(DisasContext *ctx)
4356 #if defined(CONFIG_USER_ONLY)
4357 GEN_PRIV;
4358 #else
4359 CHK_SV;
4361 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
4362 cpu_gpr[rS(ctx->opcode)]);
4363 #endif /* defined(CONFIG_USER_ONLY) */
4366 static void gen_slbmfee(DisasContext *ctx)
4368 #if defined(CONFIG_USER_ONLY)
4369 GEN_PRIV;
4370 #else
4371 CHK_SV;
4373 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
4374 cpu_gpr[rB(ctx->opcode)]);
4375 #endif /* defined(CONFIG_USER_ONLY) */
4378 static void gen_slbmfev(DisasContext *ctx)
4380 #if defined(CONFIG_USER_ONLY)
4381 GEN_PRIV;
4382 #else
4383 CHK_SV;
4385 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
4386 cpu_gpr[rB(ctx->opcode)]);
4387 #endif /* defined(CONFIG_USER_ONLY) */
4390 static void gen_slbfee_(DisasContext *ctx)
4392 #if defined(CONFIG_USER_ONLY)
4393 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4394 #else
4395 TCGLabel *l1, *l2;
4397 if (unlikely(ctx->pr)) {
4398 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4399 return;
4401 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
4402 cpu_gpr[rB(ctx->opcode)]);
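    /* The helper returns -1 when no SLB entry matches: in that case rS is
     * cleared and CR0[EQ] stays clear; otherwise CR0[EQ] is set and rS keeps
     * the VSID. CR0 always starts from SO. */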
4403 l1 = gen_new_label();
4404 l2 = gen_new_label();
4405 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4406 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1);
4407 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
4408 tcg_gen_br(l2);
4409 gen_set_label(l1);
4410 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0);
4411 gen_set_label(l2);
4412 #endif
4414 #endif /* defined(TARGET_PPC64) */
4416 /*** Lookaside buffer management ***/
4417 /* Optional & supervisor only: */
4419 /* tlbia */
4420 static void gen_tlbia(DisasContext *ctx)
4422 #if defined(CONFIG_USER_ONLY)
4423 GEN_PRIV;
4424 #else
4425 CHK_HV;
4427 gen_helper_tlbia(cpu_env);
4428 #endif /* defined(CONFIG_USER_ONLY) */
4431 /* tlbiel */
4432 static void gen_tlbiel(DisasContext *ctx)
4434 #if defined(CONFIG_USER_ONLY)
4435 GEN_PRIV;
4436 #else
4437 CHK_SV;
4439 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4440 #endif /* defined(CONFIG_USER_ONLY) */
4443 /* tlbie */
4444 static void gen_tlbie(DisasContext *ctx)
4446 #if defined(CONFIG_USER_ONLY)
4447 GEN_PRIV;
4448 #else
4449 TCGv_i32 t1;
4450 CHK_HV;
4452 if (NARROW_MODE(ctx)) {
4453 TCGv t0 = tcg_temp_new();
4454 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
4455 gen_helper_tlbie(cpu_env, t0);
4456 tcg_temp_free(t0);
4457 } else {
4458 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4460 t1 = tcg_temp_new_i32();
4461 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
4462 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH);
4463 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
4464 tcg_temp_free_i32(t1);
4465 #endif /* defined(CONFIG_USER_ONLY) */
4468 /* tlbsync */
4469 static void gen_tlbsync(DisasContext *ctx)
4471 #if defined(CONFIG_USER_ONLY)
4472 GEN_PRIV;
4473 #else
4474 CHK_HV;
4476 /* On BookS, ptesync already performs the synchronization, so tlbsync is a nop for server; only BookE needs the flush check here */
4477 if (ctx->insns_flags & PPC_BOOKE) {
4478 gen_check_tlb_flush(ctx, true);
4480 #endif /* defined(CONFIG_USER_ONLY) */
4483 #if defined(TARGET_PPC64)
4484 /* slbia */
4485 static void gen_slbia(DisasContext *ctx)
4487 #if defined(CONFIG_USER_ONLY)
4488 GEN_PRIV;
4489 #else
4490 CHK_SV;
4492 gen_helper_slbia(cpu_env);
4493 #endif /* defined(CONFIG_USER_ONLY) */
4496 /* slbie */
4497 static void gen_slbie(DisasContext *ctx)
4499 #if defined(CONFIG_USER_ONLY)
4500 GEN_PRIV;
4501 #else
4502 CHK_SV;
4504 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4505 #endif /* defined(CONFIG_USER_ONLY) */
4507 #endif /* defined(TARGET_PPC64) */
4509 /*** External control ***/
4510 /* Optional: */
4512 /* eciwx */
4513 static void gen_eciwx(DisasContext *ctx)
4515 TCGv t0;
4516 /* Should check EAR[E] ! */
4517 gen_set_access_type(ctx, ACCESS_EXT);
4518 t0 = tcg_temp_new();
4519 gen_addr_reg_index(ctx, t0);
4520 gen_check_align(ctx, t0, 0x03);
4521 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4522 tcg_temp_free(t0);
4525 /* ecowx */
4526 static void gen_ecowx(DisasContext *ctx)
4528 TCGv t0;
4529 /* Should check EAR[E] ! */
4530 gen_set_access_type(ctx, ACCESS_EXT);
4531 t0 = tcg_temp_new();
4532 gen_addr_reg_index(ctx, t0);
4533 gen_check_align(ctx, t0, 0x03);
4534 gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4535 tcg_temp_free(t0);
4538 /* PowerPC 601 specific instructions */
4540 /* abs - abs. */
4541 static void gen_abs(DisasContext *ctx)
4543 TCGLabel *l1 = gen_new_label();
4544 TCGLabel *l2 = gen_new_label();
4545 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
4546 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4547 tcg_gen_br(l2);
4548 gen_set_label(l1);
4549 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4550 gen_set_label(l2);
4551 if (unlikely(Rc(ctx->opcode) != 0))
4552 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4555 /* abso - abso. */
4556 static void gen_abso(DisasContext *ctx)
4558 TCGLabel *l1 = gen_new_label();
4559 TCGLabel *l2 = gen_new_label();
4560 TCGLabel *l3 = gen_new_label();
4561 /* Start with XER OV disabled, the most likely case */
4562 tcg_gen_movi_tl(cpu_ov, 0);
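    /* 0x80000000 is the only value whose absolute value is not
     * representable: for it OV and SO are set and rD gets the source
     * value unchanged. */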
4563 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
4564 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
4565 tcg_gen_movi_tl(cpu_ov, 1);
4566 tcg_gen_movi_tl(cpu_so, 1);
4567 tcg_gen_br(l2);
4568 gen_set_label(l1);
4569 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4570 tcg_gen_br(l3);
4571 gen_set_label(l2);
4572 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4573 gen_set_label(l3);
4574 if (unlikely(Rc(ctx->opcode) != 0))
4575 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4578 /* clcs */
4579 static void gen_clcs(DisasContext *ctx)
4581 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
4582 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4583 tcg_temp_free_i32(t0);
4584 /* Rc=1 sets CR0 to an undefined state */
4587 /* div - div. */
4588 static void gen_div(DisasContext *ctx)
4590 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
4591 cpu_gpr[rB(ctx->opcode)]);
4592 if (unlikely(Rc(ctx->opcode) != 0))
4593 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4596 /* divo - divo. */
4597 static void gen_divo(DisasContext *ctx)
4599 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
4600 cpu_gpr[rB(ctx->opcode)]);
4601 if (unlikely(Rc(ctx->opcode) != 0))
4602 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4605 /* divs - divs. */
4606 static void gen_divs(DisasContext *ctx)
4608 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
4609 cpu_gpr[rB(ctx->opcode)]);
4610 if (unlikely(Rc(ctx->opcode) != 0))
4611 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4614 /* divso - divso. */
4615 static void gen_divso(DisasContext *ctx)
4617 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env,
4618 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4619 if (unlikely(Rc(ctx->opcode) != 0))
4620 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4623 /* doz - doz. */
4624 static void gen_doz(DisasContext *ctx)
4626 TCGLabel *l1 = gen_new_label();
4627 TCGLabel *l2 = gen_new_label();
4628 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4629 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4630 tcg_gen_br(l2);
4631 gen_set_label(l1);
4632 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4633 gen_set_label(l2);
4634 if (unlikely(Rc(ctx->opcode) != 0))
4635 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4638 /* dozo - dozo. */
4639 static void gen_dozo(DisasContext *ctx)
4641 TCGLabel *l1 = gen_new_label();
4642 TCGLabel *l2 = gen_new_label();
4643 TCGv t0 = tcg_temp_new();
4644 TCGv t1 = tcg_temp_new();
4645 TCGv t2 = tcg_temp_new();
4646 /* Start with XER OV disabled, the most likely case */
4647 tcg_gen_movi_tl(cpu_ov, 0);
4648 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4649 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
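    /* Signed overflow of rB - rA is detected as (rB ^ rA) & ~(rA ^ result)
     * having its sign bit set. */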
4650 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4651 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
4652 tcg_gen_andc_tl(t1, t1, t2);
4653 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
4654 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4655 tcg_gen_movi_tl(cpu_ov, 1);
4656 tcg_gen_movi_tl(cpu_so, 1);
4657 tcg_gen_br(l2);
4658 gen_set_label(l1);
4659 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4660 gen_set_label(l2);
4661 tcg_temp_free(t0);
4662 tcg_temp_free(t1);
4663 tcg_temp_free(t2);
4664 if (unlikely(Rc(ctx->opcode) != 0))
4665 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4668 /* dozi */
4669 static void gen_dozi(DisasContext *ctx)
4671 target_long simm = SIMM(ctx->opcode);
4672 TCGLabel *l1 = gen_new_label();
4673 TCGLabel *l2 = gen_new_label();
4674 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
4675 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
4676 tcg_gen_br(l2);
4677 gen_set_label(l1);
4678 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4679 gen_set_label(l2);
4680 if (unlikely(Rc(ctx->opcode) != 0))
4681 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4684 /* lscbx - lscbx. */
4685 static void gen_lscbx(DisasContext *ctx)
4687 TCGv t0 = tcg_temp_new();
4688 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
4689 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
4690 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
4692 gen_addr_reg_index(ctx, t0);
4693 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3);
4694 tcg_temp_free_i32(t1);
4695 tcg_temp_free_i32(t2);
4696 tcg_temp_free_i32(t3);
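    /* The helper returns the number of bytes actually transferred, which is
     * written into the low seven bits of XER. */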
4697 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
4698 tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
4699 if (unlikely(Rc(ctx->opcode) != 0))
4700 gen_set_Rc0(ctx, t0);
4701 tcg_temp_free(t0);
4704 /* maskg - maskg. */
4705 static void gen_maskg(DisasContext *ctx)
4707 TCGLabel *l1 = gen_new_label();
4708 TCGv t0 = tcg_temp_new();
4709 TCGv t1 = tcg_temp_new();
4710 TCGv t2 = tcg_temp_new();
4711 TCGv t3 = tcg_temp_new();
4712 tcg_gen_movi_tl(t3, 0xFFFFFFFF);
4713 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4714 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
4715 tcg_gen_addi_tl(t2, t0, 1);
4716 tcg_gen_shr_tl(t2, t3, t2);
4717 tcg_gen_shr_tl(t3, t3, t1);
4718 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
4719 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
4720 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4721 gen_set_label(l1);
4722 tcg_temp_free(t0);
4723 tcg_temp_free(t1);
4724 tcg_temp_free(t2);
4725 tcg_temp_free(t3);
4726 if (unlikely(Rc(ctx->opcode) != 0))
4727 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4730 /* maskir - maskir. */
4731 static void gen_maskir(DisasContext *ctx)
4733 TCGv t0 = tcg_temp_new();
4734 TCGv t1 = tcg_temp_new();
4735 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4736 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4737 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4738 tcg_temp_free(t0);
4739 tcg_temp_free(t1);
4740 if (unlikely(Rc(ctx->opcode) != 0))
4741 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4744 /* mul - mul. */
4745 static void gen_mul(DisasContext *ctx)
4747 TCGv_i64 t0 = tcg_temp_new_i64();
4748 TCGv_i64 t1 = tcg_temp_new_i64();
4749 TCGv t2 = tcg_temp_new();
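    /* The 64-bit product is split: its low 32 bits go to the MQ register
     * and its high 32 bits to rD. */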
4750 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4751 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4752 tcg_gen_mul_i64(t0, t0, t1);
4753 tcg_gen_trunc_i64_tl(t2, t0);
4754 gen_store_spr(SPR_MQ, t2);
4755 tcg_gen_shri_i64(t1, t0, 32);
4756 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4757 tcg_temp_free_i64(t0);
4758 tcg_temp_free_i64(t1);
4759 tcg_temp_free(t2);
4760 if (unlikely(Rc(ctx->opcode) != 0))
4761 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4764 /* mulo - mulo. */
4765 static void gen_mulo(DisasContext *ctx)
4767 TCGLabel *l1 = gen_new_label();
4768 TCGv_i64 t0 = tcg_temp_new_i64();
4769 TCGv_i64 t1 = tcg_temp_new_i64();
4770 TCGv t2 = tcg_temp_new();
4771 /* Start with XER OV disabled, the most likely case */
4772 tcg_gen_movi_tl(cpu_ov, 0);
4773 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4774 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4775 tcg_gen_mul_i64(t0, t0, t1);
4776 tcg_gen_trunc_i64_tl(t2, t0);
4777 gen_store_spr(SPR_MQ, t2);
4778 tcg_gen_shri_i64(t1, t0, 32);
4779 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
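    /* OV/SO are set when the 64-bit product differs from the sign extension
     * of its low 32 bits, i.e. when it does not fit in a signed word. */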
4780 tcg_gen_ext32s_i64(t1, t0);
4781 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
4782 tcg_gen_movi_tl(cpu_ov, 1);
4783 tcg_gen_movi_tl(cpu_so, 1);
4784 gen_set_label(l1);
4785 tcg_temp_free_i64(t0);
4786 tcg_temp_free_i64(t1);
4787 tcg_temp_free(t2);
4788 if (unlikely(Rc(ctx->opcode) != 0))
4789 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4792 /* nabs - nabs. */
4793 static void gen_nabs(DisasContext *ctx)
4795 TCGLabel *l1 = gen_new_label();
4796 TCGLabel *l2 = gen_new_label();
4797 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4798 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4799 tcg_gen_br(l2);
4800 gen_set_label(l1);
4801 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4802 gen_set_label(l2);
4803 if (unlikely(Rc(ctx->opcode) != 0))
4804 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4807 /* nabso - nabso. */
4808 static void gen_nabso(DisasContext *ctx)
4810 TCGLabel *l1 = gen_new_label();
4811 TCGLabel *l2 = gen_new_label();
4812 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4813 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4814 tcg_gen_br(l2);
4815 gen_set_label(l1);
4816 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4817 gen_set_label(l2);
4818 /* nabs never overflows */
4819 tcg_gen_movi_tl(cpu_ov, 0);
4820 if (unlikely(Rc(ctx->opcode) != 0))
4821 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4824 /* rlmi - rlmi. */
4825 static void gen_rlmi(DisasContext *ctx)
4827 uint32_t mb = MB(ctx->opcode);
4828 uint32_t me = ME(ctx->opcode);
4829 TCGv t0 = tcg_temp_new();
4830 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4831 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4832 tcg_gen_andi_tl(t0, t0, MASK(mb, me));
4833 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
4834 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
4835 tcg_temp_free(t0);
4836 if (unlikely(Rc(ctx->opcode) != 0))
4837 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4840 /* rrib - rrib. */
4841 static void gen_rrib(DisasContext *ctx)
4843 TCGv t0 = tcg_temp_new();
4844 TCGv t1 = tcg_temp_new();
4845 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4846 tcg_gen_movi_tl(t1, 0x80000000);
4847 tcg_gen_shr_tl(t1, t1, t0);
4848 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4849 tcg_gen_and_tl(t0, t0, t1);
4850 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
4851 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4852 tcg_temp_free(t0);
4853 tcg_temp_free(t1);
4854 if (unlikely(Rc(ctx->opcode) != 0))
4855 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4858 /* sle - sle. */
4859 static void gen_sle(DisasContext *ctx)
4861 TCGv t0 = tcg_temp_new();
4862 TCGv t1 = tcg_temp_new();
4863 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4864 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4865 tcg_gen_subfi_tl(t1, 32, t1);
4866 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4867 tcg_gen_or_tl(t1, t0, t1);
4868 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4869 gen_store_spr(SPR_MQ, t1);
4870 tcg_temp_free(t0);
4871 tcg_temp_free(t1);
4872 if (unlikely(Rc(ctx->opcode) != 0))
4873 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4876 /* sleq - sleq. */
4877 static void gen_sleq(DisasContext *ctx)
4879 TCGv t0 = tcg_temp_new();
4880 TCGv t1 = tcg_temp_new();
4881 TCGv t2 = tcg_temp_new();
4882 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4883 tcg_gen_movi_tl(t2, 0xFFFFFFFF);
4884 tcg_gen_shl_tl(t2, t2, t0);
4885 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4886 gen_load_spr(t1, SPR_MQ);
4887 gen_store_spr(SPR_MQ, t0);
4888 tcg_gen_and_tl(t0, t0, t2);
4889 tcg_gen_andc_tl(t1, t1, t2);
4890 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4891 tcg_temp_free(t0);
4892 tcg_temp_free(t1);
4893 tcg_temp_free(t2);
4894 if (unlikely(Rc(ctx->opcode) != 0))
4895 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4898 /* sliq - sliq. */
4899 static void gen_sliq(DisasContext *ctx)
4901 int sh = SH(ctx->opcode);
4902 TCGv t0 = tcg_temp_new();
4903 TCGv t1 = tcg_temp_new();
4904 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4905 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4906 tcg_gen_or_tl(t1, t0, t1);
4907 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4908 gen_store_spr(SPR_MQ, t1);
4909 tcg_temp_free(t0);
4910 tcg_temp_free(t1);
4911 if (unlikely(Rc(ctx->opcode) != 0))
4912 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4915 /* slliq - slliq. */
4916 static void gen_slliq(DisasContext *ctx)
4918 int sh = SH(ctx->opcode);
4919 TCGv t0 = tcg_temp_new();
4920 TCGv t1 = tcg_temp_new();
4921 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4922 gen_load_spr(t1, SPR_MQ);
4923 gen_store_spr(SPR_MQ, t0);
4924 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
4925 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
4926 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4927 tcg_temp_free(t0);
4928 tcg_temp_free(t1);
4929 if (unlikely(Rc(ctx->opcode) != 0))
4930 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4933 /* sllq - sllq. */
4934 static void gen_sllq(DisasContext *ctx)
4936 TCGLabel *l1 = gen_new_label();
4937 TCGLabel *l2 = gen_new_label();
4938 TCGv t0 = tcg_temp_local_new();
4939 TCGv t1 = tcg_temp_local_new();
4940 TCGv t2 = tcg_temp_local_new();
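    /* Bit 0x20 of rB selects the behaviour: when set (shift amount >= 32)
     * the result is MQ masked by the shift mask, otherwise rS shifted left
     * is merged with the MQ bits outside the mask. */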
4941 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4942 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4943 tcg_gen_shl_tl(t1, t1, t2);
4944 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4945 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
4946 gen_load_spr(t0, SPR_MQ);
4947 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4948 tcg_gen_br(l2);
4949 gen_set_label(l1);
4950 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4951 gen_load_spr(t2, SPR_MQ);
4952 tcg_gen_andc_tl(t1, t2, t1);
4953 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4954 gen_set_label(l2);
4955 tcg_temp_free(t0);
4956 tcg_temp_free(t1);
4957 tcg_temp_free(t2);
4958 if (unlikely(Rc(ctx->opcode) != 0))
4959 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4962 /* slq - slq. */
4963 static void gen_slq(DisasContext *ctx)
4965 TCGLabel *l1 = gen_new_label();
4966 TCGv t0 = tcg_temp_new();
4967 TCGv t1 = tcg_temp_new();
4968 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4969 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4970 tcg_gen_subfi_tl(t1, 32, t1);
4971 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4972 tcg_gen_or_tl(t1, t0, t1);
4973 gen_store_spr(SPR_MQ, t1);
4974 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
4975 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4976 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4977 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
4978 gen_set_label(l1);
4979 tcg_temp_free(t0);
4980 tcg_temp_free(t1);
4981 if (unlikely(Rc(ctx->opcode) != 0))
4982 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4985 /* sraiq - sraiq. */
4986 static void gen_sraiq(DisasContext *ctx)
4988 int sh = SH(ctx->opcode);
4989 TCGLabel *l1 = gen_new_label();
4990 TCGv t0 = tcg_temp_new();
4991 TCGv t1 = tcg_temp_new();
4992 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4993 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4994 tcg_gen_or_tl(t0, t0, t1);
4995 gen_store_spr(SPR_MQ, t0);
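    /* CA is set only when the source is negative and non-zero bits were
     * shifted out, the usual algebraic shift-right carry rule. */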
4996 tcg_gen_movi_tl(cpu_ca, 0);
4997 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4998 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
4999 tcg_gen_movi_tl(cpu_ca, 1);
5000 gen_set_label(l1);
5001 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
5002 tcg_temp_free(t0);
5003 tcg_temp_free(t1);
5004 if (unlikely(Rc(ctx->opcode) != 0))
5005 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5008 /* sraq - sraq. */
5009 static void gen_sraq(DisasContext *ctx)
5011 TCGLabel *l1 = gen_new_label();
5012 TCGLabel *l2 = gen_new_label();
5013 TCGv t0 = tcg_temp_new();
5014 TCGv t1 = tcg_temp_local_new();
5015 TCGv t2 = tcg_temp_local_new();
5016 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
5017 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5018 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
5019 tcg_gen_subfi_tl(t2, 32, t2);
5020 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
5021 tcg_gen_or_tl(t0, t0, t2);
5022 gen_store_spr(SPR_MQ, t0);
5023 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5024 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1);
5025 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
5026 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
5027 gen_set_label(l1);
5028 tcg_temp_free(t0);
5029 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
5030 tcg_gen_movi_tl(cpu_ca, 0);
5031 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
5032 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
5033 tcg_gen_movi_tl(cpu_ca, 1);
5034 gen_set_label(l2);
5035 tcg_temp_free(t1);
5036 tcg_temp_free(t2);
5037 if (unlikely(Rc(ctx->opcode) != 0))
5038 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5041 /* sre - sre. */
5042 static void gen_sre(DisasContext *ctx)
5044 TCGv t0 = tcg_temp_new();
5045 TCGv t1 = tcg_temp_new();
5046 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5047 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5048 tcg_gen_subfi_tl(t1, 32, t1);
5049 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5050 tcg_gen_or_tl(t1, t0, t1);
5051 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5052 gen_store_spr(SPR_MQ, t1);
5053 tcg_temp_free(t0);
5054 tcg_temp_free(t1);
5055 if (unlikely(Rc(ctx->opcode) != 0))
5056 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5059 /* srea - srea. */
5060 static void gen_srea(DisasContext *ctx)
5062 TCGv t0 = tcg_temp_new();
5063 TCGv t1 = tcg_temp_new();
5064 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5065 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5066 gen_store_spr(SPR_MQ, t0);
5067 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
5068 tcg_temp_free(t0);
5069 tcg_temp_free(t1);
5070 if (unlikely(Rc(ctx->opcode) != 0))
5071 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5074 /* sreq */
5075 static void gen_sreq(DisasContext *ctx)
5077 TCGv t0 = tcg_temp_new();
5078 TCGv t1 = tcg_temp_new();
5079 TCGv t2 = tcg_temp_new();
5080 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
5081 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5082 tcg_gen_shr_tl(t1, t1, t0);
5083 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
5084 gen_load_spr(t2, SPR_MQ);
5085 gen_store_spr(SPR_MQ, t0);
5086 tcg_gen_and_tl(t0, t0, t1);
5087 tcg_gen_andc_tl(t2, t2, t1);
5088 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
5089 tcg_temp_free(t0);
5090 tcg_temp_free(t1);
5091 tcg_temp_free(t2);
5092 if (unlikely(Rc(ctx->opcode) != 0))
5093 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5096 /* sriq */
5097 static void gen_sriq(DisasContext *ctx)
5099 int sh = SH(ctx->opcode);
5100 TCGv t0 = tcg_temp_new();
5101 TCGv t1 = tcg_temp_new();
5102 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5103 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
5104 tcg_gen_or_tl(t1, t0, t1);
5105 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5106 gen_store_spr(SPR_MQ, t1);
5107 tcg_temp_free(t0);
5108 tcg_temp_free(t1);
5109 if (unlikely(Rc(ctx->opcode) != 0))
5110 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5113 /* srliq */
5114 static void gen_srliq(DisasContext *ctx)
5116 int sh = SH(ctx->opcode);
5117 TCGv t0 = tcg_temp_new();
5118 TCGv t1 = tcg_temp_new();
5119 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5120 gen_load_spr(t1, SPR_MQ);
5121 gen_store_spr(SPR_MQ, t0);
5122 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
5123 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
5124 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5125 tcg_temp_free(t0);
5126 tcg_temp_free(t1);
5127 if (unlikely(Rc(ctx->opcode) != 0))
5128 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5131 /* srlq */
5132 static void gen_srlq(DisasContext *ctx)
5134 TCGLabel *l1 = gen_new_label();
5135 TCGLabel *l2 = gen_new_label();
5136 TCGv t0 = tcg_temp_local_new();
5137 TCGv t1 = tcg_temp_local_new();
5138 TCGv t2 = tcg_temp_local_new();
5139 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
5140 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5141 tcg_gen_shr_tl(t2, t1, t2);
5142 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5143 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5144 gen_load_spr(t0, SPR_MQ);
5145 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
5146 tcg_gen_br(l2);
5147 gen_set_label(l1);
5148 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5149 tcg_gen_and_tl(t0, t0, t2);
5150 gen_load_spr(t1, SPR_MQ);
5151 tcg_gen_andc_tl(t1, t1, t2);
5152 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5153 gen_set_label(l2);
5154 tcg_temp_free(t0);
5155 tcg_temp_free(t1);
5156 tcg_temp_free(t2);
5157 if (unlikely(Rc(ctx->opcode) != 0))
5158 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5161 /* srq */
5162 static void gen_srq(DisasContext *ctx)
5164 TCGLabel *l1 = gen_new_label();
5165 TCGv t0 = tcg_temp_new();
5166 TCGv t1 = tcg_temp_new();
5167 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5168 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5169 tcg_gen_subfi_tl(t1, 32, t1);
5170 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5171 tcg_gen_or_tl(t1, t0, t1);
5172 gen_store_spr(SPR_MQ, t1);
5173 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
5174 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5175 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5176 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
5177 gen_set_label(l1);
5178 tcg_temp_free(t0);
5179 tcg_temp_free(t1);
5180 if (unlikely(Rc(ctx->opcode) != 0))
5181 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5184 /* PowerPC 602 specific instructions */
5186 /* dsa */
5187 static void gen_dsa(DisasContext *ctx)
5189 /* XXX: TODO */
5190 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5193 /* esa */
5194 static void gen_esa(DisasContext *ctx)
5196 /* XXX: TODO */
5197 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5200 /* mfrom */
5201 static void gen_mfrom(DisasContext *ctx)
5203 #if defined(CONFIG_USER_ONLY)
5204 GEN_PRIV;
5205 #else
5206 CHK_SV;
5207 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5208 #endif /* defined(CONFIG_USER_ONLY) */
5211 /* 602 - 603 - G2 TLB management */
5213 /* tlbld */
5214 static void gen_tlbld_6xx(DisasContext *ctx)
5216 #if defined(CONFIG_USER_ONLY)
5217 GEN_PRIV;
5218 #else
5219 CHK_SV;
5220 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5221 #endif /* defined(CONFIG_USER_ONLY) */
5224 /* tlbli */
5225 static void gen_tlbli_6xx(DisasContext *ctx)
5227 #if defined(CONFIG_USER_ONLY)
5228 GEN_PRIV;
5229 #else
5230 CHK_SV;
5231 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5232 #endif /* defined(CONFIG_USER_ONLY) */
5235 /* 74xx TLB management */
5237 /* tlbld */
5238 static void gen_tlbld_74xx(DisasContext *ctx)
5240 #if defined(CONFIG_USER_ONLY)
5241 GEN_PRIV;
5242 #else
5243 CHK_SV;
5244 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5245 #endif /* defined(CONFIG_USER_ONLY) */
5248 /* tlbli */
5249 static void gen_tlbli_74xx(DisasContext *ctx)
5251 #if defined(CONFIG_USER_ONLY)
5252 GEN_PRIV;
5253 #else
5254 CHK_SV;
5255 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5256 #endif /* defined(CONFIG_USER_ONLY) */
5259 /* POWER instructions not in PowerPC 601 */
5261 /* clf */
5262 static void gen_clf(DisasContext *ctx)
5264 /* Cache line flush: implemented as no-op */
5267 /* cli */
5268 static void gen_cli(DisasContext *ctx)
5270 #if defined(CONFIG_USER_ONLY)
5271 GEN_PRIV;
5272 #else
5273 /* Cache line invalidate: privileged and treated as no-op */
5274 CHK_SV;
5275 #endif /* defined(CONFIG_USER_ONLY) */
5278 /* dclst */
5279 static void gen_dclst(DisasContext *ctx)
5281 /* Data cache line store: treated as no-op */
5284 static void gen_mfsri(DisasContext *ctx)
5286 #if defined(CONFIG_USER_ONLY)
5287 GEN_PRIV;
5288 #else
5289 int ra = rA(ctx->opcode);
5290 int rd = rD(ctx->opcode);
5291 TCGv t0;
5293 CHK_SV;
5294 t0 = tcg_temp_new();
5295 gen_addr_reg_index(ctx, t0);
5296 tcg_gen_shri_tl(t0, t0, 28);
5297 tcg_gen_andi_tl(t0, t0, 0xF);
5298 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
5299 tcg_temp_free(t0);
5300 if (ra != 0 && ra != rd)
5301 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
5302 #endif /* defined(CONFIG_USER_ONLY) */
5305 static void gen_rac(DisasContext *ctx)
5307 #if defined(CONFIG_USER_ONLY)
5308 GEN_PRIV;
5309 #else
5310 TCGv t0;
5312 CHK_SV;
5313 t0 = tcg_temp_new();
5314 gen_addr_reg_index(ctx, t0);
5315 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5316 tcg_temp_free(t0);
5317 #endif /* defined(CONFIG_USER_ONLY) */
5320 static void gen_rfsvc(DisasContext *ctx)
5322 #if defined(CONFIG_USER_ONLY)
5323 GEN_PRIV;
5324 #else
5325 CHK_SV;
5327 gen_helper_rfsvc(cpu_env);
5328 gen_sync_exception(ctx);
5329 #endif /* defined(CONFIG_USER_ONLY) */
5332 /* svc is not implemented for now */
5334 /* BookE specific instructions */
5336 /* XXX: not implemented on 440 ? */
5337 static void gen_mfapidi(DisasContext *ctx)
5339 /* XXX: TODO */
5340 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5343 /* XXX: not implemented on 440 ? */
5344 static void gen_tlbiva(DisasContext *ctx)
5346 #if defined(CONFIG_USER_ONLY)
5347 GEN_PRIV;
5348 #else
5349 TCGv t0;
5351 CHK_SV;
5352 t0 = tcg_temp_new();
5353 gen_addr_reg_index(ctx, t0);
5354 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5355 tcg_temp_free(t0);
5356 #endif /* defined(CONFIG_USER_ONLY) */
5359 /* All 405 MAC instructions are translated here */
5360 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5361 int ra, int rb, int rt, int Rc)
5363 TCGv t0, t1;
5365 t0 = tcg_temp_local_new();
5366 t1 = tcg_temp_local_new();
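    /* The low bits of opc3 select which 16-bit halves of rA and rB feed the
     * multiplier and whether they are sign- or zero-extended; opc2 then
     * distinguishes a plain multiply from (negative) multiply-and-accumulate. */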
5368 switch (opc3 & 0x0D) {
5369 case 0x05:
5370 /* macchw - macchw. - macchwo - macchwo. */
5371 /* macchws - macchws. - macchwso - macchwso. */
5372 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
5373 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
5374 /* mulchw - mulchw. */
5375 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5376 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5377 tcg_gen_ext16s_tl(t1, t1);
5378 break;
5379 case 0x04:
5380 /* macchwu - macchwu. - macchwuo - macchwuo. */
5381 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
5382 /* mulchwu - mulchwu. */
5383 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5384 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5385 tcg_gen_ext16u_tl(t1, t1);
5386 break;
5387 case 0x01:
5388 /* machhw - machhw. - machhwo - machhwo. */
5389 /* machhws - machhws. - machhwso - machhwso. */
5390 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
5391 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
5392 /* mulhhw - mulhhw. */
5393 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5394 tcg_gen_ext16s_tl(t0, t0);
5395 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5396 tcg_gen_ext16s_tl(t1, t1);
5397 break;
5398 case 0x00:
5399 /* machhwu - machhwu. - machhwuo - machhwuo. */
5400 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
5401 /* mulhhwu - mulhhwu. */
5402 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5403 tcg_gen_ext16u_tl(t0, t0);
5404 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5405 tcg_gen_ext16u_tl(t1, t1);
5406 break;
5407 case 0x0D:
5408 /* maclhw - maclhw. - maclhwo - maclhwo. */
5409 /* maclhws - maclhws. - maclhwso - maclhwso. */
5410 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
5411 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
5412 /* mullhw - mullhw. */
5413 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5414 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5415 break;
5416 case 0x0C:
5417 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
5418 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
5419 /* mullhwu - mullhwu. */
5420 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5421 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5422 break;
5424 if (opc2 & 0x04) {
5425 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5426 tcg_gen_mul_tl(t1, t0, t1);
5427 if (opc2 & 0x02) {
5428 /* nmultiply-and-accumulate (0x0E) */
5429 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5430 } else {
5431 /* multiply-and-accumulate (0x0C) */
5432 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5435 if (opc3 & 0x12) {
5436 /* Check overflow and/or saturate */
5437 TCGLabel *l1 = gen_new_label();
5439 if (opc3 & 0x10) {
5440 /* Start with XER OV disabled, the most likely case */
5441 tcg_gen_movi_tl(cpu_ov, 0);
5443 if (opc3 & 0x01) {
5444 /* Signed */
5445 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5446 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5447 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5448 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5449 if (opc3 & 0x02) {
5450 /* Saturate */
5451 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5452 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5454 } else {
5455 /* Unsigned */
5456 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5457 if (opc3 & 0x02) {
5458 /* Saturate */
5459 tcg_gen_movi_tl(t0, UINT32_MAX);
5462 if (opc3 & 0x10) {
5463 /* Check overflow */
5464 tcg_gen_movi_tl(cpu_ov, 1);
5465 tcg_gen_movi_tl(cpu_so, 1);
5467 gen_set_label(l1);
5468 tcg_gen_mov_tl(cpu_gpr[rt], t0);
5470 } else {
5471 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5473 tcg_temp_free(t0);
5474 tcg_temp_free(t1);
5475 if (unlikely(Rc != 0)) {
5476 /* Update Rc0 */
5477 gen_set_Rc0(ctx, cpu_gpr[rt]);
5481 #define GEN_MAC_HANDLER(name, opc2, opc3) \
5482 static void glue(gen_, name)(DisasContext *ctx) \
5484 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
5485 rD(ctx->opcode), Rc(ctx->opcode)); \
5488 /* macchw - macchw. */
5489 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5490 /* macchwo - macchwo. */
5491 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5492 /* macchws - macchws. */
5493 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5494 /* macchwso - macchwso. */
5495 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5496 /* macchwsu - macchwsu. */
5497 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5498 /* macchwsuo - macchwsuo. */
5499 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5500 /* macchwu - macchwu. */
5501 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5502 /* macchwuo - macchwuo. */
5503 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5504 /* machhw - machhw. */
5505 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5506 /* machhwo - machhwo. */
5507 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5508 /* machhws - machhws. */
5509 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5510 /* machhwso - machhwso. */
5511 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5512 /* machhwsu - machhwsu. */
5513 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5514 /* machhwsuo - machhwsuo. */
5515 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5516 /* machhwu - machhwu. */
5517 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5518 /* machhwuo - machhwuo. */
5519 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5520 /* maclhw - maclhw. */
5521 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5522 /* maclhwo - maclhwo. */
5523 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5524 /* maclhws - maclhws. */
5525 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5526 /* maclhwso - maclhwso. */
5527 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5528 /* maclhwu - maclhwu. */
5529 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5530 /* maclhwuo - maclhwuo. */
5531 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5532 /* maclhwsu - maclhwsu. */
5533 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5534 /* maclhwsuo - maclhwsuo. */
5535 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5536 /* nmacchw - nmacchw. */
5537 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5538 /* nmacchwo - nmacchwo. */
5539 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5540 /* nmacchws - nmacchws. */
5541 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5542 /* nmacchwso - nmacchwso. */
5543 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5544 /* nmachhw - nmachhw. */
5545 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5546 /* nmachhwo - nmachhwo. */
5547 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5548 /* nmachhws - nmachhws. */
5549 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5550 /* nmachhwso - nmachhwso. */
5551 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5552 /* nmaclhw - nmaclhw. */
5553 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5554 /* nmaclhwo - nmaclhwo. */
5555 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5556 /* nmaclhws - nmaclhws. */
5557 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5558 /* nmaclhwso - nmaclhwso. */
5559 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5561 /* mulchw - mulchw. */
5562 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5563 /* mulchwu - mulchwu. */
5564 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5565 /* mulhhw - mulhhw. */
5566 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5567 /* mulhhwu - mulhhwu. */
5568 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5569 /* mullhw - mullhw. */
5570 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5571 /* mullhwu - mullhwu. */
5572 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5574 /* mfdcr */
5575 static void gen_mfdcr(DisasContext *ctx)
5577 #if defined(CONFIG_USER_ONLY)
5578 GEN_PRIV;
5579 #else
5580 TCGv dcrn;
5582 CHK_SV;
5583 dcrn = tcg_const_tl(SPR(ctx->opcode));
5584 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
5585 tcg_temp_free(dcrn);
5586 #endif /* defined(CONFIG_USER_ONLY) */
5589 /* mtdcr */
5590 static void gen_mtdcr(DisasContext *ctx)
5592 #if defined(CONFIG_USER_ONLY)
5593 GEN_PRIV;
5594 #else
5595 TCGv dcrn;
5597 CHK_SV;
5598 dcrn = tcg_const_tl(SPR(ctx->opcode));
5599 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
5600 tcg_temp_free(dcrn);
5601 #endif /* defined(CONFIG_USER_ONLY) */
5604 /* mfdcrx */
5605 /* XXX: not implemented on 440 ? */
5606 static void gen_mfdcrx(DisasContext *ctx)
5608 #if defined(CONFIG_USER_ONLY)
5609 GEN_PRIV;
5610 #else
5611 CHK_SV;
5612 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5613 cpu_gpr[rA(ctx->opcode)]);
5614 /* Note: Rc update flag set leads to undefined state of Rc0 */
5615 #endif /* defined(CONFIG_USER_ONLY) */
5618 /* mtdcrx */
5619 /* XXX: not implemented on 440 ? */
5620 static void gen_mtdcrx(DisasContext *ctx)
5622 #if defined(CONFIG_USER_ONLY)
5623 GEN_PRIV;
5624 #else
5625 CHK_SV;
5626 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5627 cpu_gpr[rS(ctx->opcode)]);
5628 /* Note: Rc update flag set leads to undefined state of Rc0 */
5629 #endif /* defined(CONFIG_USER_ONLY) */
5632 /* mfdcrux (PPC 460) : user-mode access to DCR */
5633 static void gen_mfdcrux(DisasContext *ctx)
5635 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5636 cpu_gpr[rA(ctx->opcode)]);
5637 /* Note: Rc update flag set leads to undefined state of Rc0 */
5640 /* mtdcrux (PPC 460) : user-mode access to DCR */
5641 static void gen_mtdcrux(DisasContext *ctx)
5643 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5644 cpu_gpr[rS(ctx->opcode)]);
5645 /* Note: Rc update flag set leads to undefined state of Rc0 */
5648 /* dccci */
5649 static void gen_dccci(DisasContext *ctx)
5651 CHK_SV;
5652 /* interpreted as no-op */
5655 /* dcread */
5656 static void gen_dcread(DisasContext *ctx)
5658 #if defined(CONFIG_USER_ONLY)
5659 GEN_PRIV;
5660 #else
5661 TCGv EA, val;
5663 CHK_SV;
5664 gen_set_access_type(ctx, ACCESS_CACHE);
5665 EA = tcg_temp_new();
5666 gen_addr_reg_index(ctx, EA);
5667 val = tcg_temp_new();
5668 gen_qemu_ld32u(ctx, val, EA);
5669 tcg_temp_free(val);
5670 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5671 tcg_temp_free(EA);
5672 #endif /* defined(CONFIG_USER_ONLY) */
5675 /* icbt */
5676 static void gen_icbt_40x(DisasContext *ctx)
5678 /* interpreted as no-op */
5679 /* XXX: specification says this is treated as a load by the MMU
5680 * but does not generate any exception
5684 /* iccci */
5685 static void gen_iccci(DisasContext *ctx)
5687 CHK_SV;
5688 /* interpreted as no-op */
5691 /* icread */
5692 static void gen_icread(DisasContext *ctx)
5694 CHK_SV;
5695 /* interpreted as no-op */
5698 /* rfci (supervisor only) */
5699 static void gen_rfci_40x(DisasContext *ctx)
5701 #if defined(CONFIG_USER_ONLY)
5702 GEN_PRIV;
5703 #else
5704 CHK_SV;
5705 /* Restore CPU state */
5706 gen_helper_40x_rfci(cpu_env);
5707 gen_sync_exception(ctx);
5708 #endif /* defined(CONFIG_USER_ONLY) */
5711 static void gen_rfci(DisasContext *ctx)
5713 #if defined(CONFIG_USER_ONLY)
5714 GEN_PRIV;
5715 #else
5716 CHK_SV;
5717 /* Restore CPU state */
5718 gen_helper_rfci(cpu_env);
5719 gen_sync_exception(ctx);
5720 #endif /* defined(CONFIG_USER_ONLY) */
5723 /* BookE specific */
5725 /* XXX: not implemented on 440 ? */
5726 static void gen_rfdi(DisasContext *ctx)
5728 #if defined(CONFIG_USER_ONLY)
5729 GEN_PRIV;
5730 #else
5731 CHK_SV;
5732 /* Restore CPU state */
5733 gen_helper_rfdi(cpu_env);
5734 gen_sync_exception(ctx);
5735 #endif /* defined(CONFIG_USER_ONLY) */
5738 /* XXX: not implemented on 440 ? */
5739 static void gen_rfmci(DisasContext *ctx)
5741 #if defined(CONFIG_USER_ONLY)
5742 GEN_PRIV;
5743 #else
5744 CHK_SV;
5745 /* Restore CPU state */
5746 gen_helper_rfmci(cpu_env);
5747 gen_sync_exception(ctx);
5748 #endif /* defined(CONFIG_USER_ONLY) */
5751 /* TLB management - PowerPC 405 implementation */
5753 /* tlbre */
5754 static void gen_tlbre_40x(DisasContext *ctx)
5756 #if defined(CONFIG_USER_ONLY)
5757 GEN_PRIV;
5758 #else
5759 CHK_SV;
5760 switch (rB(ctx->opcode)) {
5761 case 0:
5762 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
5763 cpu_gpr[rA(ctx->opcode)]);
5764 break;
5765 case 1:
5766 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
5767 cpu_gpr[rA(ctx->opcode)]);
5768 break;
5769 default:
5770 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5771 break;
5773 #endif /* defined(CONFIG_USER_ONLY) */
5776 /* tlbsx - tlbsx. */
5777 static void gen_tlbsx_40x(DisasContext *ctx)
5779 #if defined(CONFIG_USER_ONLY)
5780 GEN_PRIV;
5781 #else
5782 TCGv t0;
5784 CHK_SV;
5785 t0 = tcg_temp_new();
5786 gen_addr_reg_index(ctx, t0);
5787 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5788 tcg_temp_free(t0);
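    /* With Rc set, CR0 is loaded from SO and CR0[EQ] is set when the search
     * found an entry (the helper did not return -1). */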
5789 if (Rc(ctx->opcode)) {
5790 TCGLabel *l1 = gen_new_label();
5791 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5792 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5793 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5794 gen_set_label(l1);
5796 #endif /* defined(CONFIG_USER_ONLY) */
5799 /* tlbwe */
5800 static void gen_tlbwe_40x(DisasContext *ctx)
5802 #if defined(CONFIG_USER_ONLY)
5803 GEN_PRIV;
5804 #else
5805 CHK_SV;
5807 switch (rB(ctx->opcode)) {
5808 case 0:
5809 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
5810 cpu_gpr[rS(ctx->opcode)]);
5811 break;
5812 case 1:
5813 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
5814 cpu_gpr[rS(ctx->opcode)]);
5815 break;
5816 default:
5817 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5818 break;
5820 #endif /* defined(CONFIG_USER_ONLY) */
5823 /* TLB management - PowerPC 440 implementation */
5825 /* tlbre */
5826 static void gen_tlbre_440(DisasContext *ctx)
5828 #if defined(CONFIG_USER_ONLY)
5829 GEN_PRIV;
5830 #else
5831 CHK_SV;
5833 switch (rB(ctx->opcode)) {
5834 case 0:
5835 case 1:
5836 case 2:
5838 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5839 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
5840 t0, cpu_gpr[rA(ctx->opcode)]);
5841 tcg_temp_free_i32(t0);
5843 break;
5844 default:
5845 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5846 break;
5848 #endif /* defined(CONFIG_USER_ONLY) */
5851 /* tlbsx - tlbsx. */
5852 static void gen_tlbsx_440(DisasContext *ctx)
5854 #if defined(CONFIG_USER_ONLY)
5855 GEN_PRIV;
5856 #else
5857 TCGv t0;
5859 CHK_SV;
5860 t0 = tcg_temp_new();
5861 gen_addr_reg_index(ctx, t0);
5862 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5863 tcg_temp_free(t0);
5864 if (Rc(ctx->opcode)) {
5865 TCGLabel *l1 = gen_new_label();
5866 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5867 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5868 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5869 gen_set_label(l1);
5871 #endif /* defined(CONFIG_USER_ONLY) */
5874 /* tlbwe */
5875 static void gen_tlbwe_440(DisasContext *ctx)
5877 #if defined(CONFIG_USER_ONLY)
5878 GEN_PRIV;
5879 #else
5880 CHK_SV;
5881 switch (rB(ctx->opcode)) {
5882 case 0:
5883 case 1:
5884 case 2:
5886 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5887 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
5888 cpu_gpr[rS(ctx->opcode)]);
5889 tcg_temp_free_i32(t0);
5891 break;
5892 default:
5893 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5894 break;
5896 #endif /* defined(CONFIG_USER_ONLY) */
5899 /* TLB management - PowerPC BookE 2.06 implementation */
5901 /* tlbre */
5902 static void gen_tlbre_booke206(DisasContext *ctx)
5904 #if defined(CONFIG_USER_ONLY)
5905 GEN_PRIV;
5906 #else
5907 CHK_SV;
5908 gen_helper_booke206_tlbre(cpu_env);
5909 #endif /* defined(CONFIG_USER_ONLY) */
5912 /* tlbsx - tlbsx. */
5913 static void gen_tlbsx_booke206(DisasContext *ctx)
5915 #if defined(CONFIG_USER_ONLY)
5916 GEN_PRIV;
5917 #else
5918 TCGv t0;
5920 CHK_SV;
5921 if (rA(ctx->opcode)) {
5922 t0 = tcg_temp_new();
5923 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
5924 } else {
5925 t0 = tcg_const_tl(0);
5928 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
5929 gen_helper_booke206_tlbsx(cpu_env, t0);
5930 tcg_temp_free(t0);
5931 #endif /* defined(CONFIG_USER_ONLY) */
5934 /* tlbwe */
5935 static void gen_tlbwe_booke206(DisasContext *ctx)
5937 #if defined(CONFIG_USER_ONLY)
5938 GEN_PRIV;
5939 #else
5940 CHK_SV;
5941 gen_helper_booke206_tlbwe(cpu_env);
5942 #endif /* defined(CONFIG_USER_ONLY) */
5945 static void gen_tlbivax_booke206(DisasContext *ctx)
5947 #if defined(CONFIG_USER_ONLY)
5948 GEN_PRIV;
5949 #else
5950 TCGv t0;
5952 CHK_SV;
5953 t0 = tcg_temp_new();
5954 gen_addr_reg_index(ctx, t0);
5955 gen_helper_booke206_tlbivax(cpu_env, t0);
5956 tcg_temp_free(t0);
5957 #endif /* defined(CONFIG_USER_ONLY) */
5960 static void gen_tlbilx_booke206(DisasContext *ctx)
5962 #if defined(CONFIG_USER_ONLY)
5963 GEN_PRIV;
5964 #else
5965 TCGv t0;
5967 CHK_SV;
5968 t0 = tcg_temp_new();
5969 gen_addr_reg_index(ctx, t0);
5971 switch((ctx->opcode >> 21) & 0x3) {
5972 case 0:
5973 gen_helper_booke206_tlbilx0(cpu_env, t0);
5974 break;
5975 case 1:
5976 gen_helper_booke206_tlbilx1(cpu_env, t0);
5977 break;
5978 case 3:
5979 gen_helper_booke206_tlbilx3(cpu_env, t0);
5980 break;
5981 default:
5982 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5983 break;
5986 tcg_temp_free(t0);
5987 #endif /* defined(CONFIG_USER_ONLY) */
5991 /* wrtee */
5992 static void gen_wrtee(DisasContext *ctx)
5994 #if defined(CONFIG_USER_ONLY)
5995 GEN_PRIV;
5996 #else
5997 TCGv t0;
5999 CHK_SV;
6000 t0 = tcg_temp_new();
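    /* Copy only the EE bit of rD into MSR; every other MSR bit is
     * preserved. */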
6001 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
6002 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6003 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
6004 tcg_temp_free(t0);
6005 /* Stop translation to have a chance to raise an exception
6006 * if we just set msr_ee to 1
6008 gen_stop_exception(ctx);
6009 #endif /* defined(CONFIG_USER_ONLY) */
6012 /* wrteei */
6013 static void gen_wrteei(DisasContext *ctx)
6015 #if defined(CONFIG_USER_ONLY)
6016 GEN_PRIV;
6017 #else
6018 CHK_SV;
6019 if (ctx->opcode & 0x00008000) {
6020 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6021 /* Stop translation to have a chance to raise an exception */
6022 gen_stop_exception(ctx);
6023 } else {
6024 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6026 #endif /* defined(CONFIG_USER_ONLY) */
6029 /* PowerPC 440 specific instructions */
6031 /* dlmzb */
6032 static void gen_dlmzb(DisasContext *ctx)
6034 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6035 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
6036 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
6037 tcg_temp_free_i32(t0);
6040 /* mbar replaces eieio on 440 */
6041 static void gen_mbar(DisasContext *ctx)
6043 /* interpreted as no-op */
6046 /* msync replaces sync on 440 */
6047 static void gen_msync_4xx(DisasContext *ctx)
6049 /* interpreted as no-op */
6052 /* icbt */
6053 static void gen_icbt_440(DisasContext *ctx)
6055 /* interpreted as no-op */
6056 /* XXX: specification says this is treated as a load by the MMU
6057 * but does not generate any exception
6061 /* Embedded.Processor Control */
6063 static void gen_msgclr(DisasContext *ctx)
6065 #if defined(CONFIG_USER_ONLY)
6066 GEN_PRIV;
6067 #else
6068 CHK_SV;
6069 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6070 #endif /* defined(CONFIG_USER_ONLY) */
6073 static void gen_msgsnd(DisasContext *ctx)
6075 #if defined(CONFIG_USER_ONLY)
6076 GEN_PRIV;
6077 #else
6078 CHK_SV;
6079 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
6080 #endif /* defined(CONFIG_USER_ONLY) */
6084 #if defined(TARGET_PPC64)
6085 static void gen_maddld(DisasContext *ctx)
6087 TCGv_i64 t1 = tcg_temp_new_i64();
6089 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6090 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
6091 tcg_temp_free_i64(t1);
6094 /* maddhd maddhdu */
6095 static void gen_maddhd_maddhdu(DisasContext *ctx)
6097 TCGv_i64 lo = tcg_temp_new_i64();
6098 TCGv_i64 hi = tcg_temp_new_i64();
6099 TCGv_i64 t1 = tcg_temp_new_i64();
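    /* The Rc bit position holds the low bit of the extended opcode: set for
     * maddhdu (unsigned multiply, rC added zero-extended), clear for maddhd
     * (signed multiply, rC sign-extended). rD receives the high 64 bits of
     * the 128-bit sum. */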
6101 if (Rc(ctx->opcode)) {
6102 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
6103 cpu_gpr[rB(ctx->opcode)]);
6104 tcg_gen_movi_i64(t1, 0);
6105 } else {
6106 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
6107 cpu_gpr[rB(ctx->opcode)]);
6108 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
6110 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
6111 cpu_gpr[rC(ctx->opcode)], t1);
6112 tcg_temp_free_i64(lo);
6113 tcg_temp_free_i64(hi);
6114 tcg_temp_free_i64(t1);
6116 #endif /* defined(TARGET_PPC64) */
6118 static void gen_tbegin(DisasContext *ctx)
6120 if (unlikely(!ctx->tm_enabled)) {
6121 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6122 return;
6124 gen_helper_tbegin(cpu_env);
6127 #define GEN_TM_NOOP(name) \
6128 static inline void gen_##name(DisasContext *ctx) \
6130 if (unlikely(!ctx->tm_enabled)) { \
6131 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \
6132 return; \
6134 /* Because tbegin always fails in QEMU, these user \
6135 * space instructions all have a simple implementation: \
6137 * CR[0] = 0b0 || MSR[TS] || 0b0 \
6138 * = 0b0 || 0b00 || 0b0 \
6139 */ \
6140 tcg_gen_movi_i32(cpu_crf[0], 0); \
6143 GEN_TM_NOOP(tend);
6144 GEN_TM_NOOP(tabort);
6145 GEN_TM_NOOP(tabortwc);
6146 GEN_TM_NOOP(tabortwci);
6147 GEN_TM_NOOP(tabortdc);
6148 GEN_TM_NOOP(tabortdci);
6149 GEN_TM_NOOP(tsr);
6151 static void gen_tcheck(DisasContext *ctx)
6153 if (unlikely(!ctx->tm_enabled)) {
6154 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6155 return;
6157 /* Because tbegin always fails, the tcheck implementation
6158 * is simple:
6160 * CR[CRF] = TDOOMED || MSR[TS] || 0b0
6161 * = 0b1 || 0b00 || 0b0
6163 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
6166 #if defined(CONFIG_USER_ONLY)
6167 #define GEN_TM_PRIV_NOOP(name) \
6168 static inline void gen_##name(DisasContext *ctx) \
6170 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \
6173 #else
6175 #define GEN_TM_PRIV_NOOP(name) \
6176 static inline void gen_##name(DisasContext *ctx) \
6178 CHK_SV; \
6179 if (unlikely(!ctx->tm_enabled)) { \
6180 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \
6181 return; \
6183 /* Because tbegin always fails, the implementation is \
6184 * simple: \
6186 * CR[0] = 0b0 || MSR[TS] || 0b0 \
6187 * = 0b0 || 0b00 || 0b0 \
6188 */ \
6189 tcg_gen_movi_i32(cpu_crf[0], 0); \
6192 #endif
6194 GEN_TM_PRIV_NOOP(treclaim);
6195 GEN_TM_PRIV_NOOP(trechkpt);
6197 #include "translate/fp-impl.inc.c"
6199 #include "translate/vmx-impl.inc.c"
6201 #include "translate/vsx-impl.inc.c"
6203 #include "translate/dfp-impl.inc.c"
6205 #include "translate/spe-impl.inc.c"
6207 static opcode_t opcodes[] = {
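    /* Each entry pairs a translation routine with its primary opcode, two
     * extended opcode fields, a mask of opcode bits that must be zero for a
     * valid encoding, and the instruction-family flag required for the
     * handler to be installed; GEN_HANDLER_E adds a second flags word for
     * newer ISA features. */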
6208 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
6209 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
6210 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
6211 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER),
6212 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
6213 #if defined(TARGET_PPC64)
6214 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
6215 #endif
6216 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
6217 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
6218 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
6219 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6220 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6221 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6222 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6223 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
6224 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
6225 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
6226 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
6227 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
6228 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6229 #if defined(TARGET_PPC64)
6230 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
6231 #endif
6232 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
6233 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
6234 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6235 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6236 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6237 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
6238 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
6239 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
6240 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
6241 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6242 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6243 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6244 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6245 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
6246 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
6247 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
6248 #if defined(TARGET_PPC64)
6249 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
6250 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
6251 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
6252 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
6253 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
6254 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
6255 #endif
6256 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6257 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6258 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6259 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
6260 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
6261 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
6262 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
6263 #if defined(TARGET_PPC64)
6264 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
6265 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
6266 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
6267 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
6268 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
6269 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
6270 PPC_NONE, PPC2_ISA300),
6271 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
6272 PPC_NONE, PPC2_ISA300),
6273 #endif
6274 #if defined(TARGET_PPC64)
6275 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
6276 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
6277 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
6278 #endif
6279 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6280 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6281 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
6282 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
6283 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
6284 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
6285 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO),
6286 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
6287 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6288 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6289 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
6290 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6291 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6292 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
6293 #if defined(TARGET_PPC64)
6294 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
6295 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
6296 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
6297 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
6298 #endif
6299 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
6300 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
6301 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6302 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6303 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
6304 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
6305 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
6306 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
6307 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
6308 #if defined(TARGET_PPC64)
6309 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
6310 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6311 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6312 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6313 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6314 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
6315 #endif
6316 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW),
6317 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
6318 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6319 #if defined(TARGET_PPC64)
6320 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
6321 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
6322 #endif
6323 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
6324 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
6325 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
6326 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
6327 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
6328 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
6329 #if defined(TARGET_PPC64)
6330 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
6331 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
6332 #endif
6333 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
6334 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
6335 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
6336 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
6337 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
6338 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
6339 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
6340 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
6341 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
6342 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
6343 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC),
6344 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
6345 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
6346 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
6347 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
6348 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
6349 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
6350 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
6351 #if defined(TARGET_PPC64)
6352 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
6353 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
6354 PPC_SEGMENT_64B),
6355 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
6356 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
6357 PPC_SEGMENT_64B),
6358 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
6359 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
6360 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
6361 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B),
6362 #endif
6363 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
6364 /* XXX Those instructions will need to be handled differently for
6365 * different ISA versions */
6366 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE),
6367 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE),
6368 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
6369 #if defined(TARGET_PPC64)
6370 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI),
6371 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
6372 #endif
6373 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
6374 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
6375 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
6376 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
6377 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
6378 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
6379 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
6380 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
6381 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
6382 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
6383 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
6384 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
6385 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
6386 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
6387 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
6388 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
6389 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
6390 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
6391 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
6392 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
6393 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
6394 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
6395 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
6396 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
6397 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
6398 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
6399 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
6400 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
6401 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
6402 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
6403 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
6404 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
6405 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
6406 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
6407 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
6408 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
6409 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
6410 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
6411 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
6412 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
6413 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
6414 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
6415 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
6416 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
6417 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
6418 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
6419 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
6420 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
6421 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
6422 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
6423 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
6424 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
6425 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
6426 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
6427 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
6428 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
6429 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
6430 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
6431 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
6432 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
6433 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
6434 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
6435 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
6436 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
6437 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
6438 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
6439 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
6440 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
6441 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
6442 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
6443 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
6444 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
6445 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
6446 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
6447 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
6448 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
6449 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
6450 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
6451 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
6452 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
6453 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
6454 PPC_NONE, PPC2_BOOKE206),
6455 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
6456 PPC_NONE, PPC2_BOOKE206),
6457 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
6458 PPC_NONE, PPC2_BOOKE206),
6459 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
6460 PPC_NONE, PPC2_BOOKE206),
6461 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
6462 PPC_NONE, PPC2_BOOKE206),
6463 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001,
6464 PPC_NONE, PPC2_PRCNTL),
6465 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001,
6466 PPC_NONE, PPC2_PRCNTL),
6467 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
6468 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
6469 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
6470 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
6471 PPC_BOOKE, PPC2_BOOKE206),
6472 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE),
6473 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
6474 PPC_BOOKE, PPC2_BOOKE206),
6475 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
6476 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
6477 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
6478 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
6479 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
6480 #if defined(TARGET_PPC64)
6481 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
6482 PPC2_ISA300),
6483 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
6484 #endif
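/* Added annotation: the remainder of the opcodes[] table is generated
 * by re-purposing the GEN_* helper macros: each #undef/#define pair
 * below redefines a macro (which presumably emitted the corresponding
 * handler functions earlier in the file) so that it now emits the
 * matching GEN_HANDLER table entries instead.
 */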
6486 #undef GEN_INT_ARITH_ADD
6487 #undef GEN_INT_ARITH_ADD_CONST
6488 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
6489 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
6490 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
6491 add_ca, compute_ca, compute_ov) \
6492 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
6493 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
6494 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
6495 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
6496 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
6497 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
6498 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
6499 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
6500 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
6501 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
6502 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
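/* Added annotation: in the add entries above, the trailing macro
 * arguments mirror the mnemonic suffixes: compute_ca/add_ca are set
 * for the carrying forms (addc, adde, ...), compute_ov for the 'o'
 * forms, and addme/addze fold in the constants -1 and 0 respectively.
 */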
6504 #undef GEN_INT_ARITH_DIVW
6505 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
6506 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
6507 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
6508 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
6509 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
6510 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
6511 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6512 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6513 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6514 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6515 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6516 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6518 #if defined(TARGET_PPC64)
6519 #undef GEN_INT_ARITH_DIVD
6520 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
6521 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6522 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
6523 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
6524 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
6525 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
6527 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6528 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6529 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6530 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6531 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6532 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6534 #undef GEN_INT_ARITH_MUL_HELPER
6535 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
6536 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6537 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
6538 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
6539 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
6540 #endif
6542 #undef GEN_INT_ARITH_SUBF
6543 #undef GEN_INT_ARITH_SUBF_CONST
6544 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
6545 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
6546 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
6547 add_ca, compute_ca, compute_ov) \
6548 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
6549 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
6550 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
6551 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
6552 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
6553 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
6554 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
6555 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
6556 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
6557 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
6558 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
6560 #undef GEN_LOGICAL1
6561 #undef GEN_LOGICAL2
6562 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
6563 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
6564 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
6565 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
6566 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
6567 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
6568 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
6569 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
6570 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
6571 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
6572 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
6573 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
6574 #if defined(TARGET_PPC64)
6575 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
6576 #endif
6578 #if defined(TARGET_PPC64)
6579 #undef GEN_PPC64_R2
6580 #undef GEN_PPC64_R4
6581 #define GEN_PPC64_R2(name, opc1, opc2) \
6582 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
6583 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
6584 PPC_64B)
6585 #define GEN_PPC64_R4(name, opc1, opc2) \
6586 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
6587 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
6588 PPC_64B), \
6589 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
6590 PPC_64B), \
6591 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
6592 PPC_64B)
6593 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
6594 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
6595 GEN_PPC64_R4(rldic, 0x1E, 0x04),
6596 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
6597 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
6598 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
6599 #endif
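/* Added annotation: GEN_PPC64_R2/R4 register the same rotate handler
 * under two or four neighbouring slots; presumably this is needed
 * because operand bits of the MD/MDS instruction forms (such as the
 * high bit of the 6-bit shift amount) overlap the field used as the
 * table index, so every value of those bits must map to one handler.
 */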
6601 #undef GEN_LD
6602 #undef GEN_LDU
6603 #undef GEN_LDUX
6604 #undef GEN_LDX_E
6605 #undef GEN_LDS
6606 #define GEN_LD(name, ldop, opc, type) \
6607 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
6608 #define GEN_LDU(name, ldop, opc, type) \
6609 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
6610 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
6611 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
6612 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \
6613 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
6614 #define GEN_LDS(name, ldop, op, type) \
6615 GEN_LD(name, ldop, op | 0x20, type) \
6616 GEN_LDU(name, ldop, op | 0x21, type) \
6617 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
6618 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
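/* Added annotation: GEN_LDS registers the four addressing variants of
 * a load in one go; for instance GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
 * below covers lbz (opcode 0x22), lbzu (0x23), lbzux and lbzx.
 */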
6620 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
6621 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
6622 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
6623 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
6624 #if defined(TARGET_PPC64)
6625 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
6626 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
6627 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B)
6628 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B)
6629 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
6631 /* HV/P7 and later only */
6632 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
6633 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
6634 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
6635 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
6636 #endif
6637 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
6638 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
6640 #undef GEN_ST
6641 #undef GEN_STU
6642 #undef GEN_STUX
6643 #undef GEN_STX_E
6644 #undef GEN_STS
6645 #define GEN_ST(name, stop, opc, type) \
6646 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
6647 #define GEN_STU(name, stop, opc, type) \
6648 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
6649 #define GEN_STUX(name, stop, opc2, opc3, type) \
6650 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
6651 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \
6652 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
6653 #define GEN_STS(name, stop, op, type) \
6654 GEN_ST(name, stop, op | 0x20, type) \
6655 GEN_STU(name, stop, op | 0x21, type) \
6656 GEN_STUX(name, stop, 0x17, op | 0x01, type) \
6657 GEN_STX(name, stop, 0x17, op | 0x00, type)
6659 GEN_STS(stb, st8, 0x06, PPC_INTEGER)
6660 GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
6661 GEN_STS(stw, st32, 0x04, PPC_INTEGER)
6662 #if defined(TARGET_PPC64)
6663 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B)
6664 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B)
6665 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
6666 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
6667 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
6668 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
6669 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
6670 #endif
6671 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
6672 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
6674 #undef GEN_CRLOGIC
6675 #define GEN_CRLOGIC(name, tcg_op, opc) \
6676 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
6677 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
6678 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
6679 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
6680 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
6681 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
6682 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
6683 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
6684 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
6686 #undef GEN_MAC_HANDLER
6687 #define GEN_MAC_HANDLER(name, opc2, opc3) \
6688 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
6689 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
6690 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
6691 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
6692 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
6693 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
6694 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
6695 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
6696 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
6697 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
6698 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
6699 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
6700 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
6701 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
6702 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
6703 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
6704 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
6705 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
6706 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
6707 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
6708 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
6709 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
6710 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
6711 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
6712 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
6713 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
6714 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
6715 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
6716 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
6717 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
6718 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
6719 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
6720 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
6721 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
6722 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
6723 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
6724 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
6725 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
6726 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
6727 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
6728 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
6729 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
6730 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
6732 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
6733 PPC_NONE, PPC2_TM),
6734 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \
6735 PPC_NONE, PPC2_TM),
6736 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
6737 PPC_NONE, PPC2_TM),
6738 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
6739 PPC_NONE, PPC2_TM),
6740 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
6741 PPC_NONE, PPC2_TM),
6742 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
6743 PPC_NONE, PPC2_TM),
6744 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
6745 PPC_NONE, PPC2_TM),
6746 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
6747 PPC_NONE, PPC2_TM),
6748 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
6749 PPC_NONE, PPC2_TM),
6750 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
6751 PPC_NONE, PPC2_TM),
6752 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
6753 PPC_NONE, PPC2_TM),
6755 #include "translate/fp-ops.inc.c"
6757 #include "translate/vmx-ops.inc.c"
6759 #include "translate/vsx-ops.inc.c"
6761 #include "translate/dfp-ops.inc.c"
6763 #include "translate/spe-ops.inc.c"
6764 };
6766 #include "helper_regs.h"
6767 #include "translate_init.c"
6769 /*****************************************************************************/
6770 /* Misc PowerPC helpers */
6771 void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
6772 int flags)
6773 {
6774 #define RGPL 4
6775 #define RFPL 4
6777 PowerPCCPU *cpu = POWERPC_CPU(cs);
6778 CPUPPCState *env = &cpu->env;
6779 int i;
6781 cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
6782 TARGET_FMT_lx " XER " TARGET_FMT_lx " CPU#%d\n",
6783 env->nip, env->lr, env->ctr, cpu_read_xer(env),
6784 cs->cpu_index);
6785 cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
6786 TARGET_FMT_lx " iidx %d didx %d\n",
6787 env->msr, env->spr[SPR_HID0],
6788 env->hflags, env->immu_idx, env->dmmu_idx);
6789 #if !defined(NO_TIMER_DUMP)
6790 cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
6791 #if !defined(CONFIG_USER_ONLY)
6792 " DECR %08" PRIu32
6793 #endif
6794 "\n",
6795 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
6796 #if !defined(CONFIG_USER_ONLY)
6797 , cpu_ppc_load_decr(env)
6798 #endif
6799 );
6800 #endif
6801 for (i = 0; i < 32; i++) {
6802 if ((i & (RGPL - 1)) == 0)
6803 cpu_fprintf(f, "GPR%02d", i);
6804 cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
6805 if ((i & (RGPL - 1)) == (RGPL - 1))
6806 cpu_fprintf(f, "\n");
6807 }
6808 cpu_fprintf(f, "CR ");
6809 for (i = 0; i < 8; i++)
6810 cpu_fprintf(f, "%01x", env->crf[i]);
6811 cpu_fprintf(f, " [");
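/* Added annotation: each 4-bit CR field is summarised by the letter of
 * its highest set condition bit (L for LT/0x8, G for GT/0x4, E for
 * EQ/0x2), followed by 'O' when the SO bit (0x1) is set.
 */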
6812 for (i = 0; i < 8; i++) {
6813 char a = '-';
6814 if (env->crf[i] & 0x08)
6815 a = 'L';
6816 else if (env->crf[i] & 0x04)
6817 a = 'G';
6818 else if (env->crf[i] & 0x02)
6819 a = 'E';
6820 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
6821 }
6822 cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
6823 env->reserve_addr);
6824 for (i = 0; i < 32; i++) {
6825 if ((i & (RFPL - 1)) == 0)
6826 cpu_fprintf(f, "FPR%02d", i);
6827 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
6828 if ((i & (RFPL - 1)) == (RFPL - 1))
6829 cpu_fprintf(f, "\n");
6830 }
6831 cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr);
6832 #if !defined(CONFIG_USER_ONLY)
6833 cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx
6834 " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n",
6835 env->spr[SPR_SRR0], env->spr[SPR_SRR1],
6836 env->spr[SPR_PVR], env->spr[SPR_VRSAVE]);
6838 cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx
6839 " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n",
6840 env->spr[SPR_SPRG0], env->spr[SPR_SPRG1],
6841 env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]);
6843 cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx
6844 " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n",
6845 env->spr[SPR_SPRG4], env->spr[SPR_SPRG5],
6846 env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]);
6848 #if defined(TARGET_PPC64)
6849 if (env->excp_model == POWERPC_EXCP_POWER7 ||
6850 env->excp_model == POWERPC_EXCP_POWER8) {
6851 cpu_fprintf(f, "HSRR0 " TARGET_FMT_lx " HSRR1 " TARGET_FMT_lx "\n",
6852 env->spr[SPR_HSRR0], env->spr[SPR_HSRR1]);
6853 }
6854 #endif
6855 if (env->excp_model == POWERPC_EXCP_BOOKE) {
6856 cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx
6857 " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n",
6858 env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1],
6859 env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]);
6861 cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx
6862 " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n",
6863 env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR],
6864 env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]);
6866 cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx
6867 " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n",
6868 env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR],
6869 env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]);
6871 cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx
6872 " EPR " TARGET_FMT_lx "\n",
6873 env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8],
6874 env->spr[SPR_BOOKE_EPR]);
6876 /* FSL-specific */
6877 cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx
6878 " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n",
6879 env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1],
6880 env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]);
6882 /*
6883 * IVORs are left out as they are large and do not change often --
6884 * they can be read with "p $ivor0", "p $ivor1", etc.
6885 */
6886 }
6888 #if defined(TARGET_PPC64)
6889 if (env->flags & POWERPC_FLAG_CFAR) {
6890 cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar);
6891 }
6892 #endif
6894 switch (env->mmu_model) {
6895 case POWERPC_MMU_32B:
6896 case POWERPC_MMU_601:
6897 case POWERPC_MMU_SOFT_6xx:
6898 case POWERPC_MMU_SOFT_74xx:
6899 #if defined(TARGET_PPC64)
6900 case POWERPC_MMU_64B:
6901 case POWERPC_MMU_2_03:
6902 case POWERPC_MMU_2_06:
6903 case POWERPC_MMU_2_06a:
6904 case POWERPC_MMU_2_07:
6905 case POWERPC_MMU_2_07a:
6906 #endif
6907 cpu_fprintf(f, " SDR1 " TARGET_FMT_lx " DAR " TARGET_FMT_lx
6908 " DSISR " TARGET_FMT_lx "\n", env->spr[SPR_SDR1],
6909 env->spr[SPR_DAR], env->spr[SPR_DSISR]);
6910 break;
6911 case POWERPC_MMU_BOOKE206:
6912 cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx
6913 " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n",
6914 env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1],
6915 env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]);
6917 cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx
6918 " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n",
6919 env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6],
6920 env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]);
6922 cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx
6923 " TLB1CFG " TARGET_FMT_lx "\n",
6924 env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG],
6925 env->spr[SPR_BOOKE_TLB1CFG]);
6926 break;
6927 default:
6928 break;
6929 }
6930 #endif
6932 #undef RGPL
6933 #undef RFPL
6934 }
6936 void ppc_cpu_dump_statistics(CPUState *cs, FILE*f,
6937 fprintf_function cpu_fprintf, int flags)
6938 {
6939 #if defined(DO_PPC_STATISTICS)
6940 PowerPCCPU *cpu = POWERPC_CPU(cs);
6941 opc_handler_t **t1, **t2, **t3, *handler;
6942 int op1, op2, op3;
6944 t1 = cpu->env.opcodes;
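/* Added annotation: when DO_PPC_STATISTICS is enabled, the nested
 * loops below walk the up-to-three-level opcode tables and print the
 * per-handler execution counters accumulated during translation.
 */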
6945 for (op1 = 0; op1 < 64; op1++) {
6946 handler = t1[op1];
6947 if (is_indirect_opcode(handler)) {
6948 t2 = ind_table(handler);
6949 for (op2 = 0; op2 < 32; op2++) {
6950 handler = t2[op2];
6951 if (is_indirect_opcode(handler)) {
6952 t3 = ind_table(handler);
6953 for (op3 = 0; op3 < 32; op3++) {
6954 handler = t3[op3];
6955 if (handler->count == 0)
6956 continue;
6957 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
6958 "%016" PRIx64 " %" PRId64 "\n",
6959 op1, op2, op3, op1, (op3 << 5) | op2,
6960 handler->oname,
6961 handler->count, handler->count);
6962 }
6963 } else {
6964 if (handler->count == 0)
6965 continue;
6966 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
6967 "%016" PRIx64 " %" PRId64 "\n",
6968 op1, op2, op1, op2, handler->oname,
6969 handler->count, handler->count);
6970 }
6971 }
6972 } else {
6973 if (handler->count == 0)
6974 continue;
6975 cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
6976 " %" PRId64 "\n",
6977 op1, op1, handler->oname,
6978 handler->count, handler->count);
6979 }
6980 }
6981 #endif
6982 }
6984 /*****************************************************************************/
6985 void gen_intermediate_code(CPUPPCState *env, struct TranslationBlock *tb)
6986 {
6987 PowerPCCPU *cpu = ppc_env_get_cpu(env);
6988 CPUState *cs = CPU(cpu);
6989 DisasContext ctx, *ctxp = &ctx;
6990 opc_handler_t **table, *handler;
6991 target_ulong pc_start;
6992 int num_insns;
6993 int max_insns;
6995 pc_start = tb->pc;
6996 ctx.nip = pc_start;
6997 ctx.tb = tb;
6998 ctx.exception = POWERPC_EXCP_NONE;
6999 ctx.spr_cb = env->spr_cb;
7000 ctx.pr = msr_pr;
7001 ctx.mem_idx = env->dmmu_idx;
7002 ctx.dr = msr_dr;
7003 #if !defined(CONFIG_USER_ONLY)
7004 ctx.hv = msr_hv || !env->has_hv_mode;
7005 #endif
7006 ctx.insns_flags = env->insns_flags;
7007 ctx.insns_flags2 = env->insns_flags2;
7008 ctx.access_type = -1;
7009 ctx.need_access_type = !(env->mmu_model & POWERPC_MMU_64B);
7010 ctx.le_mode = !!(env->hflags & (1 << MSR_LE));
7011 ctx.default_tcg_memop_mask = ctx.le_mode ? MO_LE : MO_BE;
7012 #if defined(TARGET_PPC64)
7013 ctx.sf_mode = msr_is_64bit(env, env->msr);
7014 ctx.has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
7015 #endif
7016 if (env->mmu_model == POWERPC_MMU_32B ||
7017 env->mmu_model == POWERPC_MMU_601 ||
7018 (env->mmu_model & POWERPC_MMU_64B))
7019 ctx.lazy_tlb_flush = true;
7021 ctx.fpu_enabled = !!msr_fp;
7022 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
7023 ctx.spe_enabled = !!msr_spe;
7024 else
7025 ctx.spe_enabled = false;
7026 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
7027 ctx.altivec_enabled = !!msr_vr;
7028 else
7029 ctx.altivec_enabled = false;
7030 if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) {
7031 ctx.vsx_enabled = !!msr_vsx;
7032 } else {
7033 ctx.vsx_enabled = false;
7034 }
7035 #if defined(TARGET_PPC64)
7036 if ((env->flags & POWERPC_FLAG_TM) && msr_tm) {
7037 ctx.tm_enabled = !!msr_tm;
7038 } else {
7039 ctx.tm_enabled = false;
7040 }
7041 #endif
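/* Added annotation: three single-step sources are combined below:
 * MSR[SE] selects CPU_SINGLE_STEP, MSR[BE] adds CPU_BRANCH_STEP, and
 * an attached debugger (cs->singlestep_enabled) adds
 * GDBSTUB_SINGLE_STEP.
 */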
7042 if ((env->flags & POWERPC_FLAG_SE) && msr_se)
7043 ctx.singlestep_enabled = CPU_SINGLE_STEP;
7044 else
7045 ctx.singlestep_enabled = 0;
7046 if ((env->flags & POWERPC_FLAG_BE) && msr_be)
7047 ctx.singlestep_enabled |= CPU_BRANCH_STEP;
7048 if (unlikely(cs->singlestep_enabled)) {
7049 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
7050 }
7051 #if defined (DO_SINGLE_STEP) && 0
7052 /* Single step trace mode */
7053 msr_se = 1;
7054 #endif
7055 num_insns = 0;
7056 max_insns = tb->cflags & CF_COUNT_MASK;
7057 if (max_insns == 0) {
7058 max_insns = CF_COUNT_MASK;
7059 }
7060 if (max_insns > TCG_MAX_INSNS) {
7061 max_insns = TCG_MAX_INSNS;
7062 }
7064 gen_tb_start(tb);
7065 tcg_clear_temp_count();
7066 /* Set env in case of segfault during code fetch */
7067 while (ctx.exception == POWERPC_EXCP_NONE && !tcg_op_buf_full()) {
7068 tcg_gen_insn_start(ctx.nip);
7069 num_insns++;
7071 if (unlikely(cpu_breakpoint_test(cs, ctx.nip, BP_ANY))) {
7072 gen_debug_exception(ctxp);
7073 /* The address covered by the breakpoint must be included in
7074 [tb->pc, tb->pc + tb->size) in order for it to be
7075 properly cleared -- thus we increment the PC here so that
7076 the logic setting tb->size below does the right thing. */
7077 ctx.nip += 4;
7078 break;
7079 }
7081 LOG_DISAS("----------------\n");
7082 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
7083 ctx.nip, ctx.mem_idx, (int)msr_ir);
7084 if (num_insns == max_insns && (tb->cflags & CF_LAST_IO))
7085 gen_io_start();
7086 if (unlikely(need_byteswap(&ctx))) {
7087 ctx.opcode = bswap32(cpu_ldl_code(env, ctx.nip));
7088 } else {
7089 ctx.opcode = cpu_ldl_code(env, ctx.nip);
7090 }
7091 LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
7092 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
7093 opc3(ctx.opcode), opc4(ctx.opcode),
7094 ctx.le_mode ? "little" : "big");
7095 ctx.nip += 4;
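/* Added annotation: decoding walks up to four table levels: opc1
 * indexes env->opcodes, and each indirect entry points to a sub-table
 * indexed by opc2, then opc3, then opc4, until a leaf handler is
 * reached.
 */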
7096 table = env->opcodes;
7097 handler = table[opc1(ctx.opcode)];
7098 if (is_indirect_opcode(handler)) {
7099 table = ind_table(handler);
7100 handler = table[opc2(ctx.opcode)];
7101 if (is_indirect_opcode(handler)) {
7102 table = ind_table(handler);
7103 handler = table[opc3(ctx.opcode)];
7104 if (is_indirect_opcode(handler)) {
7105 table = ind_table(handler);
7106 handler = table[opc4(ctx.opcode)];
7107 }
7108 }
7109 }
7110 /* Is opcode *REALLY* valid ? */
7111 if (unlikely(handler->handler == &gen_invalid)) {
7112 qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
7113 "%02x - %02x - %02x - %02x (%08x) "
7114 TARGET_FMT_lx " %d\n",
7115 opc1(ctx.opcode), opc2(ctx.opcode),
7116 opc3(ctx.opcode), opc4(ctx.opcode),
7117 ctx.opcode, ctx.nip - 4, (int)msr_ir);
7118 } else {
7119 uint32_t inval;
7121 if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE) && Rc(ctx.opcode))) {
7122 inval = handler->inval2;
7123 } else {
7124 inval = handler->inval1;
7125 }
7127 if (unlikely((ctx.opcode & inval) != 0)) {
7128 qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
7129 "%02x - %02x - %02x - %02x (%08x) "
7130 TARGET_FMT_lx "\n", ctx.opcode & inval,
7131 opc1(ctx.opcode), opc2(ctx.opcode),
7132 opc3(ctx.opcode), opc4(ctx.opcode),
7133 ctx.opcode, ctx.nip - 4);
7134 gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL);
7135 break;
7136 }
7137 }
7138 (*(handler->handler))(&ctx);
7139 #if defined(DO_PPC_STATISTICS)
7140 handler->count++;
7141 #endif
7142 /* Check trace mode exceptions */
7143 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
7144 (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
7145 ctx.exception != POWERPC_SYSCALL &&
7146 ctx.exception != POWERPC_EXCP_TRAP &&
7147 ctx.exception != POWERPC_EXCP_BRANCH)) {
7148 gen_exception_nip(ctxp, POWERPC_EXCP_TRACE, ctx.nip);
7149 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
7150 (cs->singlestep_enabled) ||
7151 singlestep ||
7152 num_insns >= max_insns)) {
7153 /* if we reach a page boundary or are single stepping, stop
7154 * generation
7155 */
7156 break;
7157 }
7158 if (tcg_check_temp_count()) {
7159 fprintf(stderr, "Opcode %02x %02x %02x %02x (%08x) leaked "
7160 "temporaries\n", opc1(ctx.opcode), opc2(ctx.opcode),
7161 opc3(ctx.opcode), opc4(ctx.opcode), ctx.opcode);
7162 exit(1);
7163 }
7164 }
7165 if (tb->cflags & CF_LAST_IO)
7166 gen_io_end();
7167 if (ctx.exception == POWERPC_EXCP_NONE) {
7168 gen_goto_tb(&ctx, 0, ctx.nip);
7169 } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
7170 if (unlikely(cs->singlestep_enabled)) {
7171 gen_debug_exception(ctxp);
7172 }
7173 /* Generate the return instruction */
7174 tcg_gen_exit_tb(0);
7175 }
7176 gen_tb_end(tb, num_insns);
7178 tb->size = ctx.nip - pc_start;
7179 tb->icount = num_insns;
7181 #if defined(DEBUG_DISAS)
7182 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
7183 && qemu_log_in_addr_range(pc_start)) {
7184 int flags;
7185 flags = env->bfd_mach;
7186 flags |= ctx.le_mode << 16;
7187 qemu_log_lock();
7188 qemu_log("IN: %s\n", lookup_symbol(pc_start));
7189 log_target_disas(cs, pc_start, ctx.nip - pc_start, flags);
7190 qemu_log("\n");
7191 qemu_log_unlock();
7192 }
7193 #endif
7194 }
7196 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
7197 target_ulong *data)
7198 {
7199 env->nip = data[0];