target-ppc: optimize fabs, fnabs, fneg
[qemu/kevin.git] / target-ppc / translate.c
blob 6cc78933b51f84442d212653ec8dd6e783c26a3e
1 /*
2 * PowerPC emulation for qemu: main translation routines.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 * Copyright (C) 2011 Freescale Semiconductor, Inc.
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
21 #include "cpu.h"
22 #include "disas/disas.h"
23 #include "tcg-op.h"
24 #include "qemu/host-utils.h"
26 #include "helper.h"
27 #define GEN_HELPER 1
28 #include "helper.h"
30 #define CPU_SINGLE_STEP 0x1
31 #define CPU_BRANCH_STEP 0x2
32 #define GDBSTUB_SINGLE_STEP 0x4
34 /* Include definitions for instruction classes and implementation flags */
35 //#define PPC_DEBUG_DISAS
36 //#define DO_PPC_STATISTICS
38 #ifdef PPC_DEBUG_DISAS
39 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
40 #else
41 # define LOG_DISAS(...) do { } while (0)
42 #endif
43 /*****************************************************************************/
44 /* Code translation helpers */
46 /* global register indexes */
47 static TCGv_ptr cpu_env;
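/* Backing storage for the TCG global register names created in
   ppc_translate_init(); the sizes below count NUL-terminated names such as
   "r0".."r9" (3 bytes each) and "r10".."r31" (4 bytes each). */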
48 static char cpu_reg_names[10*3 + 22*4 /* GPR */
49 #if !defined(TARGET_PPC64)
50 + 10*4 + 22*5 /* SPE GPRh */
51 #endif
52 + 10*4 + 22*5 /* FPR */
53 + 2*(10*6 + 22*7) /* AVRh, AVRl */
54 + 8*5 /* CRF */];
55 static TCGv cpu_gpr[32];
56 #if !defined(TARGET_PPC64)
57 static TCGv cpu_gprh[32];
58 #endif
59 static TCGv_i64 cpu_fpr[32];
60 static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
61 static TCGv_i32 cpu_crf[8];
62 static TCGv cpu_nip;
63 static TCGv cpu_msr;
64 static TCGv cpu_ctr;
65 static TCGv cpu_lr;
66 #if defined(TARGET_PPC64)
67 static TCGv cpu_cfar;
68 #endif
69 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca;
70 static TCGv cpu_reserve;
71 static TCGv cpu_fpscr;
72 static TCGv_i32 cpu_access_type;
74 #include "exec/gen-icount.h"
76 void ppc_translate_init(void)
78 int i;
79 char* p;
80 size_t cpu_reg_names_size;
81 static int done_init = 0;
83 if (done_init)
84 return;
86 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
88 p = cpu_reg_names;
89 cpu_reg_names_size = sizeof(cpu_reg_names);
91 for (i = 0; i < 8; i++) {
92 snprintf(p, cpu_reg_names_size, "crf%d", i);
93 cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
94 offsetof(CPUPPCState, crf[i]), p);
95 p += 5;
96 cpu_reg_names_size -= 5;
99 for (i = 0; i < 32; i++) {
100 snprintf(p, cpu_reg_names_size, "r%d", i);
101 cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
102 offsetof(CPUPPCState, gpr[i]), p);
103 p += (i < 10) ? 3 : 4;
104 cpu_reg_names_size -= (i < 10) ? 3 : 4;
105 #if !defined(TARGET_PPC64)
106 snprintf(p, cpu_reg_names_size, "r%dH", i);
107 cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
108 offsetof(CPUPPCState, gprh[i]), p);
109 p += (i < 10) ? 4 : 5;
110 cpu_reg_names_size -= (i < 10) ? 4 : 5;
111 #endif
113 snprintf(p, cpu_reg_names_size, "fp%d", i);
114 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
115 offsetof(CPUPPCState, fpr[i]), p);
116 p += (i < 10) ? 4 : 5;
117 cpu_reg_names_size -= (i < 10) ? 4 : 5;
119 snprintf(p, cpu_reg_names_size, "avr%dH", i);
120 #ifdef HOST_WORDS_BIGENDIAN
121 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
122 offsetof(CPUPPCState, avr[i].u64[0]), p);
123 #else
124 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
125 offsetof(CPUPPCState, avr[i].u64[1]), p);
126 #endif
127 p += (i < 10) ? 6 : 7;
128 cpu_reg_names_size -= (i < 10) ? 6 : 7;
130 snprintf(p, cpu_reg_names_size, "avr%dL", i);
131 #ifdef HOST_WORDS_BIGENDIAN
132 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
133 offsetof(CPUPPCState, avr[i].u64[1]), p);
134 #else
135 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
136 offsetof(CPUPPCState, avr[i].u64[0]), p);
137 #endif
138 p += (i < 10) ? 6 : 7;
139 cpu_reg_names_size -= (i < 10) ? 6 : 7;
142 cpu_nip = tcg_global_mem_new(TCG_AREG0,
143 offsetof(CPUPPCState, nip), "nip");
145 cpu_msr = tcg_global_mem_new(TCG_AREG0,
146 offsetof(CPUPPCState, msr), "msr");
148 cpu_ctr = tcg_global_mem_new(TCG_AREG0,
149 offsetof(CPUPPCState, ctr), "ctr");
151 cpu_lr = tcg_global_mem_new(TCG_AREG0,
152 offsetof(CPUPPCState, lr), "lr");
154 #if defined(TARGET_PPC64)
155 cpu_cfar = tcg_global_mem_new(TCG_AREG0,
156 offsetof(CPUPPCState, cfar), "cfar");
157 #endif
159 cpu_xer = tcg_global_mem_new(TCG_AREG0,
160 offsetof(CPUPPCState, xer), "xer");
161 cpu_so = tcg_global_mem_new(TCG_AREG0,
162 offsetof(CPUPPCState, so), "SO");
163 cpu_ov = tcg_global_mem_new(TCG_AREG0,
164 offsetof(CPUPPCState, ov), "OV");
165 cpu_ca = tcg_global_mem_new(TCG_AREG0,
166 offsetof(CPUPPCState, ca), "CA");
168 cpu_reserve = tcg_global_mem_new(TCG_AREG0,
169 offsetof(CPUPPCState, reserve_addr),
170 "reserve_addr");
172 cpu_fpscr = tcg_global_mem_new(TCG_AREG0,
173 offsetof(CPUPPCState, fpscr), "fpscr");
175 cpu_access_type = tcg_global_mem_new_i32(TCG_AREG0,
176 offsetof(CPUPPCState, access_type), "access_type");
178 /* register helpers */
179 #define GEN_HELPER 2
180 #include "helper.h"
182 done_init = 1;
185 /* internal defines */
186 typedef struct DisasContext {
187 struct TranslationBlock *tb;
188 target_ulong nip;
189 uint32_t opcode;
190 uint32_t exception;
191 /* Routine used to access memory */
192 int mem_idx;
193 int access_type;
194 /* Translation flags */
195 int le_mode;
196 #if defined(TARGET_PPC64)
197 int sf_mode;
198 int has_cfar;
199 #endif
200 int fpu_enabled;
201 int altivec_enabled;
202 int spe_enabled;
203 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
204 int singlestep_enabled;
205 } DisasContext;
207 /* True when active word size < size of target_long. */
208 #ifdef TARGET_PPC64
209 # define NARROW_MODE(C) (!(C)->sf_mode)
210 #else
211 # define NARROW_MODE(C) 0
212 #endif
214 struct opc_handler_t {
215 /* invalid bits for instruction 1 (Rc(opcode) == 0) */
216 uint32_t inval1;
217 /* invalid bits for instruction 2 (Rc(opcode) == 1) */
218 uint32_t inval2;
219 /* instruction type */
220 uint64_t type;
221 /* extended instruction type */
222 uint64_t type2;
223 /* handler */
224 void (*handler)(DisasContext *ctx);
225 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
226 const char *oname;
227 #endif
228 #if defined(DO_PPC_STATISTICS)
229 uint64_t count;
230 #endif
233 static inline void gen_reset_fpstatus(void)
235 gen_helper_reset_fpstatus(cpu_env);
238 static inline void gen_compute_fprf(TCGv_i64 arg, int set_fprf, int set_rc)
240 TCGv_i32 t0 = tcg_temp_new_i32();
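/* The constant passed in t0 tells the helper what to update: 1 recomputes
   the full FPRF field, 0 only the FPCC bits that are copied into CR1 when
   Rc is set. */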
242 if (set_fprf != 0) {
243 /* This case might be optimized later */
244 tcg_gen_movi_i32(t0, 1);
245 gen_helper_compute_fprf(t0, cpu_env, arg, t0);
246 if (unlikely(set_rc)) {
247 tcg_gen_mov_i32(cpu_crf[1], t0);
249 gen_helper_float_check_status(cpu_env);
250 } else if (unlikely(set_rc)) {
251 /* We always need to compute fpcc */
252 tcg_gen_movi_i32(t0, 0);
253 gen_helper_compute_fprf(t0, cpu_env, arg, t0);
254 tcg_gen_mov_i32(cpu_crf[1], t0);
257 tcg_temp_free_i32(t0);
260 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
262 if (ctx->access_type != access_type) {
263 tcg_gen_movi_i32(cpu_access_type, access_type);
264 ctx->access_type = access_type;
268 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
270 if (NARROW_MODE(ctx)) {
271 nip = (uint32_t)nip;
273 tcg_gen_movi_tl(cpu_nip, nip);
276 static inline void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
278 TCGv_i32 t0, t1;
279 if (ctx->exception == POWERPC_EXCP_NONE) {
280 gen_update_nip(ctx, ctx->nip);
282 t0 = tcg_const_i32(excp);
283 t1 = tcg_const_i32(error);
284 gen_helper_raise_exception_err(cpu_env, t0, t1);
285 tcg_temp_free_i32(t0);
286 tcg_temp_free_i32(t1);
287 ctx->exception = (excp);
290 static inline void gen_exception(DisasContext *ctx, uint32_t excp)
292 TCGv_i32 t0;
293 if (ctx->exception == POWERPC_EXCP_NONE) {
294 gen_update_nip(ctx, ctx->nip);
296 t0 = tcg_const_i32(excp);
297 gen_helper_raise_exception(cpu_env, t0);
298 tcg_temp_free_i32(t0);
299 ctx->exception = (excp);
302 static inline void gen_debug_exception(DisasContext *ctx)
304 TCGv_i32 t0;
306 if ((ctx->exception != POWERPC_EXCP_BRANCH) &&
307 (ctx->exception != POWERPC_EXCP_SYNC)) {
308 gen_update_nip(ctx, ctx->nip);
310 t0 = tcg_const_i32(EXCP_DEBUG);
311 gen_helper_raise_exception(cpu_env, t0);
312 tcg_temp_free_i32(t0);
315 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
317 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_INVAL | error);
320 /* Stop translation */
321 static inline void gen_stop_exception(DisasContext *ctx)
323 gen_update_nip(ctx, ctx->nip);
324 ctx->exception = POWERPC_EXCP_STOP;
327 /* No need to update nip here, as execution flow will change */
328 static inline void gen_sync_exception(DisasContext *ctx)
330 ctx->exception = POWERPC_EXCP_SYNC;
333 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
334 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
336 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \
337 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
339 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
340 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
342 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \
343 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
345 typedef struct opcode_t {
346 unsigned char opc1, opc2, opc3;
347 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
348 unsigned char pad[5];
349 #else
350 unsigned char pad[1];
351 #endif
352 opc_handler_t handler;
353 const char *oname;
354 } opcode_t;
356 /*****************************************************************************/
357 /*** Instruction decoding ***/
358 #define EXTRACT_HELPER(name, shift, nb) \
359 static inline uint32_t name(uint32_t opcode) \
361 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
364 #define EXTRACT_SHELPER(name, shift, nb) \
365 static inline int32_t name(uint32_t opcode) \
367 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
370 /* Opcode part 1 */
371 EXTRACT_HELPER(opc1, 26, 6);
372 /* Opcode part 2 */
373 EXTRACT_HELPER(opc2, 1, 5);
374 /* Opcode part 3 */
375 EXTRACT_HELPER(opc3, 6, 5);
376 /* Update Cr0 flags */
377 EXTRACT_HELPER(Rc, 0, 1);
378 /* Destination */
379 EXTRACT_HELPER(rD, 21, 5);
380 /* Source */
381 EXTRACT_HELPER(rS, 21, 5);
382 /* First operand */
383 EXTRACT_HELPER(rA, 16, 5);
384 /* Second operand */
385 EXTRACT_HELPER(rB, 11, 5);
386 /* Third operand */
387 EXTRACT_HELPER(rC, 6, 5);
388 /*** Get CRn ***/
389 EXTRACT_HELPER(crfD, 23, 3);
390 EXTRACT_HELPER(crfS, 18, 3);
391 EXTRACT_HELPER(crbD, 21, 5);
392 EXTRACT_HELPER(crbA, 16, 5);
393 EXTRACT_HELPER(crbB, 11, 5);
394 /* SPR / TBL */
395 EXTRACT_HELPER(_SPR, 11, 10);
396 static inline uint32_t SPR(uint32_t opcode)
398 uint32_t sprn = _SPR(opcode);
400 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
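/* The SPR number is encoded in the instruction with its two 5-bit halves
   swapped; swap them back to obtain the architected SPR number. */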
402 /*** Get constants ***/
403 EXTRACT_HELPER(IMM, 12, 8);
404 /* 16 bits signed immediate value */
405 EXTRACT_SHELPER(SIMM, 0, 16);
406 /* 16 bits unsigned immediate value */
407 EXTRACT_HELPER(UIMM, 0, 16);
408 /* 5 bits signed immediate value */
409 EXTRACT_HELPER(SIMM5, 16, 5);
410 /* 5 bits unsigned immediate value */
411 EXTRACT_HELPER(UIMM5, 16, 5);
412 /* Bit count */
413 EXTRACT_HELPER(NB, 11, 5);
414 /* Shift count */
415 EXTRACT_HELPER(SH, 11, 5);
416 /* Vector shift count */
417 EXTRACT_HELPER(VSH, 6, 4);
418 /* Mask start */
419 EXTRACT_HELPER(MB, 6, 5);
420 /* Mask end */
421 EXTRACT_HELPER(ME, 1, 5);
422 /* Trap operand */
423 EXTRACT_HELPER(TO, 21, 5);
425 EXTRACT_HELPER(CRM, 12, 8);
426 EXTRACT_HELPER(FM, 17, 8);
427 EXTRACT_HELPER(SR, 16, 4);
428 EXTRACT_HELPER(FPIMM, 12, 4);
430 /*** Jump target decoding ***/
431 /* Displacement */
432 EXTRACT_SHELPER(d, 0, 16);
433 /* Immediate address */
434 static inline target_ulong LI(uint32_t opcode)
436 return (opcode >> 0) & 0x03FFFFFC;
439 static inline uint32_t BD(uint32_t opcode)
441 return (opcode >> 0) & 0xFFFC;
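/* LI and BD stay word aligned: the two low opcode bits are the AA and LK
   flags, extracted separately below. */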
444 EXTRACT_HELPER(BO, 21, 5);
445 EXTRACT_HELPER(BI, 16, 5);
446 /* Absolute/relative address */
447 EXTRACT_HELPER(AA, 1, 1);
448 /* Link */
449 EXTRACT_HELPER(LK, 0, 1);
451 /* Create a mask between <start> and <end> bits */
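/* Bits are numbered IBM-style (bit 0 is the MSB) and the mask wraps around
   when start > end; e.g. on a 32-bit target MASK(28, 3) is 0xF000000F. */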
452 static inline target_ulong MASK(uint32_t start, uint32_t end)
454 target_ulong ret;
456 #if defined(TARGET_PPC64)
457 if (likely(start == 0)) {
458 ret = UINT64_MAX << (63 - end);
459 } else if (likely(end == 63)) {
460 ret = UINT64_MAX >> start;
462 #else
463 if (likely(start == 0)) {
464 ret = UINT32_MAX << (31 - end);
465 } else if (likely(end == 31)) {
466 ret = UINT32_MAX >> start;
468 #endif
469 else {
470 ret = (((target_ulong)(-1ULL)) >> (start)) ^
471 (((target_ulong)(-1ULL) >> (end)) >> 1);
472 if (unlikely(start > end))
473 return ~ret;
476 return ret;
479 /*****************************************************************************/
480 /* PowerPC instructions table */
482 #if defined(DO_PPC_STATISTICS)
483 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
485 .opc1 = op1, \
486 .opc2 = op2, \
487 .opc3 = op3, \
488 .pad = { 0, }, \
489 .handler = { \
490 .inval1 = invl, \
491 .type = _typ, \
492 .type2 = _typ2, \
493 .handler = &gen_##name, \
494 .oname = stringify(name), \
495 }, \
496 .oname = stringify(name), \
498 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
500 .opc1 = op1, \
501 .opc2 = op2, \
502 .opc3 = op3, \
503 .pad = { 0, }, \
504 .handler = { \
505 .inval1 = invl1, \
506 .inval2 = invl2, \
507 .type = _typ, \
508 .type2 = _typ2, \
509 .handler = &gen_##name, \
510 .oname = stringify(name), \
511 }, \
512 .oname = stringify(name), \
514 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
516 .opc1 = op1, \
517 .opc2 = op2, \
518 .opc3 = op3, \
519 .pad = { 0, }, \
520 .handler = { \
521 .inval1 = invl, \
522 .type = _typ, \
523 .type2 = _typ2, \
524 .handler = &gen_##name, \
525 .oname = onam, \
526 }, \
527 .oname = onam, \
529 #else
530 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
532 .opc1 = op1, \
533 .opc2 = op2, \
534 .opc3 = op3, \
535 .pad = { 0, }, \
536 .handler = { \
537 .inval1 = invl, \
538 .type = _typ, \
539 .type2 = _typ2, \
540 .handler = &gen_##name, \
541 }, \
542 .oname = stringify(name), \
544 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
546 .opc1 = op1, \
547 .opc2 = op2, \
548 .opc3 = op3, \
549 .pad = { 0, }, \
550 .handler = { \
551 .inval1 = invl1, \
552 .inval2 = invl2, \
553 .type = _typ, \
554 .type2 = _typ2, \
555 .handler = &gen_##name, \
556 }, \
557 .oname = stringify(name), \
559 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
561 .opc1 = op1, \
562 .opc2 = op2, \
563 .opc3 = op3, \
564 .pad = { 0, }, \
565 .handler = { \
566 .inval1 = invl, \
567 .type = _typ, \
568 .type2 = _typ2, \
569 .handler = &gen_##name, \
570 }, \
571 .oname = onam, \
573 #endif
575 /* SPR load/store helpers */
576 static inline void gen_load_spr(TCGv t, int reg)
578 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
581 static inline void gen_store_spr(int reg, TCGv t)
583 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
586 /* Invalid instruction */
587 static void gen_invalid(DisasContext *ctx)
589 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
592 static opc_handler_t invalid_handler = {
593 .inval1 = 0xFFFFFFFF,
594 .inval2 = 0xFFFFFFFF,
595 .type = PPC_NONE,
596 .type2 = PPC_NONE,
597 .handler = gen_invalid,
600 /*** Integer comparison ***/
602 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
604 TCGv t0 = tcg_temp_new();
605 TCGv_i32 t1 = tcg_temp_new_i32();
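/* The CR field is built as SO | LT | GT | EQ: start from a copy of XER[SO],
   then OR in each comparison result at its CR bit position. */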
607 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
609 tcg_gen_setcond_tl((s ? TCG_COND_LT: TCG_COND_LTU), t0, arg0, arg1);
610 tcg_gen_trunc_tl_i32(t1, t0);
611 tcg_gen_shli_i32(t1, t1, CRF_LT);
612 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);
614 tcg_gen_setcond_tl((s ? TCG_COND_GT: TCG_COND_GTU), t0, arg0, arg1);
615 tcg_gen_trunc_tl_i32(t1, t0);
616 tcg_gen_shli_i32(t1, t1, CRF_GT);
617 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);
619 tcg_gen_setcond_tl(TCG_COND_EQ, t0, arg0, arg1);
620 tcg_gen_trunc_tl_i32(t1, t0);
621 tcg_gen_shli_i32(t1, t1, CRF_EQ);
622 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);
624 tcg_temp_free(t0);
625 tcg_temp_free_i32(t1);
628 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
630 TCGv t0 = tcg_const_tl(arg1);
631 gen_op_cmp(arg0, t0, s, crf);
632 tcg_temp_free(t0);
635 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
637 TCGv t0, t1;
638 t0 = tcg_temp_new();
639 t1 = tcg_temp_new();
640 if (s) {
641 tcg_gen_ext32s_tl(t0, arg0);
642 tcg_gen_ext32s_tl(t1, arg1);
643 } else {
644 tcg_gen_ext32u_tl(t0, arg0);
645 tcg_gen_ext32u_tl(t1, arg1);
647 gen_op_cmp(t0, t1, s, crf);
648 tcg_temp_free(t1);
649 tcg_temp_free(t0);
652 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
654 TCGv t0 = tcg_const_tl(arg1);
655 gen_op_cmp32(arg0, t0, s, crf);
656 tcg_temp_free(t0);
659 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
661 if (NARROW_MODE(ctx)) {
662 gen_op_cmpi32(reg, 0, 1, 0);
663 } else {
664 gen_op_cmpi(reg, 0, 1, 0);
668 /* cmp */
669 static void gen_cmp(DisasContext *ctx)
671 if (NARROW_MODE(ctx) || !(ctx->opcode & 0x00200000)) {
672 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
673 1, crfD(ctx->opcode));
674 } else {
675 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
676 1, crfD(ctx->opcode));
680 /* cmpi */
681 static void gen_cmpi(DisasContext *ctx)
683 if (NARROW_MODE(ctx) || !(ctx->opcode & 0x00200000)) {
684 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
685 1, crfD(ctx->opcode));
686 } else {
687 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
688 1, crfD(ctx->opcode));
692 /* cmpl */
693 static void gen_cmpl(DisasContext *ctx)
695 if (NARROW_MODE(ctx) || !(ctx->opcode & 0x00200000)) {
696 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
697 0, crfD(ctx->opcode));
698 } else {
699 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
700 0, crfD(ctx->opcode));
704 /* cmpli */
705 static void gen_cmpli(DisasContext *ctx)
707 if (NARROW_MODE(ctx) || !(ctx->opcode & 0x00200000)) {
708 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
709 0, crfD(ctx->opcode));
710 } else {
711 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
712 0, crfD(ctx->opcode));
716 /* isel (PowerPC 2.03 specification) */
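/* rD gets rA (or zero when rA is r0) if the CR bit selected by the BC field
   is set, and rB otherwise. */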
717 static void gen_isel(DisasContext *ctx)
719 int l1, l2;
720 uint32_t bi = rC(ctx->opcode);
721 uint32_t mask;
722 TCGv_i32 t0;
724 l1 = gen_new_label();
725 l2 = gen_new_label();
727 mask = 1 << (3 - (bi & 0x03));
728 t0 = tcg_temp_new_i32();
729 tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
730 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
731 if (rA(ctx->opcode) == 0)
732 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
733 else
734 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
735 tcg_gen_br(l2);
736 gen_set_label(l1);
737 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
738 gen_set_label(l2);
739 tcg_temp_free_i32(t0);
742 /*** Integer arithmetic ***/
744 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
745 TCGv arg1, TCGv arg2, int sub)
747 TCGv t0 = tcg_temp_new();
749 tcg_gen_xor_tl(cpu_ov, arg0, arg2);
750 tcg_gen_xor_tl(t0, arg1, arg2);
751 if (sub) {
752 tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
753 } else {
754 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
756 tcg_temp_free(t0);
757 if (NARROW_MODE(ctx)) {
758 tcg_gen_ext32s_tl(cpu_ov, cpu_ov);
760 tcg_gen_shri_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1);
761 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
764 /* Common add function */
765 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
766 TCGv arg2, bool add_ca, bool compute_ca,
767 bool compute_ov, bool compute_rc0)
769 TCGv t0 = ret;
771 if (compute_ca || compute_ov) {
772 t0 = tcg_temp_new();
775 if (compute_ca) {
776 if (NARROW_MODE(ctx)) {
777 /* Caution: a non-obvious corner case of the spec is that we
778 must produce the *entire* 64-bit addition, but produce the
779 carry into bit 32. */
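/* The trick: arg1 ^ arg2 is the sum without carry propagation, so XORing it
   with the real sum exposes the carry into every bit position; bit 32 of
   that value is the CA we need in 32-bit mode. */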
780 TCGv t1 = tcg_temp_new();
781 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */
782 tcg_gen_add_tl(t0, arg1, arg2);
783 if (add_ca) {
784 tcg_gen_add_tl(t0, t0, cpu_ca);
786 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */
787 tcg_temp_free(t1);
788 tcg_gen_shri_tl(cpu_ca, cpu_ca, 32); /* extract bit 32 */
789 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
790 } else {
791 TCGv zero = tcg_const_tl(0);
792 if (add_ca) {
793 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, cpu_ca, zero);
794 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, arg2, zero);
795 } else {
796 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, arg2, zero);
798 tcg_temp_free(zero);
800 } else {
801 tcg_gen_add_tl(t0, arg1, arg2);
802 if (add_ca) {
803 tcg_gen_add_tl(t0, t0, cpu_ca);
807 if (compute_ov) {
808 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
810 if (unlikely(compute_rc0)) {
811 gen_set_Rc0(ctx, t0);
814 if (!TCGV_EQUAL(t0, ret)) {
815 tcg_gen_mov_tl(ret, t0);
816 tcg_temp_free(t0);
819 /* Add functions with two operands */
820 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
821 static void glue(gen_, name)(DisasContext *ctx) \
823 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
824 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
825 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
827 /* Add functions with one operand and one immediate */
828 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
829 add_ca, compute_ca, compute_ov) \
830 static void glue(gen_, name)(DisasContext *ctx) \
832 TCGv t0 = tcg_const_tl(const_val); \
833 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
834 cpu_gpr[rA(ctx->opcode)], t0, \
835 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
836 tcg_temp_free(t0); \
839 /* add add. addo addo. */
840 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
841 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
842 /* addc addc. addco addco. */
843 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
844 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
845 /* adde adde. addeo addeo. */
846 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
847 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
848 /* addme addme. addmeo addmeo. */
849 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
850 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
851 /* addze addze. addzeo addzeo.*/
852 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
853 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
854 /* addi */
855 static void gen_addi(DisasContext *ctx)
857 target_long simm = SIMM(ctx->opcode);
859 if (rA(ctx->opcode) == 0) {
860 /* li case */
861 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
862 } else {
863 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
864 cpu_gpr[rA(ctx->opcode)], simm);
867 /* addic addic.*/
868 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
870 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
871 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
872 c, 0, 1, 0, compute_rc0);
873 tcg_temp_free(c);
876 static void gen_addic(DisasContext *ctx)
878 gen_op_addic(ctx, 0);
881 static void gen_addic_(DisasContext *ctx)
883 gen_op_addic(ctx, 1);
886 /* addis */
887 static void gen_addis(DisasContext *ctx)
889 target_long simm = SIMM(ctx->opcode);
891 if (rA(ctx->opcode) == 0) {
892 /* lis case */
893 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
894 } else {
895 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
896 cpu_gpr[rA(ctx->opcode)], simm << 16);
900 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
901 TCGv arg2, int sign, int compute_ov)
903 int l1 = gen_new_label();
904 int l2 = gen_new_label();
905 TCGv_i32 t0 = tcg_temp_local_new_i32();
906 TCGv_i32 t1 = tcg_temp_local_new_i32();
908 tcg_gen_trunc_tl_i32(t0, arg1);
909 tcg_gen_trunc_tl_i32(t1, arg2);
910 tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1);
911 if (sign) {
912 int l3 = gen_new_label();
913 tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3);
914 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1);
915 gen_set_label(l3);
916 tcg_gen_div_i32(t0, t0, t1);
917 } else {
918 tcg_gen_divu_i32(t0, t0, t1);
920 if (compute_ov) {
921 tcg_gen_movi_tl(cpu_ov, 0);
923 tcg_gen_br(l2);
924 gen_set_label(l1);
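/* Divide by zero, or INT32_MIN / -1 in the signed case: produce 0 for the
   unsigned divide (or the sign-propagated dividend for the signed one) and
   flag the overflow when OE is set. */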
925 if (sign) {
926 tcg_gen_sari_i32(t0, t0, 31);
927 } else {
928 tcg_gen_movi_i32(t0, 0);
930 if (compute_ov) {
931 tcg_gen_movi_tl(cpu_ov, 1);
932 tcg_gen_movi_tl(cpu_so, 1);
934 gen_set_label(l2);
935 tcg_gen_extu_i32_tl(ret, t0);
936 tcg_temp_free_i32(t0);
937 tcg_temp_free_i32(t1);
938 if (unlikely(Rc(ctx->opcode) != 0))
939 gen_set_Rc0(ctx, ret);
941 /* Div functions */
942 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
943 static void glue(gen_, name)(DisasContext *ctx) \
945 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
946 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
947 sign, compute_ov); \
949 /* divwu divwu. divwuo divwuo. */
950 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
951 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
952 /* divw divw. divwo divwo. */
953 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
954 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
955 #if defined(TARGET_PPC64)
956 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
957 TCGv arg2, int sign, int compute_ov)
959 int l1 = gen_new_label();
960 int l2 = gen_new_label();
962 tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1);
963 if (sign) {
964 int l3 = gen_new_label();
965 tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3);
966 tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1);
967 gen_set_label(l3);
968 tcg_gen_div_i64(ret, arg1, arg2);
969 } else {
970 tcg_gen_divu_i64(ret, arg1, arg2);
972 if (compute_ov) {
973 tcg_gen_movi_tl(cpu_ov, 0);
975 tcg_gen_br(l2);
976 gen_set_label(l1);
977 if (sign) {
978 tcg_gen_sari_i64(ret, arg1, 63);
979 } else {
980 tcg_gen_movi_i64(ret, 0);
982 if (compute_ov) {
983 tcg_gen_movi_tl(cpu_ov, 1);
984 tcg_gen_movi_tl(cpu_so, 1);
986 gen_set_label(l2);
987 if (unlikely(Rc(ctx->opcode) != 0))
988 gen_set_Rc0(ctx, ret);
990 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
991 static void glue(gen_, name)(DisasContext *ctx) \
993 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
994 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
995 sign, compute_ov); \
997 /* divdu divdu. divduo divduo. */
998 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
999 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1000 /* divd divd. divdo divdo. */
1001 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1002 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1003 #endif
1005 /* mulhw mulhw. */
1006 static void gen_mulhw(DisasContext *ctx)
1008 TCGv_i32 t0 = tcg_temp_new_i32();
1009 TCGv_i32 t1 = tcg_temp_new_i32();
1011 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1012 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1013 tcg_gen_muls2_i32(t0, t1, t0, t1);
1014 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1015 tcg_temp_free_i32(t0);
1016 tcg_temp_free_i32(t1);
1017 if (unlikely(Rc(ctx->opcode) != 0))
1018 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1021 /* mulhwu mulhwu. */
1022 static void gen_mulhwu(DisasContext *ctx)
1024 TCGv_i32 t0 = tcg_temp_new_i32();
1025 TCGv_i32 t1 = tcg_temp_new_i32();
1027 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1028 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1029 tcg_gen_mulu2_i32(t0, t1, t0, t1);
1030 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1031 tcg_temp_free_i32(t0);
1032 tcg_temp_free_i32(t1);
1033 if (unlikely(Rc(ctx->opcode) != 0))
1034 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1037 /* mullw mullw. */
1038 static void gen_mullw(DisasContext *ctx)
1040 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1041 cpu_gpr[rB(ctx->opcode)]);
1042 tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]);
1043 if (unlikely(Rc(ctx->opcode) != 0))
1044 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1047 /* mullwo mullwo. */
1048 static void gen_mullwo(DisasContext *ctx)
1050 TCGv_i32 t0 = tcg_temp_new_i32();
1051 TCGv_i32 t1 = tcg_temp_new_i32();
1053 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1054 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1055 tcg_gen_muls2_i32(t0, t1, t0, t1);
1056 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
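/* The 32x32 multiply overflows iff the high half of the product is not the
   sign extension of the low half. */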
1058 tcg_gen_sari_i32(t0, t0, 31);
1059 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
1060 tcg_gen_extu_i32_tl(cpu_ov, t0);
1061 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1063 tcg_temp_free_i32(t0);
1064 tcg_temp_free_i32(t1);
1065 if (unlikely(Rc(ctx->opcode) != 0))
1066 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1069 /* mulli */
1070 static void gen_mulli(DisasContext *ctx)
1072 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1073 SIMM(ctx->opcode));
1076 #if defined(TARGET_PPC64)
1077 /* mulhd mulhd. */
1078 static void gen_mulhd(DisasContext *ctx)
1080 TCGv lo = tcg_temp_new();
1081 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
1082 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1083 tcg_temp_free(lo);
1084 if (unlikely(Rc(ctx->opcode) != 0)) {
1085 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1089 /* mulhdu mulhdu. */
1090 static void gen_mulhdu(DisasContext *ctx)
1092 TCGv lo = tcg_temp_new();
1093 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
1094 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1095 tcg_temp_free(lo);
1096 if (unlikely(Rc(ctx->opcode) != 0)) {
1097 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1101 /* mulld mulld. */
1102 static void gen_mulld(DisasContext *ctx)
1104 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1105 cpu_gpr[rB(ctx->opcode)]);
1106 if (unlikely(Rc(ctx->opcode) != 0))
1107 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1110 /* mulldo mulldo. */
1111 static void gen_mulldo(DisasContext *ctx)
1113 gen_helper_mulldo(cpu_gpr[rD(ctx->opcode)], cpu_env,
1114 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1115 if (unlikely(Rc(ctx->opcode) != 0)) {
1116 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1119 #endif
1121 /* Common subf function */
1122 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1123 TCGv arg2, bool add_ca, bool compute_ca,
1124 bool compute_ov, bool compute_rc0)
1126 TCGv t0 = ret;
1128 if (compute_ca || compute_ov) {
1129 t0 = tcg_temp_new();
1132 if (compute_ca) {
1133 /* dest = ~arg1 + arg2 [+ ca]. */
1134 if (NARROW_MODE(ctx)) {
1135 /* Caution: a non-obvious corner case of the spec is that we
1136 must produce the *entire* 64-bit addition, but produce the
1137 carry into bit 32. */
1138 TCGv inv1 = tcg_temp_new();
1139 TCGv t1 = tcg_temp_new();
1140 tcg_gen_not_tl(inv1, arg1);
1141 if (add_ca) {
1142 tcg_gen_add_tl(t0, arg2, cpu_ca);
1143 } else {
1144 tcg_gen_addi_tl(t0, arg2, 1);
1146 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */
1147 tcg_gen_add_tl(t0, t0, inv1);
1148 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */
1149 tcg_temp_free(t1);
1150 tcg_gen_shri_tl(cpu_ca, cpu_ca, 32); /* extract bit 32 */
1151 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
1152 } else if (add_ca) {
1153 TCGv zero, inv1 = tcg_temp_new();
1154 tcg_gen_not_tl(inv1, arg1);
1155 zero = tcg_const_tl(0);
1156 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
1157 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
1158 tcg_temp_free(zero);
1159 tcg_temp_free(inv1);
1160 } else {
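/* No carry-in: CA is the inverted borrow, i.e. set when arg2 >= arg1
   (unsigned), and the subtraction itself can be done directly. */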
1161 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
1162 tcg_gen_sub_tl(t0, arg2, arg1);
1164 } else if (add_ca) {
1165 /* Since we're ignoring carry-out, we can simplify the
1166 standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. */
1167 tcg_gen_sub_tl(t0, arg2, arg1);
1168 tcg_gen_add_tl(t0, t0, cpu_ca);
1169 tcg_gen_subi_tl(t0, t0, 1);
1170 } else {
1171 tcg_gen_sub_tl(t0, arg2, arg1);
1174 if (compute_ov) {
1175 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1177 if (unlikely(compute_rc0)) {
1178 gen_set_Rc0(ctx, t0);
1181 if (!TCGV_EQUAL(t0, ret)) {
1182 tcg_gen_mov_tl(ret, t0);
1183 tcg_temp_free(t0);
1186 /* Sub functions with two operands */
1187 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
1188 static void glue(gen_, name)(DisasContext *ctx) \
1190 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1191 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1192 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1194 /* Sub functions with one operand and one immediate */
1195 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
1196 add_ca, compute_ca, compute_ov) \
1197 static void glue(gen_, name)(DisasContext *ctx) \
1199 TCGv t0 = tcg_const_tl(const_val); \
1200 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1201 cpu_gpr[rA(ctx->opcode)], t0, \
1202 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1203 tcg_temp_free(t0); \
1205 /* subf subf. subfo subfo. */
1206 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
1207 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
1208 /* subfc subfc. subfco subfco. */
1209 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
1210 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
1211 /* subfe subfe. subfeo subfeo. */
1212 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
1213 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
1214 /* subfme subfme. subfmeo subfmeo. */
1215 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
1216 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
1217 /* subfze subfze. subfzeo subfzeo.*/
1218 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
1219 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
1221 /* subfic */
1222 static void gen_subfic(DisasContext *ctx)
1224 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
1225 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1226 c, 0, 1, 0, 0);
1227 tcg_temp_free(c);
1230 /* neg neg. nego nego. */
1231 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
1233 TCGv zero = tcg_const_tl(0);
1234 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1235 zero, 0, 0, compute_ov, Rc(ctx->opcode));
1236 tcg_temp_free(zero);
1239 static void gen_neg(DisasContext *ctx)
1241 gen_op_arith_neg(ctx, 0);
1244 static void gen_nego(DisasContext *ctx)
1246 gen_op_arith_neg(ctx, 1);
1249 /*** Integer logical ***/
1250 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
1251 static void glue(gen_, name)(DisasContext *ctx) \
1253 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
1254 cpu_gpr[rB(ctx->opcode)]); \
1255 if (unlikely(Rc(ctx->opcode) != 0)) \
1256 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1259 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
1260 static void glue(gen_, name)(DisasContext *ctx) \
1262 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
1263 if (unlikely(Rc(ctx->opcode) != 0)) \
1264 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1267 /* and & and. */
1268 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
1269 /* andc & andc. */
1270 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
1272 /* andi. */
1273 static void gen_andi_(DisasContext *ctx)
1275 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
1276 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1279 /* andis. */
1280 static void gen_andis_(DisasContext *ctx)
1282 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
1283 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1286 /* cntlzw */
1287 static void gen_cntlzw(DisasContext *ctx)
1289 gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1290 if (unlikely(Rc(ctx->opcode) != 0))
1291 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1293 /* eqv & eqv. */
1294 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
1295 /* extsb & extsb. */
1296 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
1297 /* extsh & extsh. */
1298 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
1299 /* nand & nand. */
1300 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
1301 /* nor & nor. */
1302 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
1304 /* or & or. */
1305 static void gen_or(DisasContext *ctx)
1307 int rs, ra, rb;
1309 rs = rS(ctx->opcode);
1310 ra = rA(ctx->opcode);
1311 rb = rB(ctx->opcode);
1312 /* Optimisation for the mr / mr. case */
1313 if (rs != ra || rs != rb) {
1314 if (rs != rb)
1315 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
1316 else
1317 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
1318 if (unlikely(Rc(ctx->opcode) != 0))
1319 gen_set_Rc0(ctx, cpu_gpr[ra]);
1320 } else if (unlikely(Rc(ctx->opcode) != 0)) {
1321 gen_set_Rc0(ctx, cpu_gpr[rs]);
1322 #if defined(TARGET_PPC64)
1323 } else {
1324 int prio = 0;
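/* or rx,rx,rx forms are used as thread-priority hints; map the recognized
   encodings onto the PPR SPR below. */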
1326 switch (rs) {
1327 case 1:
1328 /* Set process priority to low */
1329 prio = 2;
1330 break;
1331 case 6:
1332 /* Set process priority to medium-low */
1333 prio = 3;
1334 break;
1335 case 2:
1336 /* Set process priority to normal */
1337 prio = 4;
1338 break;
1339 #if !defined(CONFIG_USER_ONLY)
1340 case 31:
1341 if (ctx->mem_idx > 0) {
1342 /* Set process priority to very low */
1343 prio = 1;
1345 break;
1346 case 5:
1347 if (ctx->mem_idx > 0) {
1348 /* Set process priority to medium-high */
1349 prio = 5;
1351 break;
1352 case 3:
1353 if (ctx->mem_idx > 0) {
1354 /* Set process priority to high */
1355 prio = 6;
1357 break;
1358 case 7:
1359 if (ctx->mem_idx > 1) {
1360 /* Set process priority to very high */
1361 prio = 7;
1363 break;
1364 #endif
1365 default:
1366 /* nop */
1367 break;
1369 if (prio) {
1370 TCGv t0 = tcg_temp_new();
1371 gen_load_spr(t0, SPR_PPR);
1372 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
1373 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
1374 gen_store_spr(SPR_PPR, t0);
1375 tcg_temp_free(t0);
1377 #endif
1380 /* orc & orc. */
1381 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
1383 /* xor & xor. */
1384 static void gen_xor(DisasContext *ctx)
1386 /* Optimisation for "set to zero" case */
1387 if (rS(ctx->opcode) != rB(ctx->opcode))
1388 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1389 else
1390 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1391 if (unlikely(Rc(ctx->opcode) != 0))
1392 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1395 /* ori */
1396 static void gen_ori(DisasContext *ctx)
1398 target_ulong uimm = UIMM(ctx->opcode);
1400 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1401 /* NOP */
1402 /* XXX: should handle special NOPs for POWER series */
1403 return;
1405 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1408 /* oris */
1409 static void gen_oris(DisasContext *ctx)
1411 target_ulong uimm = UIMM(ctx->opcode);
1413 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1414 /* NOP */
1415 return;
1417 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1420 /* xori */
1421 static void gen_xori(DisasContext *ctx)
1423 target_ulong uimm = UIMM(ctx->opcode);
1425 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1426 /* NOP */
1427 return;
1429 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1432 /* xoris */
1433 static void gen_xoris(DisasContext *ctx)
1435 target_ulong uimm = UIMM(ctx->opcode);
1437 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1438 /* NOP */
1439 return;
1441 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1444 /* popcntb : PowerPC 2.03 specification */
1445 static void gen_popcntb(DisasContext *ctx)
1447 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1450 static void gen_popcntw(DisasContext *ctx)
1452 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1455 #if defined(TARGET_PPC64)
1456 /* popcntd: PowerPC 2.06 specification */
1457 static void gen_popcntd(DisasContext *ctx)
1459 gen_helper_popcntd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1461 #endif
1463 #if defined(TARGET_PPC64)
1464 /* extsw & extsw. */
1465 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
1467 /* cntlzd */
1468 static void gen_cntlzd(DisasContext *ctx)
1470 gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1471 if (unlikely(Rc(ctx->opcode) != 0))
1472 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1474 #endif
1476 /*** Integer rotate ***/
1478 /* rlwimi & rlwimi. */
1479 static void gen_rlwimi(DisasContext *ctx)
1481 uint32_t mb, me, sh;
1483 mb = MB(ctx->opcode);
1484 me = ME(ctx->opcode);
1485 sh = SH(ctx->opcode);
1486 if (likely(sh == 0 && mb == 0 && me == 31)) {
1487 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1488 } else {
1489 target_ulong mask;
1490 TCGv t1;
1491 TCGv t0 = tcg_temp_new();
1492 #if defined(TARGET_PPC64)
1493 TCGv_i32 t2 = tcg_temp_new_i32();
1494 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
1495 tcg_gen_rotli_i32(t2, t2, sh);
1496 tcg_gen_extu_i32_i64(t0, t2);
1497 tcg_temp_free_i32(t2);
1498 #else
1499 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1500 #endif
1501 #if defined(TARGET_PPC64)
1502 mb += 32;
1503 me += 32;
1504 #endif
1505 mask = MASK(mb, me);
1506 t1 = tcg_temp_new();
1507 tcg_gen_andi_tl(t0, t0, mask);
1508 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1509 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1510 tcg_temp_free(t0);
1511 tcg_temp_free(t1);
1513 if (unlikely(Rc(ctx->opcode) != 0))
1514 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1517 /* rlwinm & rlwinm. */
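/* rlwinm covers several extended mnemonics: mb == 0 && me == 31 - sh is a
   32-bit left shift (slwi), sh == 32 - mb && me == 31 is a right shift
   (srwi); those cases avoid the rotate below. */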
1518 static void gen_rlwinm(DisasContext *ctx)
1520 uint32_t mb, me, sh;
1522 sh = SH(ctx->opcode);
1523 mb = MB(ctx->opcode);
1524 me = ME(ctx->opcode);
1526 if (likely(mb == 0 && me == (31 - sh))) {
1527 if (likely(sh == 0)) {
1528 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1529 } else {
1530 TCGv t0 = tcg_temp_new();
1531 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1532 tcg_gen_shli_tl(t0, t0, sh);
1533 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1534 tcg_temp_free(t0);
1536 } else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
1537 TCGv t0 = tcg_temp_new();
1538 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1539 tcg_gen_shri_tl(t0, t0, mb);
1540 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1541 tcg_temp_free(t0);
1542 } else {
1543 TCGv t0 = tcg_temp_new();
1544 #if defined(TARGET_PPC64)
1545 TCGv_i32 t1 = tcg_temp_new_i32();
1546 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1547 tcg_gen_rotli_i32(t1, t1, sh);
1548 tcg_gen_extu_i32_i64(t0, t1);
1549 tcg_temp_free_i32(t1);
1550 #else
1551 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1552 #endif
1553 #if defined(TARGET_PPC64)
1554 mb += 32;
1555 me += 32;
1556 #endif
1557 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1558 tcg_temp_free(t0);
1560 if (unlikely(Rc(ctx->opcode) != 0))
1561 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1564 /* rlwnm & rlwnm. */
1565 static void gen_rlwnm(DisasContext *ctx)
1567 uint32_t mb, me;
1568 TCGv t0;
1569 #if defined(TARGET_PPC64)
1570 TCGv_i32 t1, t2;
1571 #endif
1573 mb = MB(ctx->opcode);
1574 me = ME(ctx->opcode);
1575 t0 = tcg_temp_new();
1576 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
1577 #if defined(TARGET_PPC64)
1578 t1 = tcg_temp_new_i32();
1579 t2 = tcg_temp_new_i32();
1580 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1581 tcg_gen_trunc_i64_i32(t2, t0);
1582 tcg_gen_rotl_i32(t1, t1, t2);
1583 tcg_gen_extu_i32_i64(t0, t1);
1584 tcg_temp_free_i32(t1);
1585 tcg_temp_free_i32(t2);
1586 #else
1587 tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
1588 #endif
1589 if (unlikely(mb != 0 || me != 31)) {
1590 #if defined(TARGET_PPC64)
1591 mb += 32;
1592 me += 32;
1593 #endif
1594 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1595 } else {
1596 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1598 tcg_temp_free(t0);
1599 if (unlikely(Rc(ctx->opcode) != 0))
1600 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1603 #if defined(TARGET_PPC64)
1604 #define GEN_PPC64_R2(name, opc1, opc2) \
1605 static void glue(gen_, name##0)(DisasContext *ctx) \
1607 gen_##name(ctx, 0); \
1610 static void glue(gen_, name##1)(DisasContext *ctx) \
1612 gen_##name(ctx, 1); \
1614 #define GEN_PPC64_R4(name, opc1, opc2) \
1615 static void glue(gen_, name##0)(DisasContext *ctx) \
1617 gen_##name(ctx, 0, 0); \
1620 static void glue(gen_, name##1)(DisasContext *ctx) \
1622 gen_##name(ctx, 0, 1); \
1625 static void glue(gen_, name##2)(DisasContext *ctx) \
1627 gen_##name(ctx, 1, 0); \
1630 static void glue(gen_, name##3)(DisasContext *ctx) \
1632 gen_##name(ctx, 1, 1); \
1635 static inline void gen_rldinm(DisasContext *ctx, uint32_t mb, uint32_t me,
1636 uint32_t sh)
1638 if (likely(sh != 0 && mb == 0 && me == (63 - sh))) {
1639 tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
1640 } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
1641 tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
1642 } else {
1643 TCGv t0 = tcg_temp_new();
1644 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1645 if (likely(mb == 0 && me == 63)) {
1646 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1647 } else {
1648 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1650 tcg_temp_free(t0);
1652 if (unlikely(Rc(ctx->opcode) != 0))
1653 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1655 /* rldicl - rldicl. */
1656 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
1658 uint32_t sh, mb;
1660 sh = SH(ctx->opcode) | (shn << 5);
1661 mb = MB(ctx->opcode) | (mbn << 5);
1662 gen_rldinm(ctx, mb, 63, sh);
1664 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1665 /* rldicr - rldicr. */
1666 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
1668 uint32_t sh, me;
1670 sh = SH(ctx->opcode) | (shn << 5);
1671 me = MB(ctx->opcode) | (men << 5);
1672 gen_rldinm(ctx, 0, me, sh);
1674 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1675 /* rldic - rldic. */
1676 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
1678 uint32_t sh, mb;
1680 sh = SH(ctx->opcode) | (shn << 5);
1681 mb = MB(ctx->opcode) | (mbn << 5);
1682 gen_rldinm(ctx, mb, 63 - sh, sh);
1684 GEN_PPC64_R4(rldic, 0x1E, 0x04);
1686 static inline void gen_rldnm(DisasContext *ctx, uint32_t mb, uint32_t me)
1688 TCGv t0;
1690 mb = MB(ctx->opcode);
1691 me = ME(ctx->opcode);
1692 t0 = tcg_temp_new();
1693 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1694 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1695 if (unlikely(mb != 0 || me != 63)) {
1696 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1697 } else {
1698 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1700 tcg_temp_free(t0);
1701 if (unlikely(Rc(ctx->opcode) != 0))
1702 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1705 /* rldcl - rldcl. */
1706 static inline void gen_rldcl(DisasContext *ctx, int mbn)
1708 uint32_t mb;
1710 mb = MB(ctx->opcode) | (mbn << 5);
1711 gen_rldnm(ctx, mb, 63);
1713 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1714 /* rldcr - rldcr. */
1715 static inline void gen_rldcr(DisasContext *ctx, int men)
1717 uint32_t me;
1719 me = MB(ctx->opcode) | (men << 5);
1720 gen_rldnm(ctx, 0, me);
1722 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1723 /* rldimi - rldimi. */
1724 static inline void gen_rldimi(DisasContext *ctx, int mbn, int shn)
1726 uint32_t sh, mb, me;
1728 sh = SH(ctx->opcode) | (shn << 5);
1729 mb = MB(ctx->opcode) | (mbn << 5);
1730 me = 63 - sh;
1731 if (unlikely(sh == 0 && mb == 0)) {
1732 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1733 } else {
1734 TCGv t0, t1;
1735 target_ulong mask;
1737 t0 = tcg_temp_new();
1738 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1739 t1 = tcg_temp_new();
1740 mask = MASK(mb, me);
1741 tcg_gen_andi_tl(t0, t0, mask);
1742 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1743 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1744 tcg_temp_free(t0);
1745 tcg_temp_free(t1);
1747 if (unlikely(Rc(ctx->opcode) != 0))
1748 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1750 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1751 #endif
1753 /*** Integer shift ***/
1755 /* slw & slw. */
1756 static void gen_slw(DisasContext *ctx)
1758 TCGv t0, t1;
1760 t0 = tcg_temp_new();
1761 /* AND rS with a mask that is 0 when rB >= 0x20 */
1762 #if defined(TARGET_PPC64)
1763 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1764 tcg_gen_sari_tl(t0, t0, 0x3f);
1765 #else
1766 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1767 tcg_gen_sari_tl(t0, t0, 0x1f);
1768 #endif
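/* Shifting bit 5 of rB into the sign position and sign-propagating it
   yields an all-ones mask whenever the shift amount is 32 or more, so the
   andc below clears rS in that case. */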
1769 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1770 t1 = tcg_temp_new();
1771 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1772 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1773 tcg_temp_free(t1);
1774 tcg_temp_free(t0);
1775 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
1776 if (unlikely(Rc(ctx->opcode) != 0))
1777 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1780 /* sraw & sraw. */
1781 static void gen_sraw(DisasContext *ctx)
1783 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
1784 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1785 if (unlikely(Rc(ctx->opcode) != 0))
1786 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1789 /* srawi & srawi. */
1790 static void gen_srawi(DisasContext *ctx)
1792 int sh = SH(ctx->opcode);
1793 TCGv dst = cpu_gpr[rA(ctx->opcode)];
1794 TCGv src = cpu_gpr[rS(ctx->opcode)];
1795 if (sh == 0) {
1796 tcg_gen_mov_tl(dst, src);
1797 tcg_gen_movi_tl(cpu_ca, 0);
1798 } else {
1799 TCGv t0;
1800 tcg_gen_ext32s_tl(dst, src);
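/* CA is set iff the (sign-extended) source is negative and at least one
   1 bit is shifted out. */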
1801 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
1802 t0 = tcg_temp_new();
1803 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
1804 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
1805 tcg_temp_free(t0);
1806 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
1807 tcg_gen_sari_tl(dst, dst, sh);
1809 if (unlikely(Rc(ctx->opcode) != 0)) {
1810 gen_set_Rc0(ctx, dst);
1814 /* srw & srw. */
1815 static void gen_srw(DisasContext *ctx)
1817 TCGv t0, t1;
1819 t0 = tcg_temp_new();
1820 /* AND rS with a mask that is 0 when rB >= 0x20 */
1821 #if defined(TARGET_PPC64)
1822 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1823 tcg_gen_sari_tl(t0, t0, 0x3f);
1824 #else
1825 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1826 tcg_gen_sari_tl(t0, t0, 0x1f);
1827 #endif
1828 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1829 tcg_gen_ext32u_tl(t0, t0);
1830 t1 = tcg_temp_new();
1831 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1832 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1833 tcg_temp_free(t1);
1834 tcg_temp_free(t0);
1835 if (unlikely(Rc(ctx->opcode) != 0))
1836 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1839 #if defined(TARGET_PPC64)
1840 /* sld & sld. */
1841 static void gen_sld(DisasContext *ctx)
1843 TCGv t0, t1;
1845 t0 = tcg_temp_new();
1846 /* AND rS with a mask that is 0 when rB >= 0x40 */
1847 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
1848 tcg_gen_sari_tl(t0, t0, 0x3f);
1849 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1850 t1 = tcg_temp_new();
1851 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
1852 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1853 tcg_temp_free(t1);
1854 tcg_temp_free(t0);
1855 if (unlikely(Rc(ctx->opcode) != 0))
1856 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1859 /* srad & srad. */
1860 static void gen_srad(DisasContext *ctx)
1862 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
1863 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1864 if (unlikely(Rc(ctx->opcode) != 0))
1865 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1867 /* sradi & sradi. */
1868 static inline void gen_sradi(DisasContext *ctx, int n)
1870 int sh = SH(ctx->opcode) + (n << 5);
1871 TCGv dst = cpu_gpr[rA(ctx->opcode)];
1872 TCGv src = cpu_gpr[rS(ctx->opcode)];
1873 if (sh == 0) {
1874 tcg_gen_mov_tl(dst, src);
1875 tcg_gen_movi_tl(cpu_ca, 0);
1876 } else {
1877 TCGv t0;
1878 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
1879 t0 = tcg_temp_new();
1880 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
1881 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
1882 tcg_temp_free(t0);
1883 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
1884 tcg_gen_sari_tl(dst, src, sh);
1886 if (unlikely(Rc(ctx->opcode) != 0)) {
1887 gen_set_Rc0(ctx, dst);
1891 static void gen_sradi0(DisasContext *ctx)
1893 gen_sradi(ctx, 0);
1896 static void gen_sradi1(DisasContext *ctx)
1898 gen_sradi(ctx, 1);
1901 /* srd & srd. */
1902 static void gen_srd(DisasContext *ctx)
1904 TCGv t0, t1;
1906 t0 = tcg_temp_new();
1907 /* AND rS with a mask that is 0 when rB >= 0x40 */
1908 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
1909 tcg_gen_sari_tl(t0, t0, 0x3f);
1910 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1911 t1 = tcg_temp_new();
1912 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
1913 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1914 tcg_temp_free(t1);
1915 tcg_temp_free(t0);
1916 if (unlikely(Rc(ctx->opcode) != 0))
1917 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1919 #endif
1921 /*** Floating-Point arithmetic ***/
1922 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
1923 static void gen_f##name(DisasContext *ctx) \
1925 if (unlikely(!ctx->fpu_enabled)) { \
1926 gen_exception(ctx, POWERPC_EXCP_FPU); \
1927 return; \
1929 /* NIP cannot be restored if the memory exception comes from a helper */ \
1930 gen_update_nip(ctx, ctx->nip - 4); \
1931 gen_reset_fpstatus(); \
1932 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
1933 cpu_fpr[rA(ctx->opcode)], \
1934 cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
1935 if (isfloat) { \
1936 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
1937 cpu_fpr[rD(ctx->opcode)]); \
1939 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], set_fprf, \
1940 Rc(ctx->opcode) != 0); \
1943 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
1944 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
1945 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
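/* Each GEN_FLOAT_* family instantiates the double-precision insn and a
   single-precision name##s variant that re-rounds the result with frsp
   (the isfloat argument). */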
1947 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
1948 static void gen_f##name(DisasContext *ctx) \
1950 if (unlikely(!ctx->fpu_enabled)) { \
1951 gen_exception(ctx, POWERPC_EXCP_FPU); \
1952 return; \
1954 /* NIP cannot be restored if the memory exception comes from a helper */ \
1955 gen_update_nip(ctx, ctx->nip - 4); \
1956 gen_reset_fpstatus(); \
1957 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
1958 cpu_fpr[rA(ctx->opcode)], \
1959 cpu_fpr[rB(ctx->opcode)]); \
1960 if (isfloat) { \
1961 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
1962 cpu_fpr[rD(ctx->opcode)]); \
1964 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
1965 set_fprf, Rc(ctx->opcode) != 0); \
1967 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
1968 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
1969 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
1971 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
1972 static void gen_f##name(DisasContext *ctx) \
1974 if (unlikely(!ctx->fpu_enabled)) { \
1975 gen_exception(ctx, POWERPC_EXCP_FPU); \
1976 return; \
1978 /* NIP cannot be restored if the memory exception comes from a helper */ \
1979 gen_update_nip(ctx, ctx->nip - 4); \
1980 gen_reset_fpstatus(); \
1981 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
1982 cpu_fpr[rA(ctx->opcode)], \
1983 cpu_fpr[rC(ctx->opcode)]); \
1984 if (isfloat) { \
1985 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
1986 cpu_fpr[rD(ctx->opcode)]); \
1988 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
1989 set_fprf, Rc(ctx->opcode) != 0); \
1991 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
1992 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
1993 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
1995 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
1996 static void gen_f##name(DisasContext *ctx) \
1998 if (unlikely(!ctx->fpu_enabled)) { \
1999 gen_exception(ctx, POWERPC_EXCP_FPU); \
2000 return; \
2002 /* NIP cannot be restored if the memory exception comes from a helper */ \
2003 gen_update_nip(ctx, ctx->nip - 4); \
2004 gen_reset_fpstatus(); \
2005 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_env, \
2006 cpu_fpr[rB(ctx->opcode)]); \
2007 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2008 set_fprf, Rc(ctx->opcode) != 0); \
2011 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
2012 static void gen_f##name(DisasContext *ctx) \
2014 if (unlikely(!ctx->fpu_enabled)) { \
2015 gen_exception(ctx, POWERPC_EXCP_FPU); \
2016 return; \
2018 /* NIP cannot be restored if the memory exception comes from a helper */ \
2019 gen_update_nip(ctx, ctx->nip - 4); \
2020 gen_reset_fpstatus(); \
2021 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_env, \
2022 cpu_fpr[rB(ctx->opcode)]); \
2023 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2024 set_fprf, Rc(ctx->opcode) != 0); \
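/*
 * Each _GEN_FLOAT_* macro above expands to one translator function that
 * calls the corresponding softfloat helper.  The GEN_FLOAT_ACB/_AB/_AC
 * wrappers instantiate both the double-precision form and the single-
 * precision "name##s" form; the latter passes isfloat != 0 so the result
 * is additionally rounded with gen_helper_frsp, e.g.:
 *
 *   GEN_FLOAT_AB(add, ...)  ->  gen_fadd() and gen_fadds()
 */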
2027 /* fadd - fadds */
2028 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
2029 /* fdiv - fdivs */
2030 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
2031 /* fmul - fmuls */
2032 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
2034 /* fre */
2035 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
2037 /* fres */
2038 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
2040 /* frsqrte */
2041 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
2043 /* frsqrtes */
2044 static void gen_frsqrtes(DisasContext *ctx)
2046 if (unlikely(!ctx->fpu_enabled)) {
2047 gen_exception(ctx, POWERPC_EXCP_FPU);
2048 return;
2050 /* NIP cannot be restored if the memory exception comes from a helper */
2051 gen_update_nip(ctx, ctx->nip - 4);
2052 gen_reset_fpstatus();
2053 gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_env,
2054 cpu_fpr[rB(ctx->opcode)]);
2055 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,
2056 cpu_fpr[rD(ctx->opcode)]);
2057 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2060 /* fsel */
2061 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
2062 /* fsub - fsubs */
2063 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
2064 /* Optional: */
2066 /* fsqrt */
2067 static void gen_fsqrt(DisasContext *ctx)
2069 if (unlikely(!ctx->fpu_enabled)) {
2070 gen_exception(ctx, POWERPC_EXCP_FPU);
2071 return;
2073 /* NIP cannot be restored if the memory exception comes from a helper */
2074 gen_update_nip(ctx, ctx->nip - 4);
2075 gen_reset_fpstatus();
2076 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_env,
2077 cpu_fpr[rB(ctx->opcode)]);
2078 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2081 static void gen_fsqrts(DisasContext *ctx)
2083 if (unlikely(!ctx->fpu_enabled)) {
2084 gen_exception(ctx, POWERPC_EXCP_FPU);
2085 return;
2087 /* NIP cannot be restored if the memory exception comes from a helper */
2088 gen_update_nip(ctx, ctx->nip - 4);
2089 gen_reset_fpstatus();
2090 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_env,
2091 cpu_fpr[rB(ctx->opcode)]);
2092 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,
2093 cpu_fpr[rD(ctx->opcode)]);
2094 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2097 /*** Floating-Point multiply-and-add ***/
2098 /* fmadd - fmadds */
2099 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
2100 /* fmsub - fmsubs */
2101 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
2102 /* fnmadd - fnmadds */
2103 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
2104 /* fnmsub - fnmsubs */
2105 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
2107 /*** Floating-Point round & convert ***/
2108 /* fctiw */
2109 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
2110 /* fctiwz */
2111 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
2112 /* frsp */
2113 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
2114 #if defined(TARGET_PPC64)
2115 /* fcfid */
2116 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
2117 /* fctid */
2118 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
2119 /* fctidz */
2120 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
2121 #endif
2123 /* frin */
2124 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
2125 /* friz */
2126 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
2127 /* frip */
2128 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
2129 /* frim */
2130 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
2132 /*** Floating-Point compare ***/
2134 /* fcmpo */
2135 static void gen_fcmpo(DisasContext *ctx)
2137 TCGv_i32 crf;
2138 if (unlikely(!ctx->fpu_enabled)) {
2139 gen_exception(ctx, POWERPC_EXCP_FPU);
2140 return;
2142 /* NIP cannot be restored if the memory exception comes from a helper */
2143 gen_update_nip(ctx, ctx->nip - 4);
2144 gen_reset_fpstatus();
2145 crf = tcg_const_i32(crfD(ctx->opcode));
2146 gen_helper_fcmpo(cpu_env, cpu_fpr[rA(ctx->opcode)],
2147 cpu_fpr[rB(ctx->opcode)], crf);
2148 tcg_temp_free_i32(crf);
2149 gen_helper_float_check_status(cpu_env);
2152 /* fcmpu */
2153 static void gen_fcmpu(DisasContext *ctx)
2155 TCGv_i32 crf;
2156 if (unlikely(!ctx->fpu_enabled)) {
2157 gen_exception(ctx, POWERPC_EXCP_FPU);
2158 return;
2161 /* NIP cannot be restored if the memory exception comes from a helper */
2161 gen_update_nip(ctx, ctx->nip - 4);
2162 gen_reset_fpstatus();
2163 crf = tcg_const_i32(crfD(ctx->opcode));
2164 gen_helper_fcmpu(cpu_env, cpu_fpr[rA(ctx->opcode)],
2165 cpu_fpr[rB(ctx->opcode)], crf);
2166 tcg_temp_free_i32(crf);
2167 gen_helper_float_check_status(cpu_env);
2170 /*** Floating-point move ***/
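/*
 * fabs, fnabs and fneg below operate directly on bit 63 of the raw 64-bit
 * FPR image (the IEEE 754 double-precision sign bit) instead of going
 * through a softfloat helper:
 *
 *   fabs:  frD = frB & ~(1ULL << 63);
 *   fnabs: frD = frB |  (1ULL << 63);
 *   fneg:  frD = frB ^  (1ULL << 63);
 */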
2171 /* fabs */
2172 /* XXX: beware that fabs never checks for NaNs nor updates FPSCR */
2173 static void gen_fabs(DisasContext *ctx)
2175 if (unlikely(!ctx->fpu_enabled)) {
2176 gen_exception(ctx, POWERPC_EXCP_FPU);
2177 return;
2179 tcg_gen_andi_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
2180 ~(1ULL << 63));
2181 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2184 /* fmr - fmr. */
2185 /* XXX: beware that fmr never checks for NaNs nor updates FPSCR */
2186 static void gen_fmr(DisasContext *ctx)
2188 if (unlikely(!ctx->fpu_enabled)) {
2189 gen_exception(ctx, POWERPC_EXCP_FPU);
2190 return;
2192 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2193 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2196 /* fnabs */
2197 /* XXX: beware that fnabs never checks for NaNs nor updates FPSCR */
2198 static void gen_fnabs(DisasContext *ctx)
2200 if (unlikely(!ctx->fpu_enabled)) {
2201 gen_exception(ctx, POWERPC_EXCP_FPU);
2202 return;
2204 tcg_gen_ori_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
2205 1ULL << 63);
2206 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2209 /* fneg */
2210 /* XXX: beware that fneg never checks for NaNs nor updates FPSCR */
2211 static void gen_fneg(DisasContext *ctx)
2213 if (unlikely(!ctx->fpu_enabled)) {
2214 gen_exception(ctx, POWERPC_EXCP_FPU);
2215 return;
2217 tcg_gen_xori_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
2218 1ULL << 63);
2219 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2222 /*** Floating-Point status & ctrl register ***/
2224 /* mcrfs */
2225 static void gen_mcrfs(DisasContext *ctx)
2227 TCGv tmp = tcg_temp_new();
2228 int bfa;
2230 if (unlikely(!ctx->fpu_enabled)) {
2231 gen_exception(ctx, POWERPC_EXCP_FPU);
2232 return;
2234 bfa = 4 * (7 - crfS(ctx->opcode));
2235 tcg_gen_shri_tl(tmp, cpu_fpscr, bfa);
2236 tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], tmp);
2237 tcg_temp_free(tmp);
2238 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
2239 tcg_gen_andi_tl(cpu_fpscr, cpu_fpscr, ~(0xF << bfa));
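/*
 * mcrfs (above) copies the 4-bit FPSCR field selected by crfS into CR field
 * crfD, then clears those bits in FPSCR with the final andi, so the sampled
 * exception bits in that field are reset once they have been read.
 */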
2242 /* mffs */
2243 static void gen_mffs(DisasContext *ctx)
2245 if (unlikely(!ctx->fpu_enabled)) {
2246 gen_exception(ctx, POWERPC_EXCP_FPU);
2247 return;
2249 gen_reset_fpstatus();
2250 tcg_gen_extu_tl_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
2251 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2254 /* mtfsb0 */
2255 static void gen_mtfsb0(DisasContext *ctx)
2257 uint8_t crb;
2259 if (unlikely(!ctx->fpu_enabled)) {
2260 gen_exception(ctx, POWERPC_EXCP_FPU);
2261 return;
2263 crb = 31 - crbD(ctx->opcode);
2264 gen_reset_fpstatus();
2265 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
2266 TCGv_i32 t0;
2267 /* NIP cannot be restored if the memory exception comes from a helper */
2268 gen_update_nip(ctx, ctx->nip - 4);
2269 t0 = tcg_const_i32(crb);
2270 gen_helper_fpscr_clrbit(cpu_env, t0);
2271 tcg_temp_free_i32(t0);
2273 if (unlikely(Rc(ctx->opcode) != 0)) {
2274 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
2275 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
2279 /* mtfsb1 */
2280 static void gen_mtfsb1(DisasContext *ctx)
2282 uint8_t crb;
2284 if (unlikely(!ctx->fpu_enabled)) {
2285 gen_exception(ctx, POWERPC_EXCP_FPU);
2286 return;
2288 crb = 31 - crbD(ctx->opcode);
2289 gen_reset_fpstatus();
2290 /* XXX: we pretend we can only do IEEE floating-point computations */
2291 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
2292 TCGv_i32 t0;
2293 /* NIP cannot be restored if the memory exception comes from a helper */
2294 gen_update_nip(ctx, ctx->nip - 4);
2295 t0 = tcg_const_i32(crb);
2296 gen_helper_fpscr_setbit(cpu_env, t0);
2297 tcg_temp_free_i32(t0);
2299 if (unlikely(Rc(ctx->opcode) != 0)) {
2300 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
2301 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
2303 /* We can raise a deferred exception */
2304 gen_helper_float_check_status(cpu_env);
2307 /* mtfsf */
2308 static void gen_mtfsf(DisasContext *ctx)
2310 TCGv_i32 t0;
2311 int L = ctx->opcode & 0x02000000;
2313 if (unlikely(!ctx->fpu_enabled)) {
2314 gen_exception(ctx, POWERPC_EXCP_FPU);
2315 return;
2317 /* NIP cannot be restored if the memory exception comes from a helper */
2318 gen_update_nip(ctx, ctx->nip - 4);
2319 gen_reset_fpstatus();
2320 if (L)
2321 t0 = tcg_const_i32(0xff);
2322 else
2323 t0 = tcg_const_i32(FM(ctx->opcode));
2324 gen_helper_store_fpscr(cpu_env, cpu_fpr[rB(ctx->opcode)], t0);
2325 tcg_temp_free_i32(t0);
2326 if (unlikely(Rc(ctx->opcode) != 0)) {
2327 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
2328 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
2330 /* We can raise a deferred exception */
2331 gen_helper_float_check_status(cpu_env);
2334 /* mtfsfi */
2335 static void gen_mtfsfi(DisasContext *ctx)
2337 int bf, sh;
2338 TCGv_i64 t0;
2339 TCGv_i32 t1;
2341 if (unlikely(!ctx->fpu_enabled)) {
2342 gen_exception(ctx, POWERPC_EXCP_FPU);
2343 return;
2345 bf = crbD(ctx->opcode) >> 2;
2346 sh = 7 - bf;
2347 /* NIP cannot be restored if the memory exception comes from a helper */
2348 gen_update_nip(ctx, ctx->nip - 4);
2349 gen_reset_fpstatus();
2350 t0 = tcg_const_i64(FPIMM(ctx->opcode) << (4 * sh));
2351 t1 = tcg_const_i32(1 << sh);
2352 gen_helper_store_fpscr(cpu_env, t0, t1);
2353 tcg_temp_free_i64(t0);
2354 tcg_temp_free_i32(t1);
2355 if (unlikely(Rc(ctx->opcode) != 0)) {
2356 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
2357 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
2359 /* We can raise a deferred exception */
2360 gen_helper_float_check_status(cpu_env);
2363 /*** Addressing modes ***/
2364 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
2365 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2366 target_long maskl)
2368 target_long simm = SIMM(ctx->opcode);
2370 simm &= ~maskl;
2371 if (rA(ctx->opcode) == 0) {
2372 if (NARROW_MODE(ctx)) {
2373 simm = (uint32_t)simm;
2375 tcg_gen_movi_tl(EA, simm);
2376 } else if (likely(simm != 0)) {
2377 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2378 if (NARROW_MODE(ctx)) {
2379 tcg_gen_ext32u_tl(EA, EA);
2381 } else {
2382 if (NARROW_MODE(ctx)) {
2383 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2384 } else {
2385 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
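/*
 * Summary of gen_addr_imm_index: EA = (rA|0) + SIMM, where the low bits of
 * SIMM selected by 'maskl' are cleared first (e.g. 0x03 for DS-form ld/std)
 * and the result is truncated to 32 bits in narrow (32-bit) mode.
 */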
2390 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
2392 if (rA(ctx->opcode) == 0) {
2393 if (NARROW_MODE(ctx)) {
2394 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2395 } else {
2396 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2398 } else {
2399 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2400 if (NARROW_MODE(ctx)) {
2401 tcg_gen_ext32u_tl(EA, EA);
2406 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
2408 if (rA(ctx->opcode) == 0) {
2409 tcg_gen_movi_tl(EA, 0);
2410 } else if (NARROW_MODE(ctx)) {
2411 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2412 } else {
2413 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2417 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
2418 target_long val)
2420 tcg_gen_addi_tl(ret, arg1, val);
2421 if (NARROW_MODE(ctx)) {
2422 tcg_gen_ext32u_tl(ret, ret);
2426 static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask)
2428 int l1 = gen_new_label();
2429 TCGv t0 = tcg_temp_new();
2430 TCGv_i32 t1, t2;
2431 /* NIP cannot be restored if the memory exception comes from a helper */
2432 gen_update_nip(ctx, ctx->nip - 4);
2433 tcg_gen_andi_tl(t0, EA, mask);
2434 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2435 t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
2436 t2 = tcg_const_i32(0);
2437 gen_helper_raise_exception_err(cpu_env, t1, t2);
2438 tcg_temp_free_i32(t1);
2439 tcg_temp_free_i32(t2);
2440 gen_set_label(l1);
2441 tcg_temp_free(t0);
2444 /*** Integer load ***/
2445 static inline void gen_qemu_ld8u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2447 tcg_gen_qemu_ld8u(arg1, arg2, ctx->mem_idx);
2450 static inline void gen_qemu_ld8s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2452 tcg_gen_qemu_ld8s(arg1, arg2, ctx->mem_idx);
2455 static inline void gen_qemu_ld16u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2457 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2458 if (unlikely(ctx->le_mode)) {
2459 tcg_gen_bswap16_tl(arg1, arg1);
2463 static inline void gen_qemu_ld16s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2465 if (unlikely(ctx->le_mode)) {
2466 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2467 tcg_gen_bswap16_tl(arg1, arg1);
2468 tcg_gen_ext16s_tl(arg1, arg1);
2469 } else {
2470 tcg_gen_qemu_ld16s(arg1, arg2, ctx->mem_idx);
2474 static inline void gen_qemu_ld32u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2476 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2477 if (unlikely(ctx->le_mode)) {
2478 tcg_gen_bswap32_tl(arg1, arg1);
2482 #if defined(TARGET_PPC64)
2483 static inline void gen_qemu_ld32s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2485 if (unlikely(ctx->le_mode)) {
2486 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2487 tcg_gen_bswap32_tl(arg1, arg1);
2488 tcg_gen_ext32s_tl(arg1, arg1);
2489 } else
2490 tcg_gen_qemu_ld32s(arg1, arg2, ctx->mem_idx);
2492 #endif
2494 static inline void gen_qemu_ld64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2496 tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
2497 if (unlikely(ctx->le_mode)) {
2498 tcg_gen_bswap64_i64(arg1, arg1);
2502 static inline void gen_qemu_st8(DisasContext *ctx, TCGv arg1, TCGv arg2)
2504 tcg_gen_qemu_st8(arg1, arg2, ctx->mem_idx);
2507 static inline void gen_qemu_st16(DisasContext *ctx, TCGv arg1, TCGv arg2)
2509 if (unlikely(ctx->le_mode)) {
2510 TCGv t0 = tcg_temp_new();
2511 tcg_gen_ext16u_tl(t0, arg1);
2512 tcg_gen_bswap16_tl(t0, t0);
2513 tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2514 tcg_temp_free(t0);
2515 } else {
2516 tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2520 static inline void gen_qemu_st32(DisasContext *ctx, TCGv arg1, TCGv arg2)
2522 if (unlikely(ctx->le_mode)) {
2523 TCGv t0 = tcg_temp_new();
2524 tcg_gen_ext32u_tl(t0, arg1);
2525 tcg_gen_bswap32_tl(t0, t0);
2526 tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2527 tcg_temp_free(t0);
2528 } else {
2529 tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2533 static inline void gen_qemu_st64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2535 if (unlikely(ctx->le_mode)) {
2536 TCGv_i64 t0 = tcg_temp_new_i64();
2537 tcg_gen_bswap64_i64(t0, arg1);
2538 tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
2539 tcg_temp_free_i64(t0);
2540 } else
2541 tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx);
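/*
 * The gen_qemu_ld / gen_qemu_st wrappers above access memory in the
 * target's default big-endian order; when the CPU runs little-endian
 * (ctx->le_mode) the value is byte-swapped after a load or before a store,
 * roughly:
 *
 *   ld32u(reg, EA);  if (le_mode) reg = bswap32(reg);
 */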
2544 #define GEN_LD(name, ldop, opc, type) \
2545 static void glue(gen_, name)(DisasContext *ctx) \
2547 TCGv EA; \
2548 gen_set_access_type(ctx, ACCESS_INT); \
2549 EA = tcg_temp_new(); \
2550 gen_addr_imm_index(ctx, EA, 0); \
2551 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2552 tcg_temp_free(EA); \
2555 #define GEN_LDU(name, ldop, opc, type) \
2556 static void glue(gen_, name##u)(DisasContext *ctx) \
2558 TCGv EA; \
2559 if (unlikely(rA(ctx->opcode) == 0 || \
2560 rA(ctx->opcode) == rD(ctx->opcode))) { \
2561 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2562 return; \
2564 gen_set_access_type(ctx, ACCESS_INT); \
2565 EA = tcg_temp_new(); \
2566 if (type == PPC_64B) \
2567 gen_addr_imm_index(ctx, EA, 0x03); \
2568 else \
2569 gen_addr_imm_index(ctx, EA, 0); \
2570 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2571 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2572 tcg_temp_free(EA); \
2575 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
2576 static void glue(gen_, name##ux)(DisasContext *ctx) \
2578 TCGv EA; \
2579 if (unlikely(rA(ctx->opcode) == 0 || \
2580 rA(ctx->opcode) == rD(ctx->opcode))) { \
2581 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2582 return; \
2584 gen_set_access_type(ctx, ACCESS_INT); \
2585 EA = tcg_temp_new(); \
2586 gen_addr_reg_index(ctx, EA); \
2587 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2588 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2589 tcg_temp_free(EA); \
2592 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2) \
2593 static void glue(gen_, name##x)(DisasContext *ctx) \
2595 TCGv EA; \
2596 gen_set_access_type(ctx, ACCESS_INT); \
2597 EA = tcg_temp_new(); \
2598 gen_addr_reg_index(ctx, EA); \
2599 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2600 tcg_temp_free(EA); \
2602 #define GEN_LDX(name, ldop, opc2, opc3, type) \
2603 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE)
2605 #define GEN_LDS(name, ldop, op, type) \
2606 GEN_LD(name, ldop, op | 0x20, type); \
2607 GEN_LDU(name, ldop, op | 0x21, type); \
2608 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
2609 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
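/*
 * GEN_LDS(name, ldop, op, type) instantiates the four usual addressing
 * forms of an integer load, e.g. GEN_LDS(lbz, ld8u, ...) produces gen_lbz,
 * gen_lbzu, gen_lbzux and gen_lbzx (D-form, D-form with update, X-form with
 * update, X-form indexed).
 */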
2611 /* lbz lbzu lbzux lbzx */
2612 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
2613 /* lha lhau lhaux lhax */
2614 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
2615 /* lhz lhzu lhzux lhzx */
2616 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
2617 /* lwz lwzu lwzux lwzx */
2618 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
2619 #if defined(TARGET_PPC64)
2620 /* lwaux */
2621 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
2622 /* lwax */
2623 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
2624 /* ldux */
2625 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B);
2626 /* ldx */
2627 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B);
2629 static void gen_ld(DisasContext *ctx)
2631 TCGv EA;
2632 if (Rc(ctx->opcode)) {
2633 if (unlikely(rA(ctx->opcode) == 0 ||
2634 rA(ctx->opcode) == rD(ctx->opcode))) {
2635 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2636 return;
2639 gen_set_access_type(ctx, ACCESS_INT);
2640 EA = tcg_temp_new();
2641 gen_addr_imm_index(ctx, EA, 0x03);
2642 if (ctx->opcode & 0x02) {
2643 /* lwa (lwau is undefined) */
2644 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2645 } else {
2646 /* ld - ldu */
2647 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2649 if (Rc(ctx->opcode))
2650 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2651 tcg_temp_free(EA);
2654 /* lq */
2655 static void gen_lq(DisasContext *ctx)
2657 #if defined(CONFIG_USER_ONLY)
2658 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2659 #else
2660 int ra, rd;
2661 TCGv EA;
2663 /* Restore CPU state */
2664 if (unlikely(ctx->mem_idx == 0)) {
2665 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2666 return;
2668 ra = rA(ctx->opcode);
2669 rd = rD(ctx->opcode);
2670 if (unlikely((rd & 1) || rd == ra)) {
2671 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2672 return;
2674 if (unlikely(ctx->le_mode)) {
2675 /* Little-endian mode is not handled */
2676 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2677 return;
2679 gen_set_access_type(ctx, ACCESS_INT);
2680 EA = tcg_temp_new();
2681 gen_addr_imm_index(ctx, EA, 0x0F);
2682 gen_qemu_ld64(ctx, cpu_gpr[rd], EA);
2683 gen_addr_add(ctx, EA, EA, 8);
2684 gen_qemu_ld64(ctx, cpu_gpr[rd+1], EA);
2685 tcg_temp_free(EA);
2686 #endif
2688 #endif
2690 /*** Integer store ***/
2691 #define GEN_ST(name, stop, opc, type) \
2692 static void glue(gen_, name)(DisasContext *ctx) \
2694 TCGv EA; \
2695 gen_set_access_type(ctx, ACCESS_INT); \
2696 EA = tcg_temp_new(); \
2697 gen_addr_imm_index(ctx, EA, 0); \
2698 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2699 tcg_temp_free(EA); \
2702 #define GEN_STU(name, stop, opc, type) \
2703 static void glue(gen_, stop##u)(DisasContext *ctx) \
2705 TCGv EA; \
2706 if (unlikely(rA(ctx->opcode) == 0)) { \
2707 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2708 return; \
2710 gen_set_access_type(ctx, ACCESS_INT); \
2711 EA = tcg_temp_new(); \
2712 if (type == PPC_64B) \
2713 gen_addr_imm_index(ctx, EA, 0x03); \
2714 else \
2715 gen_addr_imm_index(ctx, EA, 0); \
2716 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2717 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2718 tcg_temp_free(EA); \
2721 #define GEN_STUX(name, stop, opc2, opc3, type) \
2722 static void glue(gen_, name##ux)(DisasContext *ctx) \
2724 TCGv EA; \
2725 if (unlikely(rA(ctx->opcode) == 0)) { \
2726 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2727 return; \
2729 gen_set_access_type(ctx, ACCESS_INT); \
2730 EA = tcg_temp_new(); \
2731 gen_addr_reg_index(ctx, EA); \
2732 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2733 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2734 tcg_temp_free(EA); \
2737 #define GEN_STX_E(name, stop, opc2, opc3, type, type2) \
2738 static void glue(gen_, name##x)(DisasContext *ctx) \
2740 TCGv EA; \
2741 gen_set_access_type(ctx, ACCESS_INT); \
2742 EA = tcg_temp_new(); \
2743 gen_addr_reg_index(ctx, EA); \
2744 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2745 tcg_temp_free(EA); \
2747 #define GEN_STX(name, stop, opc2, opc3, type) \
2748 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE)
2750 #define GEN_STS(name, stop, op, type) \
2751 GEN_ST(name, stop, op | 0x20, type); \
2752 GEN_STU(name, stop, op | 0x21, type); \
2753 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
2754 GEN_STX(name, stop, 0x17, op | 0x00, type)
2756 /* stb stbu stbux stbx */
2757 GEN_STS(stb, st8, 0x06, PPC_INTEGER);
2758 /* sth sthu sthux sthx */
2759 GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
2760 /* stw stwu stwux stwx */
2761 GEN_STS(stw, st32, 0x04, PPC_INTEGER);
2762 #if defined(TARGET_PPC64)
2763 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B);
2764 GEN_STX(std, st64, 0x15, 0x04, PPC_64B);
2766 static void gen_std(DisasContext *ctx)
2768 int rs;
2769 TCGv EA;
2771 rs = rS(ctx->opcode);
2772 if ((ctx->opcode & 0x3) == 0x2) {
2773 #if defined(CONFIG_USER_ONLY)
2774 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2775 #else
2776 /* stq */
2777 if (unlikely(ctx->mem_idx == 0)) {
2778 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2779 return;
2781 if (unlikely(rs & 1)) {
2782 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2783 return;
2785 if (unlikely(ctx->le_mode)) {
2786 /* Little-endian mode is not handled */
2787 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2788 return;
2790 gen_set_access_type(ctx, ACCESS_INT);
2791 EA = tcg_temp_new();
2792 gen_addr_imm_index(ctx, EA, 0x03);
2793 gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2794 gen_addr_add(ctx, EA, EA, 8);
2795 gen_qemu_st64(ctx, cpu_gpr[rs+1], EA);
2796 tcg_temp_free(EA);
2797 #endif
2798 } else {
2799 /* std / stdu */
2800 if (Rc(ctx->opcode)) {
2801 if (unlikely(rA(ctx->opcode) == 0)) {
2802 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2803 return;
2806 gen_set_access_type(ctx, ACCESS_INT);
2807 EA = tcg_temp_new();
2808 gen_addr_imm_index(ctx, EA, 0x03);
2809 gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2810 if (Rc(ctx->opcode))
2811 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2812 tcg_temp_free(EA);
2815 #endif
2816 /*** Integer load and store with byte reverse ***/
2817 /* lhbrx */
2818 static inline void gen_qemu_ld16ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2820 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2821 if (likely(!ctx->le_mode)) {
2822 tcg_gen_bswap16_tl(arg1, arg1);
2825 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
2827 /* lwbrx */
2828 static inline void gen_qemu_ld32ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2830 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2831 if (likely(!ctx->le_mode)) {
2832 tcg_gen_bswap32_tl(arg1, arg1);
2835 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
2837 #if defined(TARGET_PPC64)
2838 /* ldbrx */
2839 static inline void gen_qemu_ld64ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2841 tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
2842 if (likely(!ctx->le_mode)) {
2843 tcg_gen_bswap64_tl(arg1, arg1);
2846 GEN_LDX_E(ldbr, ld64ur, 0x14, 0x10, PPC_NONE, PPC2_DBRX);
2847 #endif /* TARGET_PPC64 */
2849 /* sthbrx */
2850 static inline void gen_qemu_st16r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2852 if (likely(!ctx->le_mode)) {
2853 TCGv t0 = tcg_temp_new();
2854 tcg_gen_ext16u_tl(t0, arg1);
2855 tcg_gen_bswap16_tl(t0, t0);
2856 tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2857 tcg_temp_free(t0);
2858 } else {
2859 tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2862 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
2864 /* stwbrx */
2865 static inline void gen_qemu_st32r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2867 if (likely(!ctx->le_mode)) {
2868 TCGv t0 = tcg_temp_new();
2869 tcg_gen_ext32u_tl(t0, arg1);
2870 tcg_gen_bswap32_tl(t0, t0);
2871 tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2872 tcg_temp_free(t0);
2873 } else {
2874 tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2877 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
2879 #if defined(TARGET_PPC64)
2880 /* stdbrx */
2881 static inline void gen_qemu_st64r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2883 if (likely(!ctx->le_mode)) {
2884 TCGv t0 = tcg_temp_new();
2885 tcg_gen_bswap64_tl(t0, arg1);
2886 tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
2887 tcg_temp_free(t0);
2888 } else {
2889 tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx);
2892 GEN_STX_E(stdbr, st64r, 0x14, 0x14, PPC_NONE, PPC2_DBRX);
2893 #endif /* TARGET_PPC64 */
2895 /*** Integer load and store multiple ***/
2897 /* lmw */
2898 static void gen_lmw(DisasContext *ctx)
2900 TCGv t0;
2901 TCGv_i32 t1;
2902 gen_set_access_type(ctx, ACCESS_INT);
2903 /* NIP cannot be restored if the memory exception comes from a helper */
2904 gen_update_nip(ctx, ctx->nip - 4);
2905 t0 = tcg_temp_new();
2906 t1 = tcg_const_i32(rD(ctx->opcode));
2907 gen_addr_imm_index(ctx, t0, 0);
2908 gen_helper_lmw(cpu_env, t0, t1);
2909 tcg_temp_free(t0);
2910 tcg_temp_free_i32(t1);
2913 /* stmw */
2914 static void gen_stmw(DisasContext *ctx)
2916 TCGv t0;
2917 TCGv_i32 t1;
2918 gen_set_access_type(ctx, ACCESS_INT);
2919 /* NIP cannot be restored if the memory exception comes from a helper */
2920 gen_update_nip(ctx, ctx->nip - 4);
2921 t0 = tcg_temp_new();
2922 t1 = tcg_const_i32(rS(ctx->opcode));
2923 gen_addr_imm_index(ctx, t0, 0);
2924 gen_helper_stmw(cpu_env, t0, t1);
2925 tcg_temp_free(t0);
2926 tcg_temp_free_i32(t1);
2929 /*** Integer load and store strings ***/
2931 /* lswi */
2932 /* PowerPC32 specification says we must generate an exception if
2933 * rA is in the range of registers to be loaded.
2934 * On the other hand, IBM says this is valid, but rA won't be loaded.
2935 * For now, I'll follow the spec...
2937 static void gen_lswi(DisasContext *ctx)
2939 TCGv t0;
2940 TCGv_i32 t1, t2;
2941 int nb = NB(ctx->opcode);
2942 int start = rD(ctx->opcode);
2943 int ra = rA(ctx->opcode);
2944 int nr;
2946 if (nb == 0)
2947 nb = 32;
2948 nr = nb / 4;
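/*
 * The check below is meant to reject the case where rA falls inside the
 * range of registers about to be loaded (see the comment above), including
 * ranges that wrap past r31 back to r0 (start + nr > 32).
 */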
2949 if (unlikely(((start + nr) > 32 &&
2950 start <= ra && (start + nr - 32) > ra) ||
2951 ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
2952 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
2953 return;
2955 gen_set_access_type(ctx, ACCESS_INT);
2956 /* NIP cannot be restored if the memory exception comes from a helper */
2957 gen_update_nip(ctx, ctx->nip - 4);
2958 t0 = tcg_temp_new();
2959 gen_addr_register(ctx, t0);
2960 t1 = tcg_const_i32(nb);
2961 t2 = tcg_const_i32(start);
2962 gen_helper_lsw(cpu_env, t0, t1, t2);
2963 tcg_temp_free(t0);
2964 tcg_temp_free_i32(t1);
2965 tcg_temp_free_i32(t2);
2968 /* lswx */
2969 static void gen_lswx(DisasContext *ctx)
2971 TCGv t0;
2972 TCGv_i32 t1, t2, t3;
2973 gen_set_access_type(ctx, ACCESS_INT);
2974 /* NIP cannot be restored if the memory exception comes from a helper */
2975 gen_update_nip(ctx, ctx->nip - 4);
2976 t0 = tcg_temp_new();
2977 gen_addr_reg_index(ctx, t0);
2978 t1 = tcg_const_i32(rD(ctx->opcode));
2979 t2 = tcg_const_i32(rA(ctx->opcode));
2980 t3 = tcg_const_i32(rB(ctx->opcode));
2981 gen_helper_lswx(cpu_env, t0, t1, t2, t3);
2982 tcg_temp_free(t0);
2983 tcg_temp_free_i32(t1);
2984 tcg_temp_free_i32(t2);
2985 tcg_temp_free_i32(t3);
2988 /* stswi */
2989 static void gen_stswi(DisasContext *ctx)
2991 TCGv t0;
2992 TCGv_i32 t1, t2;
2993 int nb = NB(ctx->opcode);
2994 gen_set_access_type(ctx, ACCESS_INT);
2995 /* NIP cannot be restored if the memory exception comes from a helper */
2996 gen_update_nip(ctx, ctx->nip - 4);
2997 t0 = tcg_temp_new();
2998 gen_addr_register(ctx, t0);
2999 if (nb == 0)
3000 nb = 32;
3001 t1 = tcg_const_i32(nb);
3002 t2 = tcg_const_i32(rS(ctx->opcode));
3003 gen_helper_stsw(cpu_env, t0, t1, t2);
3004 tcg_temp_free(t0);
3005 tcg_temp_free_i32(t1);
3006 tcg_temp_free_i32(t2);
3009 /* stswx */
3010 static void gen_stswx(DisasContext *ctx)
3012 TCGv t0;
3013 TCGv_i32 t1, t2;
3014 gen_set_access_type(ctx, ACCESS_INT);
3015 /* NIP cannot be restored if the memory exception comes from a helper */
3016 gen_update_nip(ctx, ctx->nip - 4);
3017 t0 = tcg_temp_new();
3018 gen_addr_reg_index(ctx, t0);
3019 t1 = tcg_temp_new_i32();
3020 tcg_gen_trunc_tl_i32(t1, cpu_xer);
3021 tcg_gen_andi_i32(t1, t1, 0x7F);
3022 t2 = tcg_const_i32(rS(ctx->opcode));
3023 gen_helper_stsw(cpu_env, t0, t1, t2);
3024 tcg_temp_free(t0);
3025 tcg_temp_free_i32(t1);
3026 tcg_temp_free_i32(t2);
3029 /*** Memory synchronisation ***/
3030 /* eieio */
3031 static void gen_eieio(DisasContext *ctx)
3035 /* isync */
3036 static void gen_isync(DisasContext *ctx)
3038 gen_stop_exception(ctx);
3041 /* lwarx */
3042 static void gen_lwarx(DisasContext *ctx)
3044 TCGv t0;
3045 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3046 gen_set_access_type(ctx, ACCESS_RES);
3047 t0 = tcg_temp_local_new();
3048 gen_addr_reg_index(ctx, t0);
3049 gen_check_align(ctx, t0, 0x03);
3050 gen_qemu_ld32u(ctx, gpr, t0);
3051 tcg_gen_mov_tl(cpu_reserve, t0);
3052 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUPPCState, reserve_val));
3053 tcg_temp_free(t0);
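/*
 * lwarx records the reservation address in cpu_reserve and the loaded value
 * in env->reserve_val; the matching stwcx. below (softmmu path) only sets
 * CR0[EQ] and performs the store when its address still equals cpu_reserve,
 * then clears the reservation by setting cpu_reserve to -1.
 */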
3056 #if defined(CONFIG_USER_ONLY)
3057 static void gen_conditional_store (DisasContext *ctx, TCGv EA,
3058 int reg, int size)
3060 TCGv t0 = tcg_temp_new();
3061 uint32_t save_exception = ctx->exception;
3063 tcg_gen_st_tl(EA, cpu_env, offsetof(CPUPPCState, reserve_ea));
3064 tcg_gen_movi_tl(t0, (size << 5) | reg);
3065 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, reserve_info));
3066 tcg_temp_free(t0);
3067 gen_update_nip(ctx, ctx->nip-4);
3068 ctx->exception = POWERPC_EXCP_BRANCH;
3069 gen_exception(ctx, POWERPC_EXCP_STCX);
3070 ctx->exception = save_exception;
3072 #endif
3074 /* stwcx. */
3075 static void gen_stwcx_(DisasContext *ctx)
3077 TCGv t0;
3078 gen_set_access_type(ctx, ACCESS_RES);
3079 t0 = tcg_temp_local_new();
3080 gen_addr_reg_index(ctx, t0);
3081 gen_check_align(ctx, t0, 0x03);
3082 #if defined(CONFIG_USER_ONLY)
3083 gen_conditional_store(ctx, t0, rS(ctx->opcode), 4);
3084 #else
3086 int l1;
3088 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3089 l1 = gen_new_label();
3090 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3091 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3092 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3093 gen_set_label(l1);
3094 tcg_gen_movi_tl(cpu_reserve, -1);
3096 #endif
3097 tcg_temp_free(t0);
3100 #if defined(TARGET_PPC64)
3101 /* ldarx */
3102 static void gen_ldarx(DisasContext *ctx)
3104 TCGv t0;
3105 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3106 gen_set_access_type(ctx, ACCESS_RES);
3107 t0 = tcg_temp_local_new();
3108 gen_addr_reg_index(ctx, t0);
3109 gen_check_align(ctx, t0, 0x07);
3110 gen_qemu_ld64(ctx, gpr, t0);
3111 tcg_gen_mov_tl(cpu_reserve, t0);
3112 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUPPCState, reserve_val));
3113 tcg_temp_free(t0);
3116 /* stdcx. */
3117 static void gen_stdcx_(DisasContext *ctx)
3119 TCGv t0;
3120 gen_set_access_type(ctx, ACCESS_RES);
3121 t0 = tcg_temp_local_new();
3122 gen_addr_reg_index(ctx, t0);
3123 gen_check_align(ctx, t0, 0x07);
3124 #if defined(CONFIG_USER_ONLY)
3125 gen_conditional_store(ctx, t0, rS(ctx->opcode), 8);
3126 #else
3128 int l1;
3129 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3130 l1 = gen_new_label();
3131 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3132 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3133 gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3134 gen_set_label(l1);
3135 tcg_gen_movi_tl(cpu_reserve, -1);
3137 #endif
3138 tcg_temp_free(t0);
3140 #endif /* defined(TARGET_PPC64) */
3142 /* sync */
3143 static void gen_sync(DisasContext *ctx)
3147 /* wait */
3148 static void gen_wait(DisasContext *ctx)
3150 TCGv_i32 t0 = tcg_temp_new_i32();
3151 tcg_gen_st_i32(t0, cpu_env,
3152 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
3153 tcg_temp_free_i32(t0);
3154 /* Stop translation, as the CPU is supposed to sleep from now on */
3155 gen_exception_err(ctx, EXCP_HLT, 1);
3158 /*** Floating-point load ***/
3159 #define GEN_LDF(name, ldop, opc, type) \
3160 static void glue(gen_, name)(DisasContext *ctx) \
3162 TCGv EA; \
3163 if (unlikely(!ctx->fpu_enabled)) { \
3164 gen_exception(ctx, POWERPC_EXCP_FPU); \
3165 return; \
3167 gen_set_access_type(ctx, ACCESS_FLOAT); \
3168 EA = tcg_temp_new(); \
3169 gen_addr_imm_index(ctx, EA, 0); \
3170 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3171 tcg_temp_free(EA); \
3174 #define GEN_LDUF(name, ldop, opc, type) \
3175 static void glue(gen_, name##u)(DisasContext *ctx) \
3177 TCGv EA; \
3178 if (unlikely(!ctx->fpu_enabled)) { \
3179 gen_exception(ctx, POWERPC_EXCP_FPU); \
3180 return; \
3182 if (unlikely(rA(ctx->opcode) == 0)) { \
3183 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3184 return; \
3186 gen_set_access_type(ctx, ACCESS_FLOAT); \
3187 EA = tcg_temp_new(); \
3188 gen_addr_imm_index(ctx, EA, 0); \
3189 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3190 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3191 tcg_temp_free(EA); \
3194 #define GEN_LDUXF(name, ldop, opc, type) \
3195 static void glue(gen_, name##ux)(DisasContext *ctx) \
3197 TCGv EA; \
3198 if (unlikely(!ctx->fpu_enabled)) { \
3199 gen_exception(ctx, POWERPC_EXCP_FPU); \
3200 return; \
3202 if (unlikely(rA(ctx->opcode) == 0)) { \
3203 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3204 return; \
3206 gen_set_access_type(ctx, ACCESS_FLOAT); \
3207 EA = tcg_temp_new(); \
3208 gen_addr_reg_index(ctx, EA); \
3209 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3210 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3211 tcg_temp_free(EA); \
3214 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
3215 static void glue(gen_, name##x)(DisasContext *ctx) \
3217 TCGv EA; \
3218 if (unlikely(!ctx->fpu_enabled)) { \
3219 gen_exception(ctx, POWERPC_EXCP_FPU); \
3220 return; \
3222 gen_set_access_type(ctx, ACCESS_FLOAT); \
3223 EA = tcg_temp_new(); \
3224 gen_addr_reg_index(ctx, EA); \
3225 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3226 tcg_temp_free(EA); \
3229 #define GEN_LDFS(name, ldop, op, type) \
3230 GEN_LDF(name, ldop, op | 0x20, type); \
3231 GEN_LDUF(name, ldop, op | 0x21, type); \
3232 GEN_LDUXF(name, ldop, op | 0x01, type); \
3233 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
3235 static inline void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3237 TCGv t0 = tcg_temp_new();
3238 TCGv_i32 t1 = tcg_temp_new_i32();
3239 gen_qemu_ld32u(ctx, t0, arg2);
3240 tcg_gen_trunc_tl_i32(t1, t0);
3241 tcg_temp_free(t0);
3242 gen_helper_float32_to_float64(arg1, cpu_env, t1);
3243 tcg_temp_free_i32(t1);
3246 /* lfd lfdu lfdux lfdx */
3247 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT);
3248 /* lfs lfsu lfsux lfsx */
3249 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);
3251 /*** Floating-point store ***/
3252 #define GEN_STF(name, stop, opc, type) \
3253 static void glue(gen_, name)(DisasContext *ctx) \
3255 TCGv EA; \
3256 if (unlikely(!ctx->fpu_enabled)) { \
3257 gen_exception(ctx, POWERPC_EXCP_FPU); \
3258 return; \
3260 gen_set_access_type(ctx, ACCESS_FLOAT); \
3261 EA = tcg_temp_new(); \
3262 gen_addr_imm_index(ctx, EA, 0); \
3263 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3264 tcg_temp_free(EA); \
3267 #define GEN_STUF(name, stop, opc, type) \
3268 static void glue(gen_, name##u)(DisasContext *ctx) \
3270 TCGv EA; \
3271 if (unlikely(!ctx->fpu_enabled)) { \
3272 gen_exception(ctx, POWERPC_EXCP_FPU); \
3273 return; \
3275 if (unlikely(rA(ctx->opcode) == 0)) { \
3276 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3277 return; \
3279 gen_set_access_type(ctx, ACCESS_FLOAT); \
3280 EA = tcg_temp_new(); \
3281 gen_addr_imm_index(ctx, EA, 0); \
3282 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3283 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3284 tcg_temp_free(EA); \
3287 #define GEN_STUXF(name, stop, opc, type) \
3288 static void glue(gen_, name##ux)(DisasContext *ctx) \
3290 TCGv EA; \
3291 if (unlikely(!ctx->fpu_enabled)) { \
3292 gen_exception(ctx, POWERPC_EXCP_FPU); \
3293 return; \
3295 if (unlikely(rA(ctx->opcode) == 0)) { \
3296 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3297 return; \
3299 gen_set_access_type(ctx, ACCESS_FLOAT); \
3300 EA = tcg_temp_new(); \
3301 gen_addr_reg_index(ctx, EA); \
3302 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3303 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3304 tcg_temp_free(EA); \
3307 #define GEN_STXF(name, stop, opc2, opc3, type) \
3308 static void glue(gen_, name##x)(DisasContext *ctx) \
3310 TCGv EA; \
3311 if (unlikely(!ctx->fpu_enabled)) { \
3312 gen_exception(ctx, POWERPC_EXCP_FPU); \
3313 return; \
3315 gen_set_access_type(ctx, ACCESS_FLOAT); \
3316 EA = tcg_temp_new(); \
3317 gen_addr_reg_index(ctx, EA); \
3318 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3319 tcg_temp_free(EA); \
3322 #define GEN_STFS(name, stop, op, type) \
3323 GEN_STF(name, stop, op | 0x20, type); \
3324 GEN_STUF(name, stop, op | 0x21, type); \
3325 GEN_STUXF(name, stop, op | 0x01, type); \
3326 GEN_STXF(name, stop, 0x17, op | 0x00, type)
3328 static inline void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3330 TCGv_i32 t0 = tcg_temp_new_i32();
3331 TCGv t1 = tcg_temp_new();
3332 gen_helper_float64_to_float32(t0, cpu_env, arg1);
3333 tcg_gen_extu_i32_tl(t1, t0);
3334 tcg_temp_free_i32(t0);
3335 gen_qemu_st32(ctx, t1, arg2);
3336 tcg_temp_free(t1);
3339 /* stfd stfdu stfdux stfdx */
3340 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT);
3341 /* stfs stfsu stfsux stfsx */
3342 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);
3344 /* Optional: */
3345 static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3347 TCGv t0 = tcg_temp_new();
3348 tcg_gen_trunc_i64_tl(t0, arg1);
3349 gen_qemu_st32(ctx, t0, arg2);
3350 tcg_temp_free(t0);
3352 /* stfiwx */
3353 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
3355 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
3357 #if defined(TARGET_PPC64)
3358 if (ctx->has_cfar)
3359 tcg_gen_movi_tl(cpu_cfar, nip);
3360 #endif
3363 /*** Branch ***/
3364 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
3366 TranslationBlock *tb;
3367 tb = ctx->tb;
3368 if (NARROW_MODE(ctx)) {
3369 dest = (uint32_t) dest;
3371 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
3372 likely(!ctx->singlestep_enabled)) {
3373 tcg_gen_goto_tb(n);
3374 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3375 tcg_gen_exit_tb((tcg_target_long)tb + n);
3376 } else {
3377 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3378 if (unlikely(ctx->singlestep_enabled)) {
3379 if ((ctx->singlestep_enabled &
3380 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
3381 (ctx->exception == POWERPC_EXCP_BRANCH ||
3382 ctx->exception == POWERPC_EXCP_TRACE)) {
3383 target_ulong tmp = ctx->nip;
3384 ctx->nip = dest;
3385 gen_exception(ctx, POWERPC_EXCP_TRACE);
3386 ctx->nip = tmp;
3388 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3389 gen_debug_exception(ctx);
3392 tcg_gen_exit_tb(0);
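/*
 * gen_goto_tb chains directly to the destination TB (goto_tb plus
 * exit_tb(tb + n)) only when the target address stays on the same page as
 * the current TB and single-stepping is disabled; otherwise it just updates
 * NIP, raises any pending trace/debug exception and exits to the main loop
 * with exit_tb(0).
 */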
3396 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
3398 if (NARROW_MODE(ctx)) {
3399 nip = (uint32_t)nip;
3401 tcg_gen_movi_tl(cpu_lr, nip);
3404 /* b ba bl bla */
3405 static void gen_b(DisasContext *ctx)
3407 target_ulong li, target;
3409 ctx->exception = POWERPC_EXCP_BRANCH;
3410 /* sign extend LI */
3411 li = LI(ctx->opcode);
3412 li = (li ^ 0x02000000) - 0x02000000;
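/* (li ^ 0x02000000) - 0x02000000 sign-extends the 26-bit branch
 * displacement (bit 25 is its sign bit) to the full target_long width. */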
3413 if (likely(AA(ctx->opcode) == 0)) {
3414 target = ctx->nip + li - 4;
3415 } else {
3416 target = li;
3418 if (LK(ctx->opcode)) {
3419 gen_setlr(ctx, ctx->nip);
3421 gen_update_cfar(ctx, ctx->nip);
3422 gen_goto_tb(ctx, 0, target);
3425 #define BCOND_IM 0
3426 #define BCOND_LR 1
3427 #define BCOND_CTR 2
3429 static inline void gen_bcond(DisasContext *ctx, int type)
3431 uint32_t bo = BO(ctx->opcode);
3432 int l1;
3433 TCGv target;
3435 ctx->exception = POWERPC_EXCP_BRANCH;
3436 if (type == BCOND_LR || type == BCOND_CTR) {
3437 target = tcg_temp_local_new();
3438 if (type == BCOND_CTR)
3439 tcg_gen_mov_tl(target, cpu_ctr);
3440 else
3441 tcg_gen_mov_tl(target, cpu_lr);
3442 } else {
3443 TCGV_UNUSED(target);
3445 if (LK(ctx->opcode))
3446 gen_setlr(ctx, ctx->nip);
3447 l1 = gen_new_label();
3448 if ((bo & 0x4) == 0) {
3449 /* Decrement and test CTR */
3450 TCGv temp = tcg_temp_new();
3451 if (unlikely(type == BCOND_CTR)) {
3452 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3453 return;
3455 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3456 if (NARROW_MODE(ctx)) {
3457 tcg_gen_ext32u_tl(temp, cpu_ctr);
3458 } else {
3459 tcg_gen_mov_tl(temp, cpu_ctr);
3461 if (bo & 0x2) {
3462 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3463 } else {
3464 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3466 tcg_temp_free(temp);
3468 if ((bo & 0x10) == 0) {
3469 /* Test CR */
3470 uint32_t bi = BI(ctx->opcode);
3471 uint32_t mask = 1 << (3 - (bi & 0x03));
3472 TCGv_i32 temp = tcg_temp_new_i32();
3474 if (bo & 0x8) {
3475 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3476 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3477 } else {
3478 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3479 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3481 tcg_temp_free_i32(temp);
3483 gen_update_cfar(ctx, ctx->nip);
3484 if (type == BCOND_IM) {
3485 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3486 if (likely(AA(ctx->opcode) == 0)) {
3487 gen_goto_tb(ctx, 0, ctx->nip + li - 4);
3488 } else {
3489 gen_goto_tb(ctx, 0, li);
3491 gen_set_label(l1);
3492 gen_goto_tb(ctx, 1, ctx->nip);
3493 } else {
3494 if (NARROW_MODE(ctx)) {
3495 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3496 } else {
3497 tcg_gen_andi_tl(cpu_nip, target, ~3);
3499 tcg_gen_exit_tb(0);
3500 gen_set_label(l1);
3501 gen_update_nip(ctx, ctx->nip);
3502 tcg_gen_exit_tb(0);
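/*
 * BO decoding used above: bit 0x04 clear means "decrement CTR and test it"
 * (bit 0x02 then selects branch on CTR == 0 vs CTR != 0), and bit 0x10
 * clear means "test the CR bit selected by BI" (bit 0x08 selects branch on
 * set vs clear).  Label l1 is the branch-not-taken path.
 */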
3506 static void gen_bc(DisasContext *ctx)
3508 gen_bcond(ctx, BCOND_IM);
3511 static void gen_bcctr(DisasContext *ctx)
3513 gen_bcond(ctx, BCOND_CTR);
3516 static void gen_bclr(DisasContext *ctx)
3518 gen_bcond(ctx, BCOND_LR);
3521 /*** Condition register logical ***/
3522 #define GEN_CRLOGIC(name, tcg_op, opc) \
3523 static void glue(gen_, name)(DisasContext *ctx) \
3525 uint8_t bitmask; \
3526 int sh; \
3527 TCGv_i32 t0, t1; \
3528 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3529 t0 = tcg_temp_new_i32(); \
3530 if (sh > 0) \
3531 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
3532 else if (sh < 0) \
3533 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
3534 else \
3535 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
3536 t1 = tcg_temp_new_i32(); \
3537 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3538 if (sh > 0) \
3539 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
3540 else if (sh < 0) \
3541 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
3542 else \
3543 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
3544 tcg_op(t0, t0, t1); \
3545 bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3546 tcg_gen_andi_i32(t0, t0, bitmask); \
3547 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3548 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
3549 tcg_temp_free_i32(t0); \
3550 tcg_temp_free_i32(t1); \
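/*
 * CR fields are kept as 4-bit values in cpu_crf[0..7].  GEN_CRLOGIC shifts
 * the crbA and crbB source bits so that they line up with the destination
 * bit position, applies the TCG operation, masks the single result bit and
 * merges it back into the destination CR field.
 */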
3553 /* crand */
3554 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3555 /* crandc */
3556 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3557 /* creqv */
3558 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3559 /* crnand */
3560 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3561 /* crnor */
3562 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3563 /* cror */
3564 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3565 /* crorc */
3566 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3567 /* crxor */
3568 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
3570 /* mcrf */
3571 static void gen_mcrf(DisasContext *ctx)
3573 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3576 /*** System linkage ***/
3578 /* rfi (mem_idx only) */
3579 static void gen_rfi(DisasContext *ctx)
3581 #if defined(CONFIG_USER_ONLY)
3582 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3583 #else
3584 /* Restore CPU state */
3585 if (unlikely(!ctx->mem_idx)) {
3586 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3587 return;
3589 gen_update_cfar(ctx, ctx->nip);
3590 gen_helper_rfi(cpu_env);
3591 gen_sync_exception(ctx);
3592 #endif
3595 #if defined(TARGET_PPC64)
3596 static void gen_rfid(DisasContext *ctx)
3598 #if defined(CONFIG_USER_ONLY)
3599 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3600 #else
3601 /* Restore CPU state */
3602 if (unlikely(!ctx->mem_idx)) {
3603 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3604 return;
3606 gen_update_cfar(ctx, ctx->nip);
3607 gen_helper_rfid(cpu_env);
3608 gen_sync_exception(ctx);
3609 #endif
3612 static void gen_hrfid(DisasContext *ctx)
3614 #if defined(CONFIG_USER_ONLY)
3615 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3616 #else
3617 /* Restore CPU state */
3618 if (unlikely(ctx->mem_idx <= 1)) {
3619 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3620 return;
3622 gen_helper_hrfid(cpu_env);
3623 gen_sync_exception(ctx);
3624 #endif
3626 #endif
3628 /* sc */
3629 #if defined(CONFIG_USER_ONLY)
3630 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3631 #else
3632 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3633 #endif
3634 static void gen_sc(DisasContext *ctx)
3636 uint32_t lev;
3638 lev = (ctx->opcode >> 5) & 0x7F;
3639 gen_exception_err(ctx, POWERPC_SYSCALL, lev);
3642 /*** Trap ***/
3644 /* tw */
3645 static void gen_tw(DisasContext *ctx)
3647 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3648 /* Update the nip since this might generate a trap exception */
3649 gen_update_nip(ctx, ctx->nip);
3650 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
3651 t0);
3652 tcg_temp_free_i32(t0);
3655 /* twi */
3656 static void gen_twi(DisasContext *ctx)
3658 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3659 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3660 /* Update the nip since this might generate a trap exception */
3661 gen_update_nip(ctx, ctx->nip);
3662 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
3663 tcg_temp_free(t0);
3664 tcg_temp_free_i32(t1);
3667 #if defined(TARGET_PPC64)
3668 /* td */
3669 static void gen_td(DisasContext *ctx)
3671 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3672 /* Update the nip since this might generate a trap exception */
3673 gen_update_nip(ctx, ctx->nip);
3674 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
3675 t0);
3676 tcg_temp_free_i32(t0);
3679 /* tdi */
3680 static void gen_tdi(DisasContext *ctx)
3682 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3683 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3684 /* Update the nip since this might generate a trap exception */
3685 gen_update_nip(ctx, ctx->nip);
3686 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
3687 tcg_temp_free(t0);
3688 tcg_temp_free_i32(t1);
3690 #endif
3692 /*** Processor control ***/
3694 static void gen_read_xer(TCGv dst)
3696 TCGv t0 = tcg_temp_new();
3697 TCGv t1 = tcg_temp_new();
3698 TCGv t2 = tcg_temp_new();
3699 tcg_gen_mov_tl(dst, cpu_xer);
3700 tcg_gen_shli_tl(t0, cpu_so, XER_SO);
3701 tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
3702 tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
3703 tcg_gen_or_tl(t0, t0, t1);
3704 tcg_gen_or_tl(dst, dst, t2);
3705 tcg_gen_or_tl(dst, dst, t0);
3706 tcg_temp_free(t0);
3707 tcg_temp_free(t1);
3708 tcg_temp_free(t2);
3711 static void gen_write_xer(TCGv src)
3713 tcg_gen_andi_tl(cpu_xer, src,
3714 ~((1u << XER_SO) | (1u << XER_OV) | (1u << XER_CA)));
3715 tcg_gen_shri_tl(cpu_so, src, XER_SO);
3716 tcg_gen_shri_tl(cpu_ov, src, XER_OV);
3717 tcg_gen_shri_tl(cpu_ca, src, XER_CA);
3718 tcg_gen_andi_tl(cpu_so, cpu_so, 1);
3719 tcg_gen_andi_tl(cpu_ov, cpu_ov, 1);
3720 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
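/*
 * XER is split across several globals: cpu_xer holds the remaining bits
 * while SO, OV and CA live in cpu_so, cpu_ov and cpu_ca.  gen_read_xer
 * reassembles the architectural value and gen_write_xer scatters it back.
 */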
3723 /* mcrxr */
3724 static void gen_mcrxr(DisasContext *ctx)
3726 TCGv_i32 t0 = tcg_temp_new_i32();
3727 TCGv_i32 t1 = tcg_temp_new_i32();
3728 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
3730 tcg_gen_trunc_tl_i32(t0, cpu_so);
3731 tcg_gen_trunc_tl_i32(t1, cpu_ov);
3732 tcg_gen_trunc_tl_i32(dst, cpu_ca);
3733 tcg_gen_shri_i32(t0, t0, 2);
3734 tcg_gen_shri_i32(t1, t1, 1);
3735 tcg_gen_or_i32(dst, dst, t0);
3736 tcg_gen_or_i32(dst, dst, t1);
3737 tcg_temp_free_i32(t0);
3738 tcg_temp_free_i32(t1);
3740 tcg_gen_movi_tl(cpu_so, 0);
3741 tcg_gen_movi_tl(cpu_ov, 0);
3742 tcg_gen_movi_tl(cpu_ca, 0);
3745 /* mfcr mfocrf */
3746 static void gen_mfcr(DisasContext *ctx)
3748 uint32_t crm, crn;
3750 if (likely(ctx->opcode & 0x00100000)) {
3751 crm = CRM(ctx->opcode);
3752 if (likely(crm && ((crm & (crm - 1)) == 0))) {
3753 crn = ctz32 (crm);
3754 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
3755 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
3756 cpu_gpr[rD(ctx->opcode)], crn * 4);
3758 } else {
3759 TCGv_i32 t0 = tcg_temp_new_i32();
3760 tcg_gen_mov_i32(t0, cpu_crf[0]);
3761 tcg_gen_shli_i32(t0, t0, 4);
3762 tcg_gen_or_i32(t0, t0, cpu_crf[1]);
3763 tcg_gen_shli_i32(t0, t0, 4);
3764 tcg_gen_or_i32(t0, t0, cpu_crf[2]);
3765 tcg_gen_shli_i32(t0, t0, 4);
3766 tcg_gen_or_i32(t0, t0, cpu_crf[3]);
3767 tcg_gen_shli_i32(t0, t0, 4);
3768 tcg_gen_or_i32(t0, t0, cpu_crf[4]);
3769 tcg_gen_shli_i32(t0, t0, 4);
3770 tcg_gen_or_i32(t0, t0, cpu_crf[5]);
3771 tcg_gen_shli_i32(t0, t0, 4);
3772 tcg_gen_or_i32(t0, t0, cpu_crf[6]);
3773 tcg_gen_shli_i32(t0, t0, 4);
3774 tcg_gen_or_i32(t0, t0, cpu_crf[7]);
3775 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
3776 tcg_temp_free_i32(t0);
3780 /* mfmsr */
3781 static void gen_mfmsr(DisasContext *ctx)
3783 #if defined(CONFIG_USER_ONLY)
3784 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3785 #else
3786 if (unlikely(!ctx->mem_idx)) {
3787 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3788 return;
3790 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
3791 #endif
3794 static void spr_noaccess(void *opaque, int gprn, int sprn)
3796 #if 0
3797 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
3798 printf("ERROR: try to access SPR %d !\n", sprn);
3799 #endif
3801 #define SPR_NOACCESS (&spr_noaccess)
3803 /* mfspr */
3804 static inline void gen_op_mfspr(DisasContext *ctx)
3806 void (*read_cb)(void *opaque, int gprn, int sprn);
3807 uint32_t sprn = SPR(ctx->opcode);
3809 #if !defined(CONFIG_USER_ONLY)
3810 if (ctx->mem_idx == 2)
3811 read_cb = ctx->spr_cb[sprn].hea_read;
3812 else if (ctx->mem_idx)
3813 read_cb = ctx->spr_cb[sprn].oea_read;
3814 else
3815 #endif
3816 read_cb = ctx->spr_cb[sprn].uea_read;
3817 if (likely(read_cb != NULL)) {
3818 if (likely(read_cb != SPR_NOACCESS)) {
3819 (*read_cb)(ctx, rD(ctx->opcode), sprn);
3820 } else {
3821 /* Privilege exception */
3822 /* This is a hack to avoid warnings when running Linux:
3823 * this OS breaks the PowerPC virtualisation model,
3824 * allowing userland applications to read the PVR
3826 if (sprn != SPR_PVR) {
3827 qemu_log("Trying to read privileged spr %d %03x at "
3828 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3829 printf("Trying to read privileged spr %d %03x at "
3830 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3832 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3834 } else {
3835 /* Not defined */
3836 qemu_log("Trying to read invalid spr %d %03x at "
3837 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3838 printf("Trying to read invalid spr %d %03x at " TARGET_FMT_lx "\n",
3839 sprn, sprn, ctx->nip);
3840 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
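/*
 * SPR accesses are dispatched through per-SPR callbacks selected by
 * privilege level: uea_* in user mode, oea_* in supervisor mode and hea_*
 * when ctx->mem_idx == 2 (hypervisor).  A NULL callback means the SPR does
 * not exist; SPR_NOACCESS means it exists but is not accessible here.
 */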
3844 static void gen_mfspr(DisasContext *ctx)
3846 gen_op_mfspr(ctx);
3849 /* mftb */
3850 static void gen_mftb(DisasContext *ctx)
3852 gen_op_mfspr(ctx);
3855 /* mtcrf mtocrf */
3856 static void gen_mtcrf(DisasContext *ctx)
3858 uint32_t crm, crn;
3860 crm = CRM(ctx->opcode);
3861 if (likely((ctx->opcode & 0x00100000))) {
3862 if (crm && ((crm & (crm - 1)) == 0)) {
3863 TCGv_i32 temp = tcg_temp_new_i32();
3864 crn = ctz32 (crm);
3865 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3866 tcg_gen_shri_i32(temp, temp, crn * 4);
3867 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
3868 tcg_temp_free_i32(temp);
3870 } else {
3871 TCGv_i32 temp = tcg_temp_new_i32();
3872 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3873 for (crn = 0 ; crn < 8 ; crn++) {
3874 if (crm & (1 << crn)) {
3875 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
3876 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
3879 tcg_temp_free_i32(temp);
3883 /* mtmsr */
3884 #if defined(TARGET_PPC64)
3885 static void gen_mtmsrd(DisasContext *ctx)
3887 #if defined(CONFIG_USER_ONLY)
3888 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3889 #else
3890 if (unlikely(!ctx->mem_idx)) {
3891 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3892 return;
3894 if (ctx->opcode & 0x00010000) {
3895 /* Special form that does not need any synchronisation */
3896 TCGv t0 = tcg_temp_new();
3897 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3898 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
3899 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3900 tcg_temp_free(t0);
3901 } else {
3902 /* XXX: we need to update nip before the store:
3903 * if we enter power saving mode, we will exit the loop
3904 * directly from ppc_store_msr
3906 gen_update_nip(ctx, ctx->nip);
3907 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]);
3908 /* Must stop the translation as machine state (may have) changed */
3909 /* Note that mtmsr is not always defined as context-synchronizing */
3910 gen_stop_exception(ctx);
3912 #endif
3914 #endif
3916 static void gen_mtmsr(DisasContext *ctx)
3918 #if defined(CONFIG_USER_ONLY)
3919 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3920 #else
3921 if (unlikely(!ctx->mem_idx)) {
3922 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3923 return;
3925 if (ctx->opcode & 0x00010000) {
3926 /* Special form that does not need any synchronisation */
3927 TCGv t0 = tcg_temp_new();
3928 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3929 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
3930 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3931 tcg_temp_free(t0);
3932 } else {
3933 TCGv msr = tcg_temp_new();
3935 /* XXX: we need to update nip before the store:
3936 * if we enter power saving mode, we will exit the loop
3937 * directly from ppc_store_msr
3939 gen_update_nip(ctx, ctx->nip);
3940 #if defined(TARGET_PPC64)
3941 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
3942 #else
3943 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
3944 #endif
3945 gen_helper_store_msr(cpu_env, msr);
3946 /* Must stop the translation as machine state (may have) changed */
3947 /* Note that mtmsr is not always defined as context-synchronizing */
3948 gen_stop_exception(ctx);
3950 #endif
3953 /* mtspr */
3954 static void gen_mtspr(DisasContext *ctx)
3956 void (*write_cb)(void *opaque, int sprn, int gprn);
3957 uint32_t sprn = SPR(ctx->opcode);
3959 #if !defined(CONFIG_USER_ONLY)
3960 if (ctx->mem_idx == 2)
3961 write_cb = ctx->spr_cb[sprn].hea_write;
3962 else if (ctx->mem_idx)
3963 write_cb = ctx->spr_cb[sprn].oea_write;
3964 else
3965 #endif
3966 write_cb = ctx->spr_cb[sprn].uea_write;
3967 if (likely(write_cb != NULL)) {
3968 if (likely(write_cb != SPR_NOACCESS)) {
3969 (*write_cb)(ctx, sprn, rS(ctx->opcode));
3970 } else {
3971 /* Privilege exception */
3972 qemu_log("Trying to write privileged spr %d %03x at "
3973 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3974 printf("Trying to write privileged spr %d %03x at " TARGET_FMT_lx
3975 "\n", sprn, sprn, ctx->nip);
3976 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3978 } else {
3979 /* Not defined */
3980 qemu_log("Trying to write invalid spr %d %03x at "
3981 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3982 printf("Trying to write invalid spr %d %03x at " TARGET_FMT_lx "\n",
3983 sprn, sprn, ctx->nip);
3984 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
3988 /*** Cache management ***/
3990 /* dcbf */
3991 static void gen_dcbf(DisasContext *ctx)
3993 /* XXX: specification says this is treated as a load by the MMU */
3994 TCGv t0;
3995 gen_set_access_type(ctx, ACCESS_CACHE);
3996 t0 = tcg_temp_new();
3997 gen_addr_reg_index(ctx, t0);
3998 gen_qemu_ld8u(ctx, t0, t0);
3999 tcg_temp_free(t0);
4002 /* dcbi (Supervisor only) */
4003 static void gen_dcbi(DisasContext *ctx)
4005 #if defined(CONFIG_USER_ONLY)
4006 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4007 #else
4008 TCGv EA, val;
4009 if (unlikely(!ctx->mem_idx)) {
4010 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4011 return;
4013 EA = tcg_temp_new();
4014 gen_set_access_type(ctx, ACCESS_CACHE);
4015 gen_addr_reg_index(ctx, EA);
4016 val = tcg_temp_new();
4017 /* XXX: specification says this should be treated as a store by the MMU */
4018 gen_qemu_ld8u(ctx, val, EA);
4019 gen_qemu_st8(ctx, val, EA);
4020 tcg_temp_free(val);
4021 tcg_temp_free(EA);
4022 #endif
4025 /* dcbst */
4026 static void gen_dcbst(DisasContext *ctx)
4028 /* XXX: specification says this is treated as a load by the MMU */
4029 TCGv t0;
4030 gen_set_access_type(ctx, ACCESS_CACHE);
4031 t0 = tcg_temp_new();
4032 gen_addr_reg_index(ctx, t0);
4033 gen_qemu_ld8u(ctx, t0, t0);
4034 tcg_temp_free(t0);
4037 /* dcbt */
4038 static void gen_dcbt(DisasContext *ctx)
4040 /* interpreted as no-op */
4041 /* XXX: specification says this is treated as a load by the MMU
4042 * but does not generate any exception
4043 */
4046 /* dcbtst */
4047 static void gen_dcbtst(DisasContext *ctx)
4049 /* interpreted as no-op */
4050 /* XXX: specification says this is treated as a load by the MMU
4051 * but does not generate any exception
4052 */
4055 /* dcbz */
4056 static void gen_dcbz(DisasContext *ctx)
4058 TCGv tcgv_addr;
4059 TCGv_i32 tcgv_is_dcbzl;
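/* Opcode bit 0x00200000 selects the dcbzl form; the flag is passed to the
 * helper so that dcbzl always clears a full cache line, regardless of the
 * block size configured for plain dcbz.
 */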
4060 int is_dcbzl = ctx->opcode & 0x00200000 ? 1 : 0;
4062 gen_set_access_type(ctx, ACCESS_CACHE);
4063 /* NIP cannot be restored if the memory exception comes from a helper */
4064 gen_update_nip(ctx, ctx->nip - 4);
4065 tcgv_addr = tcg_temp_new();
4066 tcgv_is_dcbzl = tcg_const_i32(is_dcbzl);
4068 gen_addr_reg_index(ctx, tcgv_addr);
4069 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_is_dcbzl);
4071 tcg_temp_free(tcgv_addr);
4072 tcg_temp_free_i32(tcgv_is_dcbzl);
4075 /* dst / dstt */
4076 static void gen_dst(DisasContext *ctx)
4078 if (rA(ctx->opcode) == 0) {
4079 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4080 } else {
4081 /* interpreted as no-op */
4085 /* dstst / dststt */
4086 static void gen_dstst(DisasContext *ctx)
4088 if (rA(ctx->opcode) == 0) {
4089 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4090 } else {
4091 /* interpreted as no-op */
4096 /* dss / dssall */
4097 static void gen_dss(DisasContext *ctx)
4099 /* interpreted as no-op */
4102 /* icbi */
4103 static void gen_icbi(DisasContext *ctx)
4105 TCGv t0;
4106 gen_set_access_type(ctx, ACCESS_CACHE);
4107 /* NIP cannot be restored if the memory exception comes from a helper */
4108 gen_update_nip(ctx, ctx->nip - 4);
4109 t0 = tcg_temp_new();
4110 gen_addr_reg_index(ctx, t0);
4111 gen_helper_icbi(cpu_env, t0);
4112 tcg_temp_free(t0);
4115 /* Optional: */
4116 /* dcba */
4117 static void gen_dcba(DisasContext *ctx)
4119 /* interpreted as no-op */
4120 /* XXX: specification says this is treated as a store by the MMU
4121 * but does not generate any exception
4122 */
4125 /*** Segment register manipulation ***/
4126 /* Supervisor only: */
4128 /* mfsr */
4129 static void gen_mfsr(DisasContext *ctx)
4131 #if defined(CONFIG_USER_ONLY)
4132 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4133 #else
4134 TCGv t0;
4135 if (unlikely(!ctx->mem_idx)) {
4136 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4137 return;
4139 t0 = tcg_const_tl(SR(ctx->opcode));
4140 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4141 tcg_temp_free(t0);
4142 #endif
4145 /* mfsrin */
4146 static void gen_mfsrin(DisasContext *ctx)
4148 #if defined(CONFIG_USER_ONLY)
4149 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4150 #else
4151 TCGv t0;
4152 if (unlikely(!ctx->mem_idx)) {
4153 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4154 return;
4156 t0 = tcg_temp_new();
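/* The segment register number is taken from the top nibble of rB. */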
4157 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4158 tcg_gen_andi_tl(t0, t0, 0xF);
4159 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4160 tcg_temp_free(t0);
4161 #endif
4164 /* mtsr */
4165 static void gen_mtsr(DisasContext *ctx)
4167 #if defined(CONFIG_USER_ONLY)
4168 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4169 #else
4170 TCGv t0;
4171 if (unlikely(!ctx->mem_idx)) {
4172 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4173 return;
4175 t0 = tcg_const_tl(SR(ctx->opcode));
4176 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
4177 tcg_temp_free(t0);
4178 #endif
4181 /* mtsrin */
4182 static void gen_mtsrin(DisasContext *ctx)
4184 #if defined(CONFIG_USER_ONLY)
4185 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4186 #else
4187 TCGv t0;
4188 if (unlikely(!ctx->mem_idx)) {
4189 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4190 return;
4192 t0 = tcg_temp_new();
4193 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4194 tcg_gen_andi_tl(t0, t0, 0xF);
4195 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
4196 tcg_temp_free(t0);
4197 #endif
4200 #if defined(TARGET_PPC64)
4201 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
4203 /* mfsr */
4204 static void gen_mfsr_64b(DisasContext *ctx)
4206 #if defined(CONFIG_USER_ONLY)
4207 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4208 #else
4209 TCGv t0;
4210 if (unlikely(!ctx->mem_idx)) {
4211 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4212 return;
4214 t0 = tcg_const_tl(SR(ctx->opcode));
4215 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4216 tcg_temp_free(t0);
4217 #endif
4220 /* mfsrin */
4221 static void gen_mfsrin_64b(DisasContext *ctx)
4223 #if defined(CONFIG_USER_ONLY)
4224 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4225 #else
4226 TCGv t0;
4227 if (unlikely(!ctx->mem_idx)) {
4228 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4229 return;
4231 t0 = tcg_temp_new();
4232 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4233 tcg_gen_andi_tl(t0, t0, 0xF);
4234 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4235 tcg_temp_free(t0);
4236 #endif
4239 /* mtsr */
4240 static void gen_mtsr_64b(DisasContext *ctx)
4242 #if defined(CONFIG_USER_ONLY)
4243 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4244 #else
4245 TCGv t0;
4246 if (unlikely(!ctx->mem_idx)) {
4247 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4248 return;
4250 t0 = tcg_const_tl(SR(ctx->opcode));
4251 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
4252 tcg_temp_free(t0);
4253 #endif
4256 /* mtsrin */
4257 static void gen_mtsrin_64b(DisasContext *ctx)
4259 #if defined(CONFIG_USER_ONLY)
4260 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4261 #else
4262 TCGv t0;
4263 if (unlikely(!ctx->mem_idx)) {
4264 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4265 return;
4267 t0 = tcg_temp_new();
4268 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4269 tcg_gen_andi_tl(t0, t0, 0xF);
4270 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
4271 tcg_temp_free(t0);
4272 #endif
4275 /* slbmte */
4276 static void gen_slbmte(DisasContext *ctx)
4278 #if defined(CONFIG_USER_ONLY)
4279 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4280 #else
4281 if (unlikely(!ctx->mem_idx)) {
4282 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4283 return;
4285 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
4286 cpu_gpr[rS(ctx->opcode)]);
4287 #endif
4290 static void gen_slbmfee(DisasContext *ctx)
4292 #if defined(CONFIG_USER_ONLY)
4293 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4294 #else
4295 if (unlikely(!ctx->mem_idx)) {
4296 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4297 return;
4299 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
4300 cpu_gpr[rB(ctx->opcode)]);
4301 #endif
4304 static void gen_slbmfev(DisasContext *ctx)
4306 #if defined(CONFIG_USER_ONLY)
4307 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4308 #else
4309 if (unlikely(!ctx->mem_idx)) {
4310 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4311 return;
4313 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
4314 cpu_gpr[rB(ctx->opcode)]);
4315 #endif
4317 #endif /* defined(TARGET_PPC64) */
4319 /*** Lookaside buffer management ***/
4320 /* Optional & mem_idx only: */
4322 /* tlbia */
4323 static void gen_tlbia(DisasContext *ctx)
4325 #if defined(CONFIG_USER_ONLY)
4326 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4327 #else
4328 if (unlikely(!ctx->mem_idx)) {
4329 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4330 return;
4332 gen_helper_tlbia(cpu_env);
4333 #endif
4336 /* tlbiel */
4337 static void gen_tlbiel(DisasContext *ctx)
4339 #if defined(CONFIG_USER_ONLY)
4340 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4341 #else
4342 if (unlikely(!ctx->mem_idx)) {
4343 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4344 return;
4346 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4347 #endif
4350 /* tlbie */
4351 static void gen_tlbie(DisasContext *ctx)
4353 #if defined(CONFIG_USER_ONLY)
4354 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4355 #else
4356 if (unlikely(!ctx->mem_idx)) {
4357 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4358 return;
4360 if (NARROW_MODE(ctx)) {
4361 TCGv t0 = tcg_temp_new();
4362 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
4363 gen_helper_tlbie(cpu_env, t0);
4364 tcg_temp_free(t0);
4365 } else {
4366 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4368 #endif
4371 /* tlbsync */
4372 static void gen_tlbsync(DisasContext *ctx)
4374 #if defined(CONFIG_USER_ONLY)
4375 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4376 #else
4377 if (unlikely(!ctx->mem_idx)) {
4378 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4379 return;
4381 /* This has no effect: it should ensure that all previous
4382 * tlbie operations have completed
4383 */
4384 gen_stop_exception(ctx);
4385 #endif
4388 #if defined(TARGET_PPC64)
4389 /* slbia */
4390 static void gen_slbia(DisasContext *ctx)
4392 #if defined(CONFIG_USER_ONLY)
4393 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4394 #else
4395 if (unlikely(!ctx->mem_idx)) {
4396 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4397 return;
4399 gen_helper_slbia(cpu_env);
4400 #endif
4403 /* slbie */
4404 static void gen_slbie(DisasContext *ctx)
4406 #if defined(CONFIG_USER_ONLY)
4407 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4408 #else
4409 if (unlikely(!ctx->mem_idx)) {
4410 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4411 return;
4413 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4414 #endif
4416 #endif
4418 /*** External control ***/
4419 /* Optional: */
4421 /* eciwx */
4422 static void gen_eciwx(DisasContext *ctx)
4424 TCGv t0;
4425 /* Should check EAR[E] ! */
4426 gen_set_access_type(ctx, ACCESS_EXT);
4427 t0 = tcg_temp_new();
4428 gen_addr_reg_index(ctx, t0);
4429 gen_check_align(ctx, t0, 0x03);
4430 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4431 tcg_temp_free(t0);
4434 /* ecowx */
4435 static void gen_ecowx(DisasContext *ctx)
4437 TCGv t0;
4438 /* Should check EAR[E] ! */
4439 gen_set_access_type(ctx, ACCESS_EXT);
4440 t0 = tcg_temp_new();
4441 gen_addr_reg_index(ctx, t0);
4442 gen_check_align(ctx, t0, 0x03);
4443 gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4444 tcg_temp_free(t0);
4447 /* PowerPC 601 specific instructions */
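/* Most of these POWER-heritage instructions use the MQ SPR as an implicit
 * extension register for the multiply, divide and long shift forms.
 */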
4449 /* abs - abs. */
4450 static void gen_abs(DisasContext *ctx)
4452 int l1 = gen_new_label();
4453 int l2 = gen_new_label();
4454 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
4455 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4456 tcg_gen_br(l2);
4457 gen_set_label(l1);
4458 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4459 gen_set_label(l2);
4460 if (unlikely(Rc(ctx->opcode) != 0))
4461 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4464 /* abso - abso. */
4465 static void gen_abso(DisasContext *ctx)
4467 int l1 = gen_new_label();
4468 int l2 = gen_new_label();
4469 int l3 = gen_new_label();
4470 /* Start with XER OV disabled, the most likely case */
4471 tcg_gen_movi_tl(cpu_ov, 0);
4472 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
4473 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
4474 tcg_gen_movi_tl(cpu_ov, 1);
4475 tcg_gen_movi_tl(cpu_so, 1);
4476 tcg_gen_br(l2);
4477 gen_set_label(l1);
4478 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4479 tcg_gen_br(l3);
4480 gen_set_label(l2);
4481 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4482 gen_set_label(l3);
4483 if (unlikely(Rc(ctx->opcode) != 0))
4484 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4487 /* clcs */
4488 static void gen_clcs(DisasContext *ctx)
4490 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
4491 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4492 tcg_temp_free_i32(t0);
4493 /* Rc=1 sets CR0 to an undefined state */
4496 /* div - div. */
4497 static void gen_div(DisasContext *ctx)
4499 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
4500 cpu_gpr[rB(ctx->opcode)]);
4501 if (unlikely(Rc(ctx->opcode) != 0))
4502 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4505 /* divo - divo. */
4506 static void gen_divo(DisasContext *ctx)
4508 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
4509 cpu_gpr[rB(ctx->opcode)]);
4510 if (unlikely(Rc(ctx->opcode) != 0))
4511 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4514 /* divs - divs. */
4515 static void gen_divs(DisasContext *ctx)
4517 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
4518 cpu_gpr[rB(ctx->opcode)]);
4519 if (unlikely(Rc(ctx->opcode) != 0))
4520 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4523 /* divso - divso. */
4524 static void gen_divso(DisasContext *ctx)
4526 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env,
4527 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4528 if (unlikely(Rc(ctx->opcode) != 0))
4529 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4532 /* doz - doz. */
4533 static void gen_doz(DisasContext *ctx)
4535 int l1 = gen_new_label();
4536 int l2 = gen_new_label();
4537 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4538 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4539 tcg_gen_br(l2);
4540 gen_set_label(l1);
4541 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4542 gen_set_label(l2);
4543 if (unlikely(Rc(ctx->opcode) != 0))
4544 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4547 /* dozo - dozo. */
4548 static void gen_dozo(DisasContext *ctx)
4550 int l1 = gen_new_label();
4551 int l2 = gen_new_label();
4552 TCGv t0 = tcg_temp_new();
4553 TCGv t1 = tcg_temp_new();
4554 TCGv t2 = tcg_temp_new();
4555 /* Start with XER OV disabled, the most likely case */
4556 tcg_gen_movi_tl(cpu_ov, 0);
4557 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4558 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4559 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4560 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
4561 tcg_gen_andc_tl(t1, t1, t2);
4562 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
4563 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4564 tcg_gen_movi_tl(cpu_ov, 1);
4565 tcg_gen_movi_tl(cpu_so, 1);
4566 tcg_gen_br(l2);
4567 gen_set_label(l1);
4568 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4569 gen_set_label(l2);
4570 tcg_temp_free(t0);
4571 tcg_temp_free(t1);
4572 tcg_temp_free(t2);
4573 if (unlikely(Rc(ctx->opcode) != 0))
4574 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4577 /* dozi */
4578 static void gen_dozi(DisasContext *ctx)
4580 target_long simm = SIMM(ctx->opcode);
4581 int l1 = gen_new_label();
4582 int l2 = gen_new_label();
4583 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
4584 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
4585 tcg_gen_br(l2);
4586 gen_set_label(l1);
4587 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4588 gen_set_label(l2);
4589 if (unlikely(Rc(ctx->opcode) != 0))
4590 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4593 /* lscbx - lscbx. */
4594 static void gen_lscbx(DisasContext *ctx)
4596 TCGv t0 = tcg_temp_new();
4597 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
4598 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
4599 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
4601 gen_addr_reg_index(ctx, t0);
4602 /* NIP cannot be restored if the memory exception comes from a helper */
4603 gen_update_nip(ctx, ctx->nip - 4);
4604 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3);
4605 tcg_temp_free_i32(t1);
4606 tcg_temp_free_i32(t2);
4607 tcg_temp_free_i32(t3);
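/* The helper returns the number of bytes actually transferred, which
 * replaces the low 7 bits of XER.
 */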
4608 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
4609 tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
4610 if (unlikely(Rc(ctx->opcode) != 0))
4611 gen_set_Rc0(ctx, t0);
4612 tcg_temp_free(t0);
4615 /* maskg - maskg. */
4616 static void gen_maskg(DisasContext *ctx)
4618 int l1 = gen_new_label();
4619 TCGv t0 = tcg_temp_new();
4620 TCGv t1 = tcg_temp_new();
4621 TCGv t2 = tcg_temp_new();
4622 TCGv t3 = tcg_temp_new();
4623 tcg_gen_movi_tl(t3, 0xFFFFFFFF);
4624 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4625 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
4626 tcg_gen_addi_tl(t2, t0, 1);
4627 tcg_gen_shr_tl(t2, t3, t2);
4628 tcg_gen_shr_tl(t3, t3, t1);
4629 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
4630 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
4631 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4632 gen_set_label(l1);
4633 tcg_temp_free(t0);
4634 tcg_temp_free(t1);
4635 tcg_temp_free(t2);
4636 tcg_temp_free(t3);
4637 if (unlikely(Rc(ctx->opcode) != 0))
4638 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4641 /* maskir - maskir. */
4642 static void gen_maskir(DisasContext *ctx)
4644 TCGv t0 = tcg_temp_new();
4645 TCGv t1 = tcg_temp_new();
4646 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4647 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4648 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4649 tcg_temp_free(t0);
4650 tcg_temp_free(t1);
4651 if (unlikely(Rc(ctx->opcode) != 0))
4652 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4655 /* mul - mul. */
4656 static void gen_mul(DisasContext *ctx)
4658 TCGv_i64 t0 = tcg_temp_new_i64();
4659 TCGv_i64 t1 = tcg_temp_new_i64();
4660 TCGv t2 = tcg_temp_new();
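/* 601 mul: the low 32 bits of the 64-bit product go to MQ, the high 32
 * bits to rD.
 */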
4661 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4662 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4663 tcg_gen_mul_i64(t0, t0, t1);
4664 tcg_gen_trunc_i64_tl(t2, t0);
4665 gen_store_spr(SPR_MQ, t2);
4666 tcg_gen_shri_i64(t1, t0, 32);
4667 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4668 tcg_temp_free_i64(t0);
4669 tcg_temp_free_i64(t1);
4670 tcg_temp_free(t2);
4671 if (unlikely(Rc(ctx->opcode) != 0))
4672 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4675 /* mulo - mulo. */
4676 static void gen_mulo(DisasContext *ctx)
4678 int l1 = gen_new_label();
4679 TCGv_i64 t0 = tcg_temp_new_i64();
4680 TCGv_i64 t1 = tcg_temp_new_i64();
4681 TCGv t2 = tcg_temp_new();
4682 /* Start with XER OV disabled, the most likely case */
4683 tcg_gen_movi_tl(cpu_ov, 0);
4684 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4685 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4686 tcg_gen_mul_i64(t0, t0, t1);
4687 tcg_gen_trunc_i64_tl(t2, t0);
4688 gen_store_spr(SPR_MQ, t2);
4689 tcg_gen_shri_i64(t1, t0, 32);
4690 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4691 tcg_gen_ext32s_i64(t1, t0);
4692 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
4693 tcg_gen_movi_tl(cpu_ov, 1);
4694 tcg_gen_movi_tl(cpu_so, 1);
4695 gen_set_label(l1);
4696 tcg_temp_free_i64(t0);
4697 tcg_temp_free_i64(t1);
4698 tcg_temp_free(t2);
4699 if (unlikely(Rc(ctx->opcode) != 0))
4700 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4703 /* nabs - nabs. */
4704 static void gen_nabs(DisasContext *ctx)
4706 int l1 = gen_new_label();
4707 int l2 = gen_new_label();
4708 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4709 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4710 tcg_gen_br(l2);
4711 gen_set_label(l1);
4712 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4713 gen_set_label(l2);
4714 if (unlikely(Rc(ctx->opcode) != 0))
4715 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4718 /* nabso - nabso. */
4719 static void gen_nabso(DisasContext *ctx)
4721 int l1 = gen_new_label();
4722 int l2 = gen_new_label();
4723 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4724 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4725 tcg_gen_br(l2);
4726 gen_set_label(l1);
4727 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4728 gen_set_label(l2);
4729 /* nabs never overflows */
4730 tcg_gen_movi_tl(cpu_ov, 0);
4731 if (unlikely(Rc(ctx->opcode) != 0))
4732 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4735 /* rlmi - rlmi. */
4736 static void gen_rlmi(DisasContext *ctx)
4738 uint32_t mb = MB(ctx->opcode);
4739 uint32_t me = ME(ctx->opcode);
4740 TCGv t0 = tcg_temp_new();
4741 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4742 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4743 tcg_gen_andi_tl(t0, t0, MASK(mb, me));
4744 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
4745 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
4746 tcg_temp_free(t0);
4747 if (unlikely(Rc(ctx->opcode) != 0))
4748 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4751 /* rrib - rrib. */
4752 static void gen_rrib(DisasContext *ctx)
4754 TCGv t0 = tcg_temp_new();
4755 TCGv t1 = tcg_temp_new();
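/* Rotate the sign bit of rS right by the low 5 bits of rB and insert it
 * into rA under a single-bit mask.
 */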
4756 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4757 tcg_gen_movi_tl(t1, 0x80000000);
4758 tcg_gen_shr_tl(t1, t1, t0);
4759 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4760 tcg_gen_and_tl(t0, t0, t1);
4761 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
4762 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4763 tcg_temp_free(t0);
4764 tcg_temp_free(t1);
4765 if (unlikely(Rc(ctx->opcode) != 0))
4766 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4769 /* sle - sle. */
4770 static void gen_sle(DisasContext *ctx)
4772 TCGv t0 = tcg_temp_new();
4773 TCGv t1 = tcg_temp_new();
4774 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4775 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4776 tcg_gen_subfi_tl(t1, 32, t1);
4777 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4778 tcg_gen_or_tl(t1, t0, t1);
4779 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4780 gen_store_spr(SPR_MQ, t1);
4781 tcg_temp_free(t0);
4782 tcg_temp_free(t1);
4783 if (unlikely(Rc(ctx->opcode) != 0))
4784 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4787 /* sleq - sleq. */
4788 static void gen_sleq(DisasContext *ctx)
4790 TCGv t0 = tcg_temp_new();
4791 TCGv t1 = tcg_temp_new();
4792 TCGv t2 = tcg_temp_new();
4793 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4794 tcg_gen_movi_tl(t2, 0xFFFFFFFF);
4795 tcg_gen_shl_tl(t2, t2, t0);
4796 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4797 gen_load_spr(t1, SPR_MQ);
4798 gen_store_spr(SPR_MQ, t0);
4799 tcg_gen_and_tl(t0, t0, t2);
4800 tcg_gen_andc_tl(t1, t1, t2);
4801 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4802 tcg_temp_free(t0);
4803 tcg_temp_free(t1);
4804 tcg_temp_free(t2);
4805 if (unlikely(Rc(ctx->opcode) != 0))
4806 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4809 /* sliq - sliq. */
4810 static void gen_sliq(DisasContext *ctx)
4812 int sh = SH(ctx->opcode);
4813 TCGv t0 = tcg_temp_new();
4814 TCGv t1 = tcg_temp_new();
4815 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4816 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4817 tcg_gen_or_tl(t1, t0, t1);
4818 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4819 gen_store_spr(SPR_MQ, t1);
4820 tcg_temp_free(t0);
4821 tcg_temp_free(t1);
4822 if (unlikely(Rc(ctx->opcode) != 0))
4823 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4826 /* slliq - slliq. */
4827 static void gen_slliq(DisasContext *ctx)
4829 int sh = SH(ctx->opcode);
4830 TCGv t0 = tcg_temp_new();
4831 TCGv t1 = tcg_temp_new();
4832 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4833 gen_load_spr(t1, SPR_MQ);
4834 gen_store_spr(SPR_MQ, t0);
4835 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
4836 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
4837 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4838 tcg_temp_free(t0);
4839 tcg_temp_free(t1);
4840 if (unlikely(Rc(ctx->opcode) != 0))
4841 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4844 /* sllq - sllq. */
4845 static void gen_sllq(DisasContext *ctx)
4847 int l1 = gen_new_label();
4848 int l2 = gen_new_label();
4849 TCGv t0 = tcg_temp_local_new();
4850 TCGv t1 = tcg_temp_local_new();
4851 TCGv t2 = tcg_temp_local_new();
4852 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4853 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4854 tcg_gen_shl_tl(t1, t1, t2);
4855 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4856 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
4857 gen_load_spr(t0, SPR_MQ);
4858 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4859 tcg_gen_br(l2);
4860 gen_set_label(l1);
4861 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4862 gen_load_spr(t2, SPR_MQ);
4863 tcg_gen_andc_tl(t1, t2, t1);
4864 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4865 gen_set_label(l2);
4866 tcg_temp_free(t0);
4867 tcg_temp_free(t1);
4868 tcg_temp_free(t2);
4869 if (unlikely(Rc(ctx->opcode) != 0))
4870 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4873 /* slq - slq. */
4874 static void gen_slq(DisasContext *ctx)
4876 int l1 = gen_new_label();
4877 TCGv t0 = tcg_temp_new();
4878 TCGv t1 = tcg_temp_new();
4879 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4880 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4881 tcg_gen_subfi_tl(t1, 32, t1);
4882 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4883 tcg_gen_or_tl(t1, t0, t1);
4884 gen_store_spr(SPR_MQ, t1);
4885 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
4886 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4887 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4888 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
4889 gen_set_label(l1);
4890 tcg_temp_free(t0);
4891 tcg_temp_free(t1);
4892 if (unlikely(Rc(ctx->opcode) != 0))
4893 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4896 /* sraiq - sraiq. */
4897 static void gen_sraiq(DisasContext *ctx)
4899 int sh = SH(ctx->opcode);
4900 int l1 = gen_new_label();
4901 TCGv t0 = tcg_temp_new();
4902 TCGv t1 = tcg_temp_new();
4903 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4904 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4905 tcg_gen_or_tl(t0, t0, t1);
4906 gen_store_spr(SPR_MQ, t0);
4907 tcg_gen_movi_tl(cpu_ca, 0);
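/* CA is set only when the source is negative and non-zero bits were
 * shifted out.
 */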
4908 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4909 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
4910 tcg_gen_movi_tl(cpu_ca, 1);
4911 gen_set_label(l1);
4912 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
4913 tcg_temp_free(t0);
4914 tcg_temp_free(t1);
4915 if (unlikely(Rc(ctx->opcode) != 0))
4916 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4919 /* sraq - sraq. */
4920 static void gen_sraq(DisasContext *ctx)
4922 int l1 = gen_new_label();
4923 int l2 = gen_new_label();
4924 TCGv t0 = tcg_temp_new();
4925 TCGv t1 = tcg_temp_local_new();
4926 TCGv t2 = tcg_temp_local_new();
4927 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4928 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4929 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
4930 tcg_gen_subfi_tl(t2, 32, t2);
4931 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
4932 tcg_gen_or_tl(t0, t0, t2);
4933 gen_store_spr(SPR_MQ, t0);
4934 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4935 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
4936 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
4937 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
4938 gen_set_label(l1);
4939 tcg_temp_free(t0);
4940 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
4941 tcg_gen_movi_tl(cpu_ca, 0);
4942 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4943 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
4944 tcg_gen_movi_tl(cpu_ca, 1);
4945 gen_set_label(l2);
4946 tcg_temp_free(t1);
4947 tcg_temp_free(t2);
4948 if (unlikely(Rc(ctx->opcode) != 0))
4949 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4952 /* sre - sre. */
4953 static void gen_sre(DisasContext *ctx)
4955 TCGv t0 = tcg_temp_new();
4956 TCGv t1 = tcg_temp_new();
4957 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4958 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4959 tcg_gen_subfi_tl(t1, 32, t1);
4960 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4961 tcg_gen_or_tl(t1, t0, t1);
4962 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4963 gen_store_spr(SPR_MQ, t1);
4964 tcg_temp_free(t0);
4965 tcg_temp_free(t1);
4966 if (unlikely(Rc(ctx->opcode) != 0))
4967 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4970 /* srea - srea. */
4971 static void gen_srea(DisasContext *ctx)
4973 TCGv t0 = tcg_temp_new();
4974 TCGv t1 = tcg_temp_new();
4975 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4976 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4977 gen_store_spr(SPR_MQ, t0);
4978 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
4979 tcg_temp_free(t0);
4980 tcg_temp_free(t1);
4981 if (unlikely(Rc(ctx->opcode) != 0))
4982 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4985 /* sreq */
4986 static void gen_sreq(DisasContext *ctx)
4988 TCGv t0 = tcg_temp_new();
4989 TCGv t1 = tcg_temp_new();
4990 TCGv t2 = tcg_temp_new();
4991 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4992 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4993 tcg_gen_shr_tl(t1, t1, t0);
4994 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4995 gen_load_spr(t2, SPR_MQ);
4996 gen_store_spr(SPR_MQ, t0);
4997 tcg_gen_and_tl(t0, t0, t1);
4998 tcg_gen_andc_tl(t2, t2, t1);
4999 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
5000 tcg_temp_free(t0);
5001 tcg_temp_free(t1);
5002 tcg_temp_free(t2);
5003 if (unlikely(Rc(ctx->opcode) != 0))
5004 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5007 /* sriq */
5008 static void gen_sriq(DisasContext *ctx)
5010 int sh = SH(ctx->opcode);
5011 TCGv t0 = tcg_temp_new();
5012 TCGv t1 = tcg_temp_new();
5013 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5014 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
5015 tcg_gen_or_tl(t1, t0, t1);
5016 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5017 gen_store_spr(SPR_MQ, t1);
5018 tcg_temp_free(t0);
5019 tcg_temp_free(t1);
5020 if (unlikely(Rc(ctx->opcode) != 0))
5021 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5024 /* srliq */
5025 static void gen_srliq(DisasContext *ctx)
5027 int sh = SH(ctx->opcode);
5028 TCGv t0 = tcg_temp_new();
5029 TCGv t1 = tcg_temp_new();
5030 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5031 gen_load_spr(t1, SPR_MQ);
5032 gen_store_spr(SPR_MQ, t0);
5033 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
5034 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
5035 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5036 tcg_temp_free(t0);
5037 tcg_temp_free(t1);
5038 if (unlikely(Rc(ctx->opcode) != 0))
5039 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5042 /* srlq */
5043 static void gen_srlq(DisasContext *ctx)
5045 int l1 = gen_new_label();
5046 int l2 = gen_new_label();
5047 TCGv t0 = tcg_temp_local_new();
5048 TCGv t1 = tcg_temp_local_new();
5049 TCGv t2 = tcg_temp_local_new();
5050 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
5051 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5052 tcg_gen_shr_tl(t2, t1, t2);
5053 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5054 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5055 gen_load_spr(t0, SPR_MQ);
5056 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
5057 tcg_gen_br(l2);
5058 gen_set_label(l1);
5059 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5060 tcg_gen_and_tl(t0, t0, t2);
5061 gen_load_spr(t1, SPR_MQ);
5062 tcg_gen_andc_tl(t1, t1, t2);
5063 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5064 gen_set_label(l2);
5065 tcg_temp_free(t0);
5066 tcg_temp_free(t1);
5067 tcg_temp_free(t2);
5068 if (unlikely(Rc(ctx->opcode) != 0))
5069 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5072 /* srq */
5073 static void gen_srq(DisasContext *ctx)
5075 int l1 = gen_new_label();
5076 TCGv t0 = tcg_temp_new();
5077 TCGv t1 = tcg_temp_new();
5078 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5079 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5080 tcg_gen_subfi_tl(t1, 32, t1);
5081 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5082 tcg_gen_or_tl(t1, t0, t1);
5083 gen_store_spr(SPR_MQ, t1);
5084 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
5085 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5086 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
5087 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
5088 gen_set_label(l1);
5089 tcg_temp_free(t0);
5090 tcg_temp_free(t1);
5091 if (unlikely(Rc(ctx->opcode) != 0))
5092 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5095 /* PowerPC 602 specific instructions */
5097 /* dsa */
5098 static void gen_dsa(DisasContext *ctx)
5100 /* XXX: TODO */
5101 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5104 /* esa */
5105 static void gen_esa(DisasContext *ctx)
5107 /* XXX: TODO */
5108 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5111 /* mfrom */
5112 static void gen_mfrom(DisasContext *ctx)
5114 #if defined(CONFIG_USER_ONLY)
5115 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5116 #else
5117 if (unlikely(!ctx->mem_idx)) {
5118 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5119 return;
5121 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5122 #endif
5125 /* 602 - 603 - G2 TLB management */
5127 /* tlbld */
5128 static void gen_tlbld_6xx(DisasContext *ctx)
5130 #if defined(CONFIG_USER_ONLY)
5131 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5132 #else
5133 if (unlikely(!ctx->mem_idx)) {
5134 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5135 return;
5137 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5138 #endif
5141 /* tlbli */
5142 static void gen_tlbli_6xx(DisasContext *ctx)
5144 #if defined(CONFIG_USER_ONLY)
5145 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5146 #else
5147 if (unlikely(!ctx->mem_idx)) {
5148 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5149 return;
5151 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5152 #endif
5155 /* 74xx TLB management */
5157 /* tlbld */
5158 static void gen_tlbld_74xx(DisasContext *ctx)
5160 #if defined(CONFIG_USER_ONLY)
5161 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5162 #else
5163 if (unlikely(!ctx->mem_idx)) {
5164 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5165 return;
5167 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5168 #endif
5171 /* tlbli */
5172 static void gen_tlbli_74xx(DisasContext *ctx)
5174 #if defined(CONFIG_USER_ONLY)
5175 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5176 #else
5177 if (unlikely(!ctx->mem_idx)) {
5178 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5179 return;
5181 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5182 #endif
5185 /* POWER instructions not in PowerPC 601 */
5187 /* clf */
5188 static void gen_clf(DisasContext *ctx)
5190 /* Cache line flush: implemented as no-op */
5193 /* cli */
5194 static void gen_cli(DisasContext *ctx)
5196 /* Cache line invalidate: privileged and treated as no-op */
5197 #if defined(CONFIG_USER_ONLY)
5198 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5199 #else
5200 if (unlikely(!ctx->mem_idx)) {
5201 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5202 return;
5204 #endif
5207 /* dclst */
5208 static void gen_dclst(DisasContext *ctx)
5210 /* Data cache line store: treated as no-op */
5213 static void gen_mfsri(DisasContext *ctx)
5215 #if defined(CONFIG_USER_ONLY)
5216 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5217 #else
5218 int ra = rA(ctx->opcode);
5219 int rd = rD(ctx->opcode);
5220 TCGv t0;
5221 if (unlikely(!ctx->mem_idx)) {
5222 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5223 return;
5225 t0 = tcg_temp_new();
5226 gen_addr_reg_index(ctx, t0);
5227 tcg_gen_shri_tl(t0, t0, 28);
5228 tcg_gen_andi_tl(t0, t0, 0xF);
5229 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
5230 tcg_temp_free(t0);
5231 if (ra != 0 && ra != rd)
5232 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
5233 #endif
5236 static void gen_rac(DisasContext *ctx)
5238 #if defined(CONFIG_USER_ONLY)
5239 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5240 #else
5241 TCGv t0;
5242 if (unlikely(!ctx->mem_idx)) {
5243 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5244 return;
5246 t0 = tcg_temp_new();
5247 gen_addr_reg_index(ctx, t0);
5248 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5249 tcg_temp_free(t0);
5250 #endif
5253 static void gen_rfsvc(DisasContext *ctx)
5255 #if defined(CONFIG_USER_ONLY)
5256 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5257 #else
5258 if (unlikely(!ctx->mem_idx)) {
5259 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5260 return;
5262 gen_helper_rfsvc(cpu_env);
5263 gen_sync_exception(ctx);
5264 #endif
5267 /* svc is not implemented for now */
5269 /* POWER2 specific instructions */
5270 /* Quad manipulation (load/store two floats at a time) */
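/* Each quad access moves the FPR pair rD and (rD + 1) mod 32 to or from
 * EA and EA + 8.
 */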
5272 /* lfq */
5273 static void gen_lfq(DisasContext *ctx)
5275 int rd = rD(ctx->opcode);
5276 TCGv t0;
5277 gen_set_access_type(ctx, ACCESS_FLOAT);
5278 t0 = tcg_temp_new();
5279 gen_addr_imm_index(ctx, t0, 0);
5280 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5281 gen_addr_add(ctx, t0, t0, 8);
5282 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5283 tcg_temp_free(t0);
5286 /* lfqu */
5287 static void gen_lfqu(DisasContext *ctx)
5289 int ra = rA(ctx->opcode);
5290 int rd = rD(ctx->opcode);
5291 TCGv t0, t1;
5292 gen_set_access_type(ctx, ACCESS_FLOAT);
5293 t0 = tcg_temp_new();
5294 t1 = tcg_temp_new();
5295 gen_addr_imm_index(ctx, t0, 0);
5296 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5297 gen_addr_add(ctx, t1, t0, 8);
5298 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5299 if (ra != 0)
5300 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5301 tcg_temp_free(t0);
5302 tcg_temp_free(t1);
5305 /* lfqux */
5306 static void gen_lfqux(DisasContext *ctx)
5308 int ra = rA(ctx->opcode);
5309 int rd = rD(ctx->opcode);
5310 gen_set_access_type(ctx, ACCESS_FLOAT);
5311 TCGv t0, t1;
5312 t0 = tcg_temp_new();
5313 gen_addr_reg_index(ctx, t0);
5314 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5315 t1 = tcg_temp_new();
5316 gen_addr_add(ctx, t1, t0, 8);
5317 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5318 tcg_temp_free(t1);
5319 if (ra != 0)
5320 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5321 tcg_temp_free(t0);
5324 /* lfqx */
5325 static void gen_lfqx(DisasContext *ctx)
5327 int rd = rD(ctx->opcode);
5328 TCGv t0;
5329 gen_set_access_type(ctx, ACCESS_FLOAT);
5330 t0 = tcg_temp_new();
5331 gen_addr_reg_index(ctx, t0);
5332 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5333 gen_addr_add(ctx, t0, t0, 8);
5334 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5335 tcg_temp_free(t0);
5338 /* stfq */
5339 static void gen_stfq(DisasContext *ctx)
5341 int rd = rD(ctx->opcode);
5342 TCGv t0;
5343 gen_set_access_type(ctx, ACCESS_FLOAT);
5344 t0 = tcg_temp_new();
5345 gen_addr_imm_index(ctx, t0, 0);
5346 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5347 gen_addr_add(ctx, t0, t0, 8);
5348 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5349 tcg_temp_free(t0);
5352 /* stfqu */
5353 static void gen_stfqu(DisasContext *ctx)
5355 int ra = rA(ctx->opcode);
5356 int rd = rD(ctx->opcode);
5357 TCGv t0, t1;
5358 gen_set_access_type(ctx, ACCESS_FLOAT);
5359 t0 = tcg_temp_new();
5360 gen_addr_imm_index(ctx, t0, 0);
5361 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5362 t1 = tcg_temp_new();
5363 gen_addr_add(ctx, t1, t0, 8);
5364 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5365 tcg_temp_free(t1);
5366 if (ra != 0)
5367 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5368 tcg_temp_free(t0);
5371 /* stfqux */
5372 static void gen_stfqux(DisasContext *ctx)
5374 int ra = rA(ctx->opcode);
5375 int rd = rD(ctx->opcode);
5376 TCGv t0, t1;
5377 gen_set_access_type(ctx, ACCESS_FLOAT);
5378 t0 = tcg_temp_new();
5379 gen_addr_reg_index(ctx, t0);
5380 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5381 t1 = tcg_temp_new();
5382 gen_addr_add(ctx, t1, t0, 8);
5383 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5384 tcg_temp_free(t1);
5385 if (ra != 0)
5386 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5387 tcg_temp_free(t0);
5390 /* stfqx */
5391 static void gen_stfqx(DisasContext *ctx)
5393 int rd = rD(ctx->opcode);
5394 TCGv t0;
5395 gen_set_access_type(ctx, ACCESS_FLOAT);
5396 t0 = tcg_temp_new();
5397 gen_addr_reg_index(ctx, t0);
5398 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5399 gen_addr_add(ctx, t0, t0, 8);
5400 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5401 tcg_temp_free(t0);
5404 /* BookE specific instructions */
5406 /* XXX: not implemented on 440 ? */
5407 static void gen_mfapidi(DisasContext *ctx)
5409 /* XXX: TODO */
5410 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5413 /* XXX: not implemented on 440 ? */
5414 static void gen_tlbiva(DisasContext *ctx)
5416 #if defined(CONFIG_USER_ONLY)
5417 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5418 #else
5419 TCGv t0;
5420 if (unlikely(!ctx->mem_idx)) {
5421 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5422 return;
5424 t0 = tcg_temp_new();
5425 gen_addr_reg_index(ctx, t0);
5426 gen_helper_tlbie(cpu_env, t0);
5427 tcg_temp_free(t0);
5428 #endif
5431 /* All 405 MAC instructions are translated here */
5432 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5433 int ra, int rb, int rt, int Rc)
5435 TCGv t0, t1;
5437 t0 = tcg_temp_local_new();
5438 t1 = tcg_temp_local_new();
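/* The opc2/opc3 fields encode the operation:
 *   opc3 & 0x0D selects which 16-bit halves of rA/rB are multiplied,
 *   opc3 & 0x01 selects signed operands, opc3 & 0x02 saturation,
 *   opc3 & 0x10 XER[OV]/[SO] update on overflow,
 *   opc2 & 0x04 accumulates into rT, opc2 & 0x02 subtracts the product
 *   instead of adding it.
 */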
5440 switch (opc3 & 0x0D) {
5441 case 0x05:
5442 /* macchw - macchw. - macchwo - macchwo. */
5443 /* macchws - macchws. - macchwso - macchwso. */
5444 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
5445 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
5446 /* mulchw - mulchw. */
5447 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5448 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5449 tcg_gen_ext16s_tl(t1, t1);
5450 break;
5451 case 0x04:
5452 /* macchwu - macchwu. - macchwuo - macchwuo. */
5453 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
5454 /* mulchwu - mulchwu. */
5455 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5456 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5457 tcg_gen_ext16u_tl(t1, t1);
5458 break;
5459 case 0x01:
5460 /* machhw - machhw. - machhwo - machhwo. */
5461 /* machhws - machhws. - machhwso - machhwso. */
5462 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
5463 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
5464 /* mulhhw - mulhhw. */
5465 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5466 tcg_gen_ext16s_tl(t0, t0);
5467 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5468 tcg_gen_ext16s_tl(t1, t1);
5469 break;
5470 case 0x00:
5471 /* machhwu - machhwu. - machhwuo - machhwuo. */
5472 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
5473 /* mulhhwu - mulhhwu. */
5474 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5475 tcg_gen_ext16u_tl(t0, t0);
5476 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5477 tcg_gen_ext16u_tl(t1, t1);
5478 break;
5479 case 0x0D:
5480 /* maclhw - maclhw. - maclhwo - maclhwo. */
5481 /* maclhws - maclhws. - maclhwso - maclhwso. */
5482 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
5483 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
5484 /* mullhw - mullhw. */
5485 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5486 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5487 break;
5488 case 0x0C:
5489 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
5490 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
5491 /* mullhwu - mullhwu. */
5492 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5493 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5494 break;
5496 if (opc2 & 0x04) {
5497 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5498 tcg_gen_mul_tl(t1, t0, t1);
5499 if (opc2 & 0x02) {
5500 /* nmultiply-and-accumulate (0x0E) */
5501 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5502 } else {
5503 /* multiply-and-accumulate (0x0C) */
5504 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5507 if (opc3 & 0x12) {
5508 /* Check overflow and/or saturate */
5509 int l1 = gen_new_label();
5511 if (opc3 & 0x10) {
5512 /* Start with XER OV disabled, the most likely case */
5513 tcg_gen_movi_tl(cpu_ov, 0);
5515 if (opc3 & 0x01) {
5516 /* Signed */
5517 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5518 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5519 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5520 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5521 if (opc3 & 0x02) {
5522 /* Saturate */
5523 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5524 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5526 } else {
5527 /* Unsigned */
5528 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5529 if (opc3 & 0x02) {
5530 /* Saturate */
5531 tcg_gen_movi_tl(t0, UINT32_MAX);
5534 if (opc3 & 0x10) {
5535 /* Check overflow */
5536 tcg_gen_movi_tl(cpu_ov, 1);
5537 tcg_gen_movi_tl(cpu_so, 1);
5539 gen_set_label(l1);
5540 tcg_gen_mov_tl(cpu_gpr[rt], t0);
5542 } else {
5543 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5545 tcg_temp_free(t0);
5546 tcg_temp_free(t1);
5547 if (unlikely(Rc != 0)) {
5548 /* Update Rc0 */
5549 gen_set_Rc0(ctx, cpu_gpr[rt]);
5553 #define GEN_MAC_HANDLER(name, opc2, opc3) \
5554 static void glue(gen_, name)(DisasContext *ctx) \
5556 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
5557 rD(ctx->opcode), Rc(ctx->opcode)); \
5560 /* macchw - macchw. */
5561 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5562 /* macchwo - macchwo. */
5563 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5564 /* macchws - macchws. */
5565 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5566 /* macchwso - macchwso. */
5567 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5568 /* macchwsu - macchwsu. */
5569 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5570 /* macchwsuo - macchwsuo. */
5571 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5572 /* macchwu - macchwu. */
5573 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5574 /* macchwuo - macchwuo. */
5575 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5576 /* machhw - machhw. */
5577 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5578 /* machhwo - machhwo. */
5579 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5580 /* machhws - machhws. */
5581 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5582 /* machhwso - machhwso. */
5583 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5584 /* machhwsu - machhwsu. */
5585 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5586 /* machhwsuo - machhwsuo. */
5587 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5588 /* machhwu - machhwu. */
5589 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5590 /* machhwuo - machhwuo. */
5591 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5592 /* maclhw - maclhw. */
5593 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5594 /* maclhwo - maclhwo. */
5595 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5596 /* maclhws - maclhws. */
5597 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5598 /* maclhwso - maclhwso. */
5599 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5600 /* maclhwu - maclhwu. */
5601 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5602 /* maclhwuo - maclhwuo. */
5603 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5604 /* maclhwsu - maclhwsu. */
5605 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5606 /* maclhwsuo - maclhwsuo. */
5607 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5608 /* nmacchw - nmacchw. */
5609 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5610 /* nmacchwo - nmacchwo. */
5611 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5612 /* nmacchws - nmacchws. */
5613 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5614 /* nmacchwso - nmacchwso. */
5615 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5616 /* nmachhw - nmachhw. */
5617 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5618 /* nmachhwo - nmachhwo. */
5619 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5620 /* nmachhws - nmachhws. */
5621 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5622 /* nmachhwso - nmachhwso. */
5623 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5624 /* nmaclhw - nmaclhw. */
5625 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5626 /* nmaclhwo - nmaclhwo. */
5627 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5628 /* nmaclhws - nmaclhws. */
5629 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5630 /* nmaclhwso - nmaclhwso. */
5631 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5633 /* mulchw - mulchw. */
5634 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5635 /* mulchwu - mulchwu. */
5636 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5637 /* mulhhw - mulhhw. */
5638 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5639 /* mulhhwu - mulhhwu. */
5640 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5641 /* mullhw - mullhw. */
5642 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5643 /* mullhwu - mullhwu. */
5644 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5646 /* mfdcr */
5647 static void gen_mfdcr(DisasContext *ctx)
5649 #if defined(CONFIG_USER_ONLY)
5650 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5651 #else
5652 TCGv dcrn;
5653 if (unlikely(!ctx->mem_idx)) {
5654 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5655 return;
5657 /* NIP cannot be restored if the memory exception comes from a helper */
5658 gen_update_nip(ctx, ctx->nip - 4);
5659 dcrn = tcg_const_tl(SPR(ctx->opcode));
5660 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
5661 tcg_temp_free(dcrn);
5662 #endif
5665 /* mtdcr */
5666 static void gen_mtdcr(DisasContext *ctx)
5668 #if defined(CONFIG_USER_ONLY)
5669 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5670 #else
5671 TCGv dcrn;
5672 if (unlikely(!ctx->mem_idx)) {
5673 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5674 return;
5676 /* NIP cannot be restored if the memory exception comes from a helper */
5677 gen_update_nip(ctx, ctx->nip - 4);
5678 dcrn = tcg_const_tl(SPR(ctx->opcode));
5679 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
5680 tcg_temp_free(dcrn);
5681 #endif
5684 /* mfdcrx */
5685 /* XXX: not implemented on 440 ? */
5686 static void gen_mfdcrx(DisasContext *ctx)
5688 #if defined(CONFIG_USER_ONLY)
5689 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5690 #else
5691 if (unlikely(!ctx->mem_idx)) {
5692 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5693 return;
5695 /* NIP cannot be restored if the memory exception comes from a helper */
5696 gen_update_nip(ctx, ctx->nip - 4);
5697 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5698 cpu_gpr[rA(ctx->opcode)]);
5699 /* Note: if the Rc flag is set, the resulting state of Rc0 is undefined */
5700 #endif
5703 /* mtdcrx */
5704 /* XXX: not implemented on 440 ? */
5705 static void gen_mtdcrx(DisasContext *ctx)
5707 #if defined(CONFIG_USER_ONLY)
5708 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5709 #else
5710 if (unlikely(!ctx->mem_idx)) {
5711 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5712 return;
5714 /* NIP cannot be restored if the memory exception comes from a helper */
5715 gen_update_nip(ctx, ctx->nip - 4);
5716 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5717 cpu_gpr[rS(ctx->opcode)]);
5718 /* Note: if the Rc flag is set, the resulting state of Rc0 is undefined */
5719 #endif
5722 /* mfdcrux (PPC 460): user-mode access to DCR */
5723 static void gen_mfdcrux(DisasContext *ctx)
5725 /* NIP cannot be restored if the memory exception comes from a helper */
5726 gen_update_nip(ctx, ctx->nip - 4);
5727 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5728 cpu_gpr[rA(ctx->opcode)]);
5729 /* Note: if the Rc flag is set, the resulting state of Rc0 is undefined */
5732 /* mtdcrux (PPC 460): user-mode access to DCR */
5733 static void gen_mtdcrux(DisasContext *ctx)
5735 /* NIP cannot be restored if the memory exception comes from a helper */
5736 gen_update_nip(ctx, ctx->nip - 4);
5737 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5738 cpu_gpr[rS(ctx->opcode)]);
5739 /* Note: if the Rc flag is set, the resulting state of Rc0 is undefined */
5742 /* dccci */
5743 static void gen_dccci(DisasContext *ctx)
5745 #if defined(CONFIG_USER_ONLY)
5746 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5747 #else
5748 if (unlikely(!ctx->mem_idx)) {
5749 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5750 return;
5752 /* interpreted as no-op */
5753 #endif
5756 /* dcread */
5757 static void gen_dcread(DisasContext *ctx)
5759 #if defined(CONFIG_USER_ONLY)
5760 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5761 #else
5762 TCGv EA, val;
5763 if (unlikely(!ctx->mem_idx)) {
5764 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5765 return;
5767 gen_set_access_type(ctx, ACCESS_CACHE);
5768 EA = tcg_temp_new();
5769 gen_addr_reg_index(ctx, EA);
5770 val = tcg_temp_new();
5771 gen_qemu_ld32u(ctx, val, EA);
5772 tcg_temp_free(val);
5773 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5774 tcg_temp_free(EA);
5775 #endif
5778 /* icbt */
5779 static void gen_icbt_40x(DisasContext *ctx)
5781 /* interpreted as no-op */
5782 /* XXX: specification says this is treated as a load by the MMU
5783 * but does not generate any exception
5784 */
5787 /* iccci */
5788 static void gen_iccci(DisasContext *ctx)
5790 #if defined(CONFIG_USER_ONLY)
5791 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5792 #else
5793 if (unlikely(!ctx->mem_idx)) {
5794 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5795 return;
5797 /* interpreted as no-op */
5798 #endif
5801 /* icread */
5802 static void gen_icread(DisasContext *ctx)
5804 #if defined(CONFIG_USER_ONLY)
5805 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5806 #else
5807 if (unlikely(!ctx->mem_idx)) {
5808 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5809 return;
5811 /* interpreted as no-op */
5812 #endif
5815 /* rfci (mem_idx only) */
5816 static void gen_rfci_40x(DisasContext *ctx)
5818 #if defined(CONFIG_USER_ONLY)
5819 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5820 #else
5821 if (unlikely(!ctx->mem_idx)) {
5822 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5823 return;
5825 /* Restore CPU state */
5826 gen_helper_40x_rfci(cpu_env);
5827 gen_sync_exception(ctx);
5828 #endif
5831 static void gen_rfci(DisasContext *ctx)
5833 #if defined(CONFIG_USER_ONLY)
5834 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5835 #else
5836 if (unlikely(!ctx->mem_idx)) {
5837 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5838 return;
5840 /* Restore CPU state */
5841 gen_helper_rfci(cpu_env);
5842 gen_sync_exception(ctx);
5843 #endif
5846 /* BookE specific */
5848 /* XXX: not implemented on 440 ? */
5849 static void gen_rfdi(DisasContext *ctx)
5851 #if defined(CONFIG_USER_ONLY)
5852 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5853 #else
5854 if (unlikely(!ctx->mem_idx)) {
5855 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5856 return;
5858 /* Restore CPU state */
5859 gen_helper_rfdi(cpu_env);
5860 gen_sync_exception(ctx);
5861 #endif
5864 /* XXX: not implemented on 440 ? */
5865 static void gen_rfmci(DisasContext *ctx)
5867 #if defined(CONFIG_USER_ONLY)
5868 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5869 #else
5870 if (unlikely(!ctx->mem_idx)) {
5871 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5872 return;
5874 /* Restore CPU state */
5875 gen_helper_rfmci(cpu_env);
5876 gen_sync_exception(ctx);
5877 #endif
5880 /* TLB management - PowerPC 405 implementation */
5882 /* tlbre */
5883 static void gen_tlbre_40x(DisasContext *ctx)
5885 #if defined(CONFIG_USER_ONLY)
5886 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5887 #else
5888 if (unlikely(!ctx->mem_idx)) {
5889 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5890 return;
5892 switch (rB(ctx->opcode)) {
5893 case 0:
5894 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
5895 cpu_gpr[rA(ctx->opcode)]);
5896 break;
5897 case 1:
5898 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
5899 cpu_gpr[rA(ctx->opcode)]);
5900 break;
5901 default:
5902 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5903 break;
5905 #endif
5908 /* tlbsx - tlbsx. */
5909 static void gen_tlbsx_40x(DisasContext *ctx)
5911 #if defined(CONFIG_USER_ONLY)
5912 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5913 #else
5914 TCGv t0;
5915 if (unlikely(!ctx->mem_idx)) {
5916 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5917 return;
5919 t0 = tcg_temp_new();
5920 gen_addr_reg_index(ctx, t0);
5921 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5922 tcg_temp_free(t0);
5923 if (Rc(ctx->opcode)) {
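/* With Rc=1, CR0 reflects SO, and CR0[EQ] is set when the search found a
 * matching TLB entry (rD != -1).
 */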
5924 int l1 = gen_new_label();
5925 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5926 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5927 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5928 gen_set_label(l1);
5930 #endif
5933 /* tlbwe */
5934 static void gen_tlbwe_40x(DisasContext *ctx)
5936 #if defined(CONFIG_USER_ONLY)
5937 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5938 #else
5939 if (unlikely(!ctx->mem_idx)) {
5940 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5941 return;
5943 switch (rB(ctx->opcode)) {
5944 case 0:
5945 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
5946 cpu_gpr[rS(ctx->opcode)]);
5947 break;
5948 case 1:
5949 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
5950 cpu_gpr[rS(ctx->opcode)]);
5951 break;
5952 default:
5953 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5954 break;
5956 #endif
5959 /* TLB management - PowerPC 440 implementation */
5961 /* tlbre */
5962 static void gen_tlbre_440(DisasContext *ctx)
5964 #if defined(CONFIG_USER_ONLY)
5965 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5966 #else
5967 if (unlikely(!ctx->mem_idx)) {
5968 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5969 return;
5971 switch (rB(ctx->opcode)) {
5972 case 0:
5973 case 1:
5974 case 2:
5976 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5977 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
5978 t0, cpu_gpr[rA(ctx->opcode)]);
5979 tcg_temp_free_i32(t0);
5981 break;
5982 default:
5983 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5984 break;
5986 #endif
5989 /* tlbsx - tlbsx. */
5990 static void gen_tlbsx_440(DisasContext *ctx)
5992 #if defined(CONFIG_USER_ONLY)
5993 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5994 #else
5995 TCGv t0;
5996 if (unlikely(!ctx->mem_idx)) {
5997 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5998 return;
6000 t0 = tcg_temp_new();
6001 gen_addr_reg_index(ctx, t0);
6002 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6003 tcg_temp_free(t0);
6004 if (Rc(ctx->opcode)) {
6005 int l1 = gen_new_label();
6006 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
6007 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6008 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6009 gen_set_label(l1);
6011 #endif
6014 /* tlbwe */
6015 static void gen_tlbwe_440(DisasContext *ctx)
6017 #if defined(CONFIG_USER_ONLY)
6018 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6019 #else
6020 if (unlikely(!ctx->mem_idx)) {
6021 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6022 return;
6024 switch (rB(ctx->opcode)) {
6025 case 0:
6026 case 1:
6027 case 2:
6029 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6030 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
6031 cpu_gpr[rS(ctx->opcode)]);
6032 tcg_temp_free_i32(t0);
6034 break;
6035 default:
6036 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6037 break;
6039 #endif
6042 /* TLB management - PowerPC BookE 2.06 implementation */
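/* The BookE 2.06 tlbre/tlbwe helpers exchange all data through the MAS
 * registers held in the CPU state, so only cpu_env is passed; tlbsx
 * computes EA = (rA|0) + rB itself, and NIP is synced before tlbwe so the
 * helper can raise an exception with the correct PC. */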
6044 /* tlbre */
6045 static void gen_tlbre_booke206(DisasContext *ctx)
6047 #if defined(CONFIG_USER_ONLY)
6048 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6049 #else
6050 if (unlikely(!ctx->mem_idx)) {
6051 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6052 return;
6055 gen_helper_booke206_tlbre(cpu_env);
6056 #endif
6059 /* tlbsx - tlbsx. */
6060 static void gen_tlbsx_booke206(DisasContext *ctx)
6062 #if defined(CONFIG_USER_ONLY)
6063 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6064 #else
6065 TCGv t0;
6066 if (unlikely(!ctx->mem_idx)) {
6067 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6068 return;
6071 if (rA(ctx->opcode)) {
6072 t0 = tcg_temp_new();
6073 tcg_gen_mov_tl(t0, cpu_gpr[rA(ctx->opcode)]);
6074 } else {
6075 t0 = tcg_const_tl(0);
6078 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
6079 gen_helper_booke206_tlbsx(cpu_env, t0);
6080 #endif
6083 /* tlbwe */
6084 static void gen_tlbwe_booke206(DisasContext *ctx)
6086 #if defined(CONFIG_USER_ONLY)
6087 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6088 #else
6089 if (unlikely(!ctx->mem_idx)) {
6090 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6091 return;
6093 gen_update_nip(ctx, ctx->nip - 4);
6094 gen_helper_booke206_tlbwe(cpu_env);
6095 #endif
6098 static void gen_tlbivax_booke206(DisasContext *ctx)
6100 #if defined(CONFIG_USER_ONLY)
6101 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6102 #else
6103 TCGv t0;
6104 if (unlikely(!ctx->mem_idx)) {
6105 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6106 return;
6109 t0 = tcg_temp_new();
6110 gen_addr_reg_index(ctx, t0);
6112 gen_helper_booke206_tlbivax(cpu_env, t0);
6113 #endif
6116 static void gen_tlbilx_booke206(DisasContext *ctx)
6118 #if defined(CONFIG_USER_ONLY)
6119 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6120 #else
6121 TCGv t0;
6122 if (unlikely(!ctx->mem_idx)) {
6123 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6124 return;
6127 t0 = tcg_temp_new();
6128 gen_addr_reg_index(ctx, t0);
6130 switch((ctx->opcode >> 21) & 0x3) {
6131 case 0:
6132 gen_helper_booke206_tlbilx0(cpu_env, t0);
6133 break;
6134 case 1:
6135 gen_helper_booke206_tlbilx1(cpu_env, t0);
6136 break;
6137 case 3:
6138 gen_helper_booke206_tlbilx3(cpu_env, t0);
6139 break;
6140 default:
6141 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6142 break;
6145 tcg_temp_free(t0);
6146 #endif
6150 /* wrtee */
6151 static void gen_wrtee(DisasContext *ctx)
6153 #if defined(CONFIG_USER_ONLY)
6154 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6155 #else
6156 TCGv t0;
6157 if (unlikely(!ctx->mem_idx)) {
6158 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6159 return;
6161 t0 = tcg_temp_new();
6162 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
6163 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6164 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
6165 tcg_temp_free(t0);
6166 /* Stop translation to have a chance to raise an exception
6167 * if we just set msr_ee to 1
6169 gen_stop_exception(ctx);
6170 #endif
6173 /* wrteei */
6174 static void gen_wrteei(DisasContext *ctx)
6176 #if defined(CONFIG_USER_ONLY)
6177 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6178 #else
6179 if (unlikely(!ctx->mem_idx)) {
6180 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6181 return;
6183 if (ctx->opcode & 0x00008000) {
6184 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6185 /* Stop translation to have a chance to raise an exception */
6186 gen_stop_exception(ctx);
6187 } else {
6188 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6190 #endif
6193 /* PowerPC 440 specific instructions */
6195 /* dlmzb */
6196 static void gen_dlmzb(DisasContext *ctx)
6198 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6199 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
6200 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
6201 tcg_temp_free_i32(t0);
6204 /* mbar replaces eieio on 440 */
6205 static void gen_mbar(DisasContext *ctx)
6207 /* interpreted as no-op */
6210 /* msync replaces sync on 440 */
6211 static void gen_msync_4xx(DisasContext *ctx)
6213 /* interpreted as no-op */
6216 /* icbt */
6217 static void gen_icbt_440(DisasContext *ctx)
6219 /* interpreted as no-op */
6220 /* XXX: specification says this is treated as a load by the MMU
6221 * but does not generate any exception
6225 /* Embedded.Processor Control */
6227 static void gen_msgclr(DisasContext *ctx)
6229 #if defined(CONFIG_USER_ONLY)
6230 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6231 #else
6232 if (unlikely(ctx->mem_idx == 0)) {
6233 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6234 return;
6237 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6238 #endif
6241 static void gen_msgsnd(DisasContext *ctx)
6243 #if defined(CONFIG_USER_ONLY)
6244 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6245 #else
6246 if (unlikely(ctx->mem_idx == 0)) {
6247 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6248 return;
6251 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
6252 #endif
6255 /*** Altivec vector extension ***/
6256 /* Altivec registers moves */
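/* gen_avr_ptr() builds a host pointer to env->avr[reg]; the AltiVec
 * helpers below take such TCGv_ptr operands instead of loading the 128-bit
 * values into TCG temporaries. */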
6258 static inline TCGv_ptr gen_avr_ptr(int reg)
6260 TCGv_ptr r = tcg_temp_new_ptr();
6261 tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, avr[reg]));
6262 return r;
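/* The lvx/stvx patterns below force 16-byte alignment by masking the low
 * EA bits and move the vector as two 64-bit halves, accessing the halves
 * in the opposite order when running in little-endian mode. */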
6265 #define GEN_VR_LDX(name, opc2, opc3) \
6266 static void glue(gen_, name)(DisasContext *ctx) \
6268 TCGv EA; \
6269 if (unlikely(!ctx->altivec_enabled)) { \
6270 gen_exception(ctx, POWERPC_EXCP_VPU); \
6271 return; \
6273 gen_set_access_type(ctx, ACCESS_INT); \
6274 EA = tcg_temp_new(); \
6275 gen_addr_reg_index(ctx, EA); \
6276 tcg_gen_andi_tl(EA, EA, ~0xf); \
6277 if (ctx->le_mode) { \
6278 gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6279 tcg_gen_addi_tl(EA, EA, 8); \
6280 gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6281 } else { \
6282 gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6283 tcg_gen_addi_tl(EA, EA, 8); \
6284 gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6286 tcg_temp_free(EA); \
6289 #define GEN_VR_STX(name, opc2, opc3) \
6290 static void gen_st##name(DisasContext *ctx) \
6292 TCGv EA; \
6293 if (unlikely(!ctx->altivec_enabled)) { \
6294 gen_exception(ctx, POWERPC_EXCP_VPU); \
6295 return; \
6297 gen_set_access_type(ctx, ACCESS_INT); \
6298 EA = tcg_temp_new(); \
6299 gen_addr_reg_index(ctx, EA); \
6300 tcg_gen_andi_tl(EA, EA, ~0xf); \
6301 if (ctx->le_mode) { \
6302 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6303 tcg_gen_addi_tl(EA, EA, 8); \
6304 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6305 } else { \
6306 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6307 tcg_gen_addi_tl(EA, EA, 8); \
6308 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6310 tcg_temp_free(EA); \
6313 #define GEN_VR_LVE(name, opc2, opc3) \
6314 static void gen_lve##name(DisasContext *ctx) \
6316 TCGv EA; \
6317 TCGv_ptr rs; \
6318 if (unlikely(!ctx->altivec_enabled)) { \
6319 gen_exception(ctx, POWERPC_EXCP_VPU); \
6320 return; \
6322 gen_set_access_type(ctx, ACCESS_INT); \
6323 EA = tcg_temp_new(); \
6324 gen_addr_reg_index(ctx, EA); \
6325 rs = gen_avr_ptr(rS(ctx->opcode)); \
6326 gen_helper_lve##name(cpu_env, rs, EA); \
6327 tcg_temp_free(EA); \
6328 tcg_temp_free_ptr(rs); \
6331 #define GEN_VR_STVE(name, opc2, opc3) \
6332 static void gen_stve##name(DisasContext *ctx) \
6334 TCGv EA; \
6335 TCGv_ptr rs; \
6336 if (unlikely(!ctx->altivec_enabled)) { \
6337 gen_exception(ctx, POWERPC_EXCP_VPU); \
6338 return; \
6340 gen_set_access_type(ctx, ACCESS_INT); \
6341 EA = tcg_temp_new(); \
6342 gen_addr_reg_index(ctx, EA); \
6343 rs = gen_avr_ptr(rS(ctx->opcode)); \
6344 gen_helper_stve##name(cpu_env, rs, EA); \
6345 tcg_temp_free(EA); \
6346 tcg_temp_free_ptr(rs); \
6349 GEN_VR_LDX(lvx, 0x07, 0x03);
6350 /* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
6351 GEN_VR_LDX(lvxl, 0x07, 0x0B);
6353 GEN_VR_LVE(bx, 0x07, 0x00);
6354 GEN_VR_LVE(hx, 0x07, 0x01);
6355 GEN_VR_LVE(wx, 0x07, 0x02);
6357 GEN_VR_STX(svx, 0x07, 0x07);
6358 /* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
6359 GEN_VR_STX(svxl, 0x07, 0x0F);
6361 GEN_VR_STVE(bx, 0x07, 0x04);
6362 GEN_VR_STVE(hx, 0x07, 0x05);
6363 GEN_VR_STVE(wx, 0x07, 0x06);
6365 static void gen_lvsl(DisasContext *ctx)
6367 TCGv_ptr rd;
6368 TCGv EA;
6369 if (unlikely(!ctx->altivec_enabled)) {
6370 gen_exception(ctx, POWERPC_EXCP_VPU);
6371 return;
6373 EA = tcg_temp_new();
6374 gen_addr_reg_index(ctx, EA);
6375 rd = gen_avr_ptr(rD(ctx->opcode));
6376 gen_helper_lvsl(rd, EA);
6377 tcg_temp_free(EA);
6378 tcg_temp_free_ptr(rd);
6381 static void gen_lvsr(DisasContext *ctx)
6383 TCGv_ptr rd;
6384 TCGv EA;
6385 if (unlikely(!ctx->altivec_enabled)) {
6386 gen_exception(ctx, POWERPC_EXCP_VPU);
6387 return;
6389 EA = tcg_temp_new();
6390 gen_addr_reg_index(ctx, EA);
6391 rd = gen_avr_ptr(rD(ctx->opcode));
6392 gen_helper_lvsr(rd, EA);
6393 tcg_temp_free(EA);
6394 tcg_temp_free_ptr(rd);
6397 static void gen_mfvscr(DisasContext *ctx)
6399 TCGv_i32 t;
6400 if (unlikely(!ctx->altivec_enabled)) {
6401 gen_exception(ctx, POWERPC_EXCP_VPU);
6402 return;
6404 tcg_gen_movi_i64(cpu_avrh[rD(ctx->opcode)], 0);
6405 t = tcg_temp_new_i32();
6406 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, vscr));
6407 tcg_gen_extu_i32_i64(cpu_avrl[rD(ctx->opcode)], t);
6408 tcg_temp_free_i32(t);
6411 static void gen_mtvscr(DisasContext *ctx)
6413 TCGv_ptr p;
6414 if (unlikely(!ctx->altivec_enabled)) {
6415 gen_exception(ctx, POWERPC_EXCP_VPU);
6416 return;
6418 p = gen_avr_ptr(rD(ctx->opcode));
6419 gen_helper_mtvscr(cpu_env, p);
6420 tcg_temp_free_ptr(p);
6423 /* Logical operations */
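/* The vector logical operations are done inline as two 64-bit TCG ops on
 * the avrh/avrl halves, so no helper call is needed. */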
6424 #define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
6425 static void glue(gen_, name)(DisasContext *ctx) \
6427 if (unlikely(!ctx->altivec_enabled)) { \
6428 gen_exception(ctx, POWERPC_EXCP_VPU); \
6429 return; \
6431 tcg_op(cpu_avrh[rD(ctx->opcode)], cpu_avrh[rA(ctx->opcode)], cpu_avrh[rB(ctx->opcode)]); \
6432 tcg_op(cpu_avrl[rD(ctx->opcode)], cpu_avrl[rA(ctx->opcode)], cpu_avrl[rB(ctx->opcode)]); \
6435 GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16);
6436 GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17);
6437 GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18);
6438 GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19);
6439 GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20);
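/* GEN_VXFORM expands to a handler that calls a per-instruction helper with
 * AVR pointers; the _ENV variant also passes cpu_env for helpers that must
 * touch CPU state, e.g. the VSCR saturation bit or the FP status. */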
6441 #define GEN_VXFORM(name, opc2, opc3) \
6442 static void glue(gen_, name)(DisasContext *ctx) \
6444 TCGv_ptr ra, rb, rd; \
6445 if (unlikely(!ctx->altivec_enabled)) { \
6446 gen_exception(ctx, POWERPC_EXCP_VPU); \
6447 return; \
6449 ra = gen_avr_ptr(rA(ctx->opcode)); \
6450 rb = gen_avr_ptr(rB(ctx->opcode)); \
6451 rd = gen_avr_ptr(rD(ctx->opcode)); \
6452 gen_helper_##name (rd, ra, rb); \
6453 tcg_temp_free_ptr(ra); \
6454 tcg_temp_free_ptr(rb); \
6455 tcg_temp_free_ptr(rd); \
6458 #define GEN_VXFORM_ENV(name, opc2, opc3) \
6459 static void glue(gen_, name)(DisasContext *ctx) \
6461 TCGv_ptr ra, rb, rd; \
6462 if (unlikely(!ctx->altivec_enabled)) { \
6463 gen_exception(ctx, POWERPC_EXCP_VPU); \
6464 return; \
6466 ra = gen_avr_ptr(rA(ctx->opcode)); \
6467 rb = gen_avr_ptr(rB(ctx->opcode)); \
6468 rd = gen_avr_ptr(rD(ctx->opcode)); \
6469 gen_helper_##name(cpu_env, rd, ra, rb); \
6470 tcg_temp_free_ptr(ra); \
6471 tcg_temp_free_ptr(rb); \
6472 tcg_temp_free_ptr(rd); \
6475 GEN_VXFORM(vaddubm, 0, 0);
6476 GEN_VXFORM(vadduhm, 0, 1);
6477 GEN_VXFORM(vadduwm, 0, 2);
6478 GEN_VXFORM(vsububm, 0, 16);
6479 GEN_VXFORM(vsubuhm, 0, 17);
6480 GEN_VXFORM(vsubuwm, 0, 18);
6481 GEN_VXFORM(vmaxub, 1, 0);
6482 GEN_VXFORM(vmaxuh, 1, 1);
6483 GEN_VXFORM(vmaxuw, 1, 2);
6484 GEN_VXFORM(vmaxsb, 1, 4);
6485 GEN_VXFORM(vmaxsh, 1, 5);
6486 GEN_VXFORM(vmaxsw, 1, 6);
6487 GEN_VXFORM(vminub, 1, 8);
6488 GEN_VXFORM(vminuh, 1, 9);
6489 GEN_VXFORM(vminuw, 1, 10);
6490 GEN_VXFORM(vminsb, 1, 12);
6491 GEN_VXFORM(vminsh, 1, 13);
6492 GEN_VXFORM(vminsw, 1, 14);
6493 GEN_VXFORM(vavgub, 1, 16);
6494 GEN_VXFORM(vavguh, 1, 17);
6495 GEN_VXFORM(vavguw, 1, 18);
6496 GEN_VXFORM(vavgsb, 1, 20);
6497 GEN_VXFORM(vavgsh, 1, 21);
6498 GEN_VXFORM(vavgsw, 1, 22);
6499 GEN_VXFORM(vmrghb, 6, 0);
6500 GEN_VXFORM(vmrghh, 6, 1);
6501 GEN_VXFORM(vmrghw, 6, 2);
6502 GEN_VXFORM(vmrglb, 6, 4);
6503 GEN_VXFORM(vmrglh, 6, 5);
6504 GEN_VXFORM(vmrglw, 6, 6);
6505 GEN_VXFORM(vmuloub, 4, 0);
6506 GEN_VXFORM(vmulouh, 4, 1);
6507 GEN_VXFORM(vmulosb, 4, 4);
6508 GEN_VXFORM(vmulosh, 4, 5);
6509 GEN_VXFORM(vmuleub, 4, 8);
6510 GEN_VXFORM(vmuleuh, 4, 9);
6511 GEN_VXFORM(vmulesb, 4, 12);
6512 GEN_VXFORM(vmulesh, 4, 13);
6513 GEN_VXFORM(vslb, 2, 4);
6514 GEN_VXFORM(vslh, 2, 5);
6515 GEN_VXFORM(vslw, 2, 6);
6516 GEN_VXFORM(vsrb, 2, 8);
6517 GEN_VXFORM(vsrh, 2, 9);
6518 GEN_VXFORM(vsrw, 2, 10);
6519 GEN_VXFORM(vsrab, 2, 12);
6520 GEN_VXFORM(vsrah, 2, 13);
6521 GEN_VXFORM(vsraw, 2, 14);
6522 GEN_VXFORM(vslo, 6, 16);
6523 GEN_VXFORM(vsro, 6, 17);
6524 GEN_VXFORM(vaddcuw, 0, 6);
6525 GEN_VXFORM(vsubcuw, 0, 22);
6526 GEN_VXFORM_ENV(vaddubs, 0, 8);
6527 GEN_VXFORM_ENV(vadduhs, 0, 9);
6528 GEN_VXFORM_ENV(vadduws, 0, 10);
6529 GEN_VXFORM_ENV(vaddsbs, 0, 12);
6530 GEN_VXFORM_ENV(vaddshs, 0, 13);
6531 GEN_VXFORM_ENV(vaddsws, 0, 14);
6532 GEN_VXFORM_ENV(vsububs, 0, 24);
6533 GEN_VXFORM_ENV(vsubuhs, 0, 25);
6534 GEN_VXFORM_ENV(vsubuws, 0, 26);
6535 GEN_VXFORM_ENV(vsubsbs, 0, 28);
6536 GEN_VXFORM_ENV(vsubshs, 0, 29);
6537 GEN_VXFORM_ENV(vsubsws, 0, 30);
6538 GEN_VXFORM(vrlb, 2, 0);
6539 GEN_VXFORM(vrlh, 2, 1);
6540 GEN_VXFORM(vrlw, 2, 2);
6541 GEN_VXFORM(vsl, 2, 7);
6542 GEN_VXFORM(vsr, 2, 11);
6543 GEN_VXFORM_ENV(vpkuhum, 7, 0);
6544 GEN_VXFORM_ENV(vpkuwum, 7, 1);
6545 GEN_VXFORM_ENV(vpkuhus, 7, 2);
6546 GEN_VXFORM_ENV(vpkuwus, 7, 3);
6547 GEN_VXFORM_ENV(vpkshus, 7, 4);
6548 GEN_VXFORM_ENV(vpkswus, 7, 5);
6549 GEN_VXFORM_ENV(vpkshss, 7, 6);
6550 GEN_VXFORM_ENV(vpkswss, 7, 7);
6551 GEN_VXFORM(vpkpx, 7, 12);
6552 GEN_VXFORM_ENV(vsum4ubs, 4, 24);
6553 GEN_VXFORM_ENV(vsum4sbs, 4, 28);
6554 GEN_VXFORM_ENV(vsum4shs, 4, 25);
6555 GEN_VXFORM_ENV(vsum2sws, 4, 26);
6556 GEN_VXFORM_ENV(vsumsws, 4, 30);
6557 GEN_VXFORM_ENV(vaddfp, 5, 0);
6558 GEN_VXFORM_ENV(vsubfp, 5, 1);
6559 GEN_VXFORM_ENV(vmaxfp, 5, 16);
6560 GEN_VXFORM_ENV(vminfp, 5, 17);
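/* GEN_VXRFORM emits both the plain compare and its record (Rc=1) form; the
 * dotted variant uses the helper with the '_dot' suffix and sets bit 4 of
 * opc3, and its helper also updates CR6. */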
6562 #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
6563 static void glue(gen_, name)(DisasContext *ctx) \
6565 TCGv_ptr ra, rb, rd; \
6566 if (unlikely(!ctx->altivec_enabled)) { \
6567 gen_exception(ctx, POWERPC_EXCP_VPU); \
6568 return; \
6570 ra = gen_avr_ptr(rA(ctx->opcode)); \
6571 rb = gen_avr_ptr(rB(ctx->opcode)); \
6572 rd = gen_avr_ptr(rD(ctx->opcode)); \
6573 gen_helper_##opname(cpu_env, rd, ra, rb); \
6574 tcg_temp_free_ptr(ra); \
6575 tcg_temp_free_ptr(rb); \
6576 tcg_temp_free_ptr(rd); \
6579 #define GEN_VXRFORM(name, opc2, opc3) \
6580 GEN_VXRFORM1(name, name, #name, opc2, opc3) \
6581 GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
6583 GEN_VXRFORM(vcmpequb, 3, 0)
6584 GEN_VXRFORM(vcmpequh, 3, 1)
6585 GEN_VXRFORM(vcmpequw, 3, 2)
6586 GEN_VXRFORM(vcmpgtsb, 3, 12)
6587 GEN_VXRFORM(vcmpgtsh, 3, 13)
6588 GEN_VXRFORM(vcmpgtsw, 3, 14)
6589 GEN_VXRFORM(vcmpgtub, 3, 8)
6590 GEN_VXRFORM(vcmpgtuh, 3, 9)
6591 GEN_VXRFORM(vcmpgtuw, 3, 10)
6592 GEN_VXRFORM(vcmpeqfp, 3, 3)
6593 GEN_VXRFORM(vcmpgefp, 3, 7)
6594 GEN_VXRFORM(vcmpgtfp, 3, 11)
6595 GEN_VXRFORM(vcmpbfp, 3, 15)
6597 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
6598 static void glue(gen_, name)(DisasContext *ctx) \
6600 TCGv_ptr rd; \
6601 TCGv_i32 simm; \
6602 if (unlikely(!ctx->altivec_enabled)) { \
6603 gen_exception(ctx, POWERPC_EXCP_VPU); \
6604 return; \
6606 simm = tcg_const_i32(SIMM5(ctx->opcode)); \
6607 rd = gen_avr_ptr(rD(ctx->opcode)); \
6608 gen_helper_##name (rd, simm); \
6609 tcg_temp_free_i32(simm); \
6610 tcg_temp_free_ptr(rd); \
6613 GEN_VXFORM_SIMM(vspltisb, 6, 12);
6614 GEN_VXFORM_SIMM(vspltish, 6, 13);
6615 GEN_VXFORM_SIMM(vspltisw, 6, 14);
6617 #define GEN_VXFORM_NOA(name, opc2, opc3) \
6618 static void glue(gen_, name)(DisasContext *ctx) \
6620 TCGv_ptr rb, rd; \
6621 if (unlikely(!ctx->altivec_enabled)) { \
6622 gen_exception(ctx, POWERPC_EXCP_VPU); \
6623 return; \
6625 rb = gen_avr_ptr(rB(ctx->opcode)); \
6626 rd = gen_avr_ptr(rD(ctx->opcode)); \
6627 gen_helper_##name (rd, rb); \
6628 tcg_temp_free_ptr(rb); \
6629 tcg_temp_free_ptr(rd); \
6632 #define GEN_VXFORM_NOA_ENV(name, opc2, opc3) \
6633 static void glue(gen_, name)(DisasContext *ctx) \
6635 TCGv_ptr rb, rd; \
6637 if (unlikely(!ctx->altivec_enabled)) { \
6638 gen_exception(ctx, POWERPC_EXCP_VPU); \
6639 return; \
6641 rb = gen_avr_ptr(rB(ctx->opcode)); \
6642 rd = gen_avr_ptr(rD(ctx->opcode)); \
6643 gen_helper_##name(cpu_env, rd, rb); \
6644 tcg_temp_free_ptr(rb); \
6645 tcg_temp_free_ptr(rd); \
6648 GEN_VXFORM_NOA(vupkhsb, 7, 8);
6649 GEN_VXFORM_NOA(vupkhsh, 7, 9);
6650 GEN_VXFORM_NOA(vupklsb, 7, 10);
6651 GEN_VXFORM_NOA(vupklsh, 7, 11);
6652 GEN_VXFORM_NOA(vupkhpx, 7, 13);
6653 GEN_VXFORM_NOA(vupklpx, 7, 15);
6654 GEN_VXFORM_NOA_ENV(vrefp, 5, 4);
6655 GEN_VXFORM_NOA_ENV(vrsqrtefp, 5, 5);
6656 GEN_VXFORM_NOA_ENV(vexptefp, 5, 6);
6657 GEN_VXFORM_NOA_ENV(vlogefp, 5, 7);
6658 GEN_VXFORM_NOA_ENV(vrfim, 5, 8);
6659 GEN_VXFORM_NOA_ENV(vrfin, 5, 9);
6660 GEN_VXFORM_NOA_ENV(vrfip, 5, 10);
6661 GEN_VXFORM_NOA_ENV(vrfiz, 5, 11);
6663 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
6664 static void glue(gen_, name)(DisasContext *ctx) \
6666 TCGv_ptr rd; \
6667 TCGv_i32 simm; \
6668 if (unlikely(!ctx->altivec_enabled)) { \
6669 gen_exception(ctx, POWERPC_EXCP_VPU); \
6670 return; \
6672 simm = tcg_const_i32(SIMM5(ctx->opcode)); \
6673 rd = gen_avr_ptr(rD(ctx->opcode)); \
6674 gen_helper_##name (rd, simm); \
6675 tcg_temp_free_i32(simm); \
6676 tcg_temp_free_ptr(rd); \
6679 #define GEN_VXFORM_UIMM(name, opc2, opc3) \
6680 static void glue(gen_, name)(DisasContext *ctx) \
6682 TCGv_ptr rb, rd; \
6683 TCGv_i32 uimm; \
6684 if (unlikely(!ctx->altivec_enabled)) { \
6685 gen_exception(ctx, POWERPC_EXCP_VPU); \
6686 return; \
6688 uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
6689 rb = gen_avr_ptr(rB(ctx->opcode)); \
6690 rd = gen_avr_ptr(rD(ctx->opcode)); \
6691 gen_helper_##name (rd, rb, uimm); \
6692 tcg_temp_free_i32(uimm); \
6693 tcg_temp_free_ptr(rb); \
6694 tcg_temp_free_ptr(rd); \
6697 #define GEN_VXFORM_UIMM_ENV(name, opc2, opc3) \
6698 static void glue(gen_, name)(DisasContext *ctx) \
6700 TCGv_ptr rb, rd; \
6701 TCGv_i32 uimm; \
6703 if (unlikely(!ctx->altivec_enabled)) { \
6704 gen_exception(ctx, POWERPC_EXCP_VPU); \
6705 return; \
6707 uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
6708 rb = gen_avr_ptr(rB(ctx->opcode)); \
6709 rd = gen_avr_ptr(rD(ctx->opcode)); \
6710 gen_helper_##name(cpu_env, rd, rb, uimm); \
6711 tcg_temp_free_i32(uimm); \
6712 tcg_temp_free_ptr(rb); \
6713 tcg_temp_free_ptr(rd); \
6716 GEN_VXFORM_UIMM(vspltb, 6, 8);
6717 GEN_VXFORM_UIMM(vsplth, 6, 9);
6718 GEN_VXFORM_UIMM(vspltw, 6, 10);
6719 GEN_VXFORM_UIMM_ENV(vcfux, 5, 12);
6720 GEN_VXFORM_UIMM_ENV(vcfsx, 5, 13);
6721 GEN_VXFORM_UIMM_ENV(vctuxs, 5, 14);
6722 GEN_VXFORM_UIMM_ENV(vctsxs, 5, 15);
6724 static void gen_vsldoi(DisasContext *ctx)
6726 TCGv_ptr ra, rb, rd;
6727 TCGv_i32 sh;
6728 if (unlikely(!ctx->altivec_enabled)) {
6729 gen_exception(ctx, POWERPC_EXCP_VPU);
6730 return;
6732 ra = gen_avr_ptr(rA(ctx->opcode));
6733 rb = gen_avr_ptr(rB(ctx->opcode));
6734 rd = gen_avr_ptr(rD(ctx->opcode));
6735 sh = tcg_const_i32(VSH(ctx->opcode));
6736 gen_helper_vsldoi (rd, ra, rb, sh);
6737 tcg_temp_free_ptr(ra);
6738 tcg_temp_free_ptr(rb);
6739 tcg_temp_free_ptr(rd);
6740 tcg_temp_free_i32(sh);
6743 #define GEN_VAFORM_PAIRED(name0, name1, opc2) \
6744 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
6746 TCGv_ptr ra, rb, rc, rd; \
6747 if (unlikely(!ctx->altivec_enabled)) { \
6748 gen_exception(ctx, POWERPC_EXCP_VPU); \
6749 return; \
6751 ra = gen_avr_ptr(rA(ctx->opcode)); \
6752 rb = gen_avr_ptr(rB(ctx->opcode)); \
6753 rc = gen_avr_ptr(rC(ctx->opcode)); \
6754 rd = gen_avr_ptr(rD(ctx->opcode)); \
6755 if (Rc(ctx->opcode)) { \
6756 gen_helper_##name1(cpu_env, rd, ra, rb, rc); \
6757 } else { \
6758 gen_helper_##name0(cpu_env, rd, ra, rb, rc); \
6760 tcg_temp_free_ptr(ra); \
6761 tcg_temp_free_ptr(rb); \
6762 tcg_temp_free_ptr(rc); \
6763 tcg_temp_free_ptr(rd); \
6766 GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16)
6768 static void gen_vmladduhm(DisasContext *ctx)
6770 TCGv_ptr ra, rb, rc, rd;
6771 if (unlikely(!ctx->altivec_enabled)) {
6772 gen_exception(ctx, POWERPC_EXCP_VPU);
6773 return;
6775 ra = gen_avr_ptr(rA(ctx->opcode));
6776 rb = gen_avr_ptr(rB(ctx->opcode));
6777 rc = gen_avr_ptr(rC(ctx->opcode));
6778 rd = gen_avr_ptr(rD(ctx->opcode));
6779 gen_helper_vmladduhm(rd, ra, rb, rc);
6780 tcg_temp_free_ptr(ra);
6781 tcg_temp_free_ptr(rb);
6782 tcg_temp_free_ptr(rc);
6783 tcg_temp_free_ptr(rd);
6786 GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18)
6787 GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19)
6788 GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20)
6789 GEN_VAFORM_PAIRED(vsel, vperm, 21)
6790 GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23)
6792 /*** SPE extension ***/
6793 /* Register moves */
6796 static inline void gen_evmra(DisasContext *ctx)
6799 if (unlikely(!ctx->spe_enabled)) {
6800 gen_exception(ctx, POWERPC_EXCP_SPEU);
6801 return;
6804 #if defined(TARGET_PPC64)
6805 /* rD := rA */
6806 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6808 /* spe_acc := rA */
6809 tcg_gen_st_i64(cpu_gpr[rA(ctx->opcode)],
6810 cpu_env,
6811 offsetof(CPUPPCState, spe_acc));
6812 #else
6813 TCGv_i64 tmp = tcg_temp_new_i64();
6815 /* tmp := rA_lo + rA_hi << 32 */
6816 tcg_gen_concat_i32_i64(tmp, cpu_gpr[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6818 /* spe_acc := tmp */
6819 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));
6820 tcg_temp_free_i64(tmp);
6822 /* rD := rA */
6823 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6824 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6825 #endif
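/* Without TARGET_PPC64 a 64-bit SPE value is split across cpu_gpr (low
 * word) and cpu_gprh (high word); gen_load_gpr64()/gen_store_gpr64()
 * assemble and split it with concat/shift operations. */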
6828 static inline void gen_load_gpr64(TCGv_i64 t, int reg)
6830 #if defined(TARGET_PPC64)
6831 tcg_gen_mov_i64(t, cpu_gpr[reg]);
6832 #else
6833 tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
6834 #endif
6837 static inline void gen_store_gpr64(int reg, TCGv_i64 t)
6839 #if defined(TARGET_PPC64)
6840 tcg_gen_mov_i64(cpu_gpr[reg], t);
6841 #else
6842 TCGv_i64 tmp = tcg_temp_new_i64();
6843 tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
6844 tcg_gen_shri_i64(tmp, t, 32);
6845 tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
6846 tcg_temp_free_i64(tmp);
6847 #endif
6850 #define GEN_SPE(name0, name1, opc2, opc3, inval0, inval1, type) \
6851 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
6853 if (Rc(ctx->opcode)) \
6854 gen_##name1(ctx); \
6855 else \
6856 gen_##name0(ctx); \
6859 /* Handler for undefined SPE opcodes */
6860 static inline void gen_speundef(DisasContext *ctx)
6862 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6865 /* SPE logic */
6866 #if defined(TARGET_PPC64)
6867 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
6868 static inline void gen_##name(DisasContext *ctx) \
6870 if (unlikely(!ctx->spe_enabled)) { \
6871 gen_exception(ctx, POWERPC_EXCP_SPEU); \
6872 return; \
6874 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6875 cpu_gpr[rB(ctx->opcode)]); \
6877 #else
6878 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
6879 static inline void gen_##name(DisasContext *ctx) \
6881 if (unlikely(!ctx->spe_enabled)) { \
6882 gen_exception(ctx, POWERPC_EXCP_SPEU); \
6883 return; \
6885 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6886 cpu_gpr[rB(ctx->opcode)]); \
6887 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6888 cpu_gprh[rB(ctx->opcode)]); \
6890 #endif
6892 GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
6893 GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
6894 GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
6895 GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
6896 GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
6897 GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
6898 GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
6899 GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);
6901 /* SPE logic immediate */
6902 #if defined(TARGET_PPC64)
6903 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6904 static inline void gen_##name(DisasContext *ctx) \
6906 if (unlikely(!ctx->spe_enabled)) { \
6907 gen_exception(ctx, POWERPC_EXCP_SPEU); \
6908 return; \
6910 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6911 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6912 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6913 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6914 tcg_opi(t0, t0, rB(ctx->opcode)); \
6915 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6916 tcg_gen_trunc_i64_i32(t1, t2); \
6917 tcg_temp_free_i64(t2); \
6918 tcg_opi(t1, t1, rB(ctx->opcode)); \
6919 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6920 tcg_temp_free_i32(t0); \
6921 tcg_temp_free_i32(t1); \
6923 #else
6924 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6925 static inline void gen_##name(DisasContext *ctx) \
6927 if (unlikely(!ctx->spe_enabled)) { \
6928 gen_exception(ctx, POWERPC_EXCP_SPEU); \
6929 return; \
6931 tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6932 rB(ctx->opcode)); \
6933 tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6934 rB(ctx->opcode)); \
6936 #endif
6937 GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
6938 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
6939 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
6940 GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
6942 /* SPE arithmetic */
6943 #if defined(TARGET_PPC64)
6944 #define GEN_SPEOP_ARITH1(name, tcg_op) \
6945 static inline void gen_##name(DisasContext *ctx) \
6947 if (unlikely(!ctx->spe_enabled)) { \
6948 gen_exception(ctx, POWERPC_EXCP_SPEU); \
6949 return; \
6951 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6952 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6953 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6954 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6955 tcg_op(t0, t0); \
6956 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6957 tcg_gen_trunc_i64_i32(t1, t2); \
6958 tcg_temp_free_i64(t2); \
6959 tcg_op(t1, t1); \
6960 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6961 tcg_temp_free_i32(t0); \
6962 tcg_temp_free_i32(t1); \
6964 #else
6965 #define GEN_SPEOP_ARITH1(name, tcg_op) \
6966 static inline void gen_##name(DisasContext *ctx) \
6968 if (unlikely(!ctx->spe_enabled)) { \
6969 gen_exception(ctx, POWERPC_EXCP_SPEU); \
6970 return; \
6972 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); \
6973 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); \
6975 #endif
6977 static inline void gen_op_evabs(TCGv_i32 ret, TCGv_i32 arg1)
6979 int l1 = gen_new_label();
6980 int l2 = gen_new_label();
6982 tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
6983 tcg_gen_neg_i32(ret, arg1);
6984 tcg_gen_br(l2);
6985 gen_set_label(l1);
6986 tcg_gen_mov_i32(ret, arg1);
6987 gen_set_label(l2);
6989 GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
6990 GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
6991 GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
6992 GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
6993 static inline void gen_op_evrndw(TCGv_i32 ret, TCGv_i32 arg1)
6995 tcg_gen_addi_i32(ret, arg1, 0x8000);
6996 tcg_gen_ext16u_i32(ret, ret);
6998 GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
6999 GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
7000 GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
7002 #if defined(TARGET_PPC64)
7003 #define GEN_SPEOP_ARITH2(name, tcg_op) \
7004 static inline void gen_##name(DisasContext *ctx) \
7006 if (unlikely(!ctx->spe_enabled)) { \
7007 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7008 return; \
7010 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7011 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7012 TCGv_i32 t2 = tcg_temp_local_new_i32(); \
7013 TCGv_i64 t3 = tcg_temp_local_new_i64(); \
7014 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7015 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]); \
7016 tcg_op(t0, t0, t2); \
7017 tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32); \
7018 tcg_gen_trunc_i64_i32(t1, t3); \
7019 tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32); \
7020 tcg_gen_trunc_i64_i32(t2, t3); \
7021 tcg_temp_free_i64(t3); \
7022 tcg_op(t1, t1, t2); \
7023 tcg_temp_free_i32(t2); \
7024 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
7025 tcg_temp_free_i32(t0); \
7026 tcg_temp_free_i32(t1); \
7028 #else
7029 #define GEN_SPEOP_ARITH2(name, tcg_op) \
7030 static inline void gen_##name(DisasContext *ctx) \
7032 if (unlikely(!ctx->spe_enabled)) { \
7033 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7034 return; \
7036 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7037 cpu_gpr[rB(ctx->opcode)]); \
7038 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
7039 cpu_gprh[rB(ctx->opcode)]); \
7041 #endif
7043 static inline void gen_op_evsrwu(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
7045 TCGv_i32 t0;
7046 int l1, l2;
7048 l1 = gen_new_label();
7049 l2 = gen_new_label();
7050 t0 = tcg_temp_local_new_i32();
7051 /* No error here: 6 bits are used */
7052 tcg_gen_andi_i32(t0, arg2, 0x3F);
7053 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
7054 tcg_gen_shr_i32(ret, arg1, t0);
7055 tcg_gen_br(l2);
7056 gen_set_label(l1);
7057 tcg_gen_movi_i32(ret, 0);
7058 gen_set_label(l2);
7059 tcg_temp_free_i32(t0);
7061 GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
7062 static inline void gen_op_evsrws(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
7064 TCGv_i32 t0;
7065 int l1, l2;
7067 l1 = gen_new_label();
7068 l2 = gen_new_label();
7069 t0 = tcg_temp_local_new_i32();
7070 /* No error here: 6 bits are used */
7071 tcg_gen_andi_i32(t0, arg2, 0x3F);
7072 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
7073 tcg_gen_sar_i32(ret, arg1, t0);
7074 tcg_gen_br(l2);
7075 gen_set_label(l1);
7076 tcg_gen_movi_i32(ret, 0);
7077 gen_set_label(l2);
7078 tcg_temp_free_i32(t0);
7080 GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
7081 static inline void gen_op_evslw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
7083 TCGv_i32 t0;
7084 int l1, l2;
7086 l1 = gen_new_label();
7087 l2 = gen_new_label();
7088 t0 = tcg_temp_local_new_i32();
7089 /* No error here: 6 bits are used */
7090 tcg_gen_andi_i32(t0, arg2, 0x3F);
7091 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
7092 tcg_gen_shl_i32(ret, arg1, t0);
7093 tcg_gen_br(l2);
7094 gen_set_label(l1);
7095 tcg_gen_movi_i32(ret, 0);
7096 gen_set_label(l2);
7097 tcg_temp_free_i32(t0);
7099 GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
7100 static inline void gen_op_evrlw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
7102 TCGv_i32 t0 = tcg_temp_new_i32();
7103 tcg_gen_andi_i32(t0, arg2, 0x1F);
7104 tcg_gen_rotl_i32(ret, arg1, t0);
7105 tcg_temp_free_i32(t0);
7107 GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
7108 static inline void gen_evmergehi(DisasContext *ctx)
7110 if (unlikely(!ctx->spe_enabled)) {
7111 gen_exception(ctx, POWERPC_EXCP_SPEU);
7112 return;
7114 #if defined(TARGET_PPC64)
7115 TCGv t0 = tcg_temp_new();
7116 TCGv t1 = tcg_temp_new();
7117 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
7118 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7119 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7120 tcg_temp_free(t0);
7121 tcg_temp_free(t1);
7122 #else
7123 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7124 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7125 #endif
7127 GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
7128 static inline void gen_op_evsubf(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
7130 tcg_gen_sub_i32(ret, arg2, arg1);
7132 GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);
7134 /* SPE arithmetic immediate */
7135 #if defined(TARGET_PPC64)
7136 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
7137 static inline void gen_##name(DisasContext *ctx) \
7139 if (unlikely(!ctx->spe_enabled)) { \
7140 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7141 return; \
7143 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7144 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7145 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7146 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7147 tcg_op(t0, t0, rA(ctx->opcode)); \
7148 tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
7149 tcg_gen_trunc_i64_i32(t1, t2); \
7150 tcg_temp_free_i64(t2); \
7151 tcg_op(t1, t1, rA(ctx->opcode)); \
7152 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
7153 tcg_temp_free_i32(t0); \
7154 tcg_temp_free_i32(t1); \
7156 #else
7157 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
7158 static inline void gen_##name(DisasContext *ctx) \
7160 if (unlikely(!ctx->spe_enabled)) { \
7161 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7162 return; \
7164 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
7165 rA(ctx->opcode)); \
7166 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)], \
7167 rA(ctx->opcode)); \
7169 #endif
7170 GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
7171 GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);
7173 /* SPE comparison */
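/* Each SPE compare sets one CR field: CRF_CL holds the low-word result,
 * CRF_CH the high-word result, and CRF_CH_OR_CL / CRF_CH_AND_CL their OR
 * and AND. */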
7174 #if defined(TARGET_PPC64)
7175 #define GEN_SPEOP_COMP(name, tcg_cond) \
7176 static inline void gen_##name(DisasContext *ctx) \
7178 if (unlikely(!ctx->spe_enabled)) { \
7179 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7180 return; \
7182 int l1 = gen_new_label(); \
7183 int l2 = gen_new_label(); \
7184 int l3 = gen_new_label(); \
7185 int l4 = gen_new_label(); \
7186 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7187 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7188 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7189 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7190 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7191 tcg_gen_brcond_i32(tcg_cond, t0, t1, l1); \
7192 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \
7193 tcg_gen_br(l2); \
7194 gen_set_label(l1); \
7195 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
7196 CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
7197 gen_set_label(l2); \
7198 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
7199 tcg_gen_trunc_i64_i32(t0, t2); \
7200 tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
7201 tcg_gen_trunc_i64_i32(t1, t2); \
7202 tcg_temp_free_i64(t2); \
7203 tcg_gen_brcond_i32(tcg_cond, t0, t1, l3); \
7204 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7205 ~(CRF_CH | CRF_CH_AND_CL)); \
7206 tcg_gen_br(l4); \
7207 gen_set_label(l3); \
7208 tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7209 CRF_CH | CRF_CH_OR_CL); \
7210 gen_set_label(l4); \
7211 tcg_temp_free_i32(t0); \
7212 tcg_temp_free_i32(t1); \
7214 #else
7215 #define GEN_SPEOP_COMP(name, tcg_cond) \
7216 static inline void gen_##name(DisasContext *ctx) \
7218 if (unlikely(!ctx->spe_enabled)) { \
7219 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7220 return; \
7222 int l1 = gen_new_label(); \
7223 int l2 = gen_new_label(); \
7224 int l3 = gen_new_label(); \
7225 int l4 = gen_new_label(); \
7227 tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)], \
7228 cpu_gpr[rB(ctx->opcode)], l1); \
7229 tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0); \
7230 tcg_gen_br(l2); \
7231 gen_set_label(l1); \
7232 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
7233 CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
7234 gen_set_label(l2); \
7235 tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)], \
7236 cpu_gprh[rB(ctx->opcode)], l3); \
7237 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7238 ~(CRF_CH | CRF_CH_AND_CL)); \
7239 tcg_gen_br(l4); \
7240 gen_set_label(l3); \
7241 tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7242 CRF_CH | CRF_CH_OR_CL); \
7243 gen_set_label(l4); \
7245 #endif
7246 GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
7247 GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
7248 GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
7249 GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
7250 GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
7252 /* SPE misc */
7253 static inline void gen_brinc(DisasContext *ctx)
7255 /* Note: brinc is usable even if SPE is disabled */
7256 gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
7257 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7259 static inline void gen_evmergelo(DisasContext *ctx)
7261 if (unlikely(!ctx->spe_enabled)) {
7262 gen_exception(ctx, POWERPC_EXCP_SPEU);
7263 return;
7265 #if defined(TARGET_PPC64)
7266 TCGv t0 = tcg_temp_new();
7267 TCGv t1 = tcg_temp_new();
7268 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
7269 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
7270 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7271 tcg_temp_free(t0);
7272 tcg_temp_free(t1);
7273 #else
7274 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7275 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7276 #endif
7278 static inline void gen_evmergehilo(DisasContext *ctx)
7280 if (unlikely(!ctx->spe_enabled)) {
7281 gen_exception(ctx, POWERPC_EXCP_SPEU);
7282 return;
7284 #if defined(TARGET_PPC64)
7285 TCGv t0 = tcg_temp_new();
7286 TCGv t1 = tcg_temp_new();
7287 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
7288 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7289 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7290 tcg_temp_free(t0);
7291 tcg_temp_free(t1);
7292 #else
7293 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7294 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7295 #endif
7297 static inline void gen_evmergelohi(DisasContext *ctx)
7299 if (unlikely(!ctx->spe_enabled)) {
7300 gen_exception(ctx, POWERPC_EXCP_SPEU);
7301 return;
7303 #if defined(TARGET_PPC64)
7304 TCGv t0 = tcg_temp_new();
7305 TCGv t1 = tcg_temp_new();
7306 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
7307 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
7308 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7309 tcg_temp_free(t0);
7310 tcg_temp_free(t1);
7311 #else
7312 if (rD(ctx->opcode) == rA(ctx->opcode)) {
7313 TCGv_i32 tmp = tcg_temp_new_i32();
7314 tcg_gen_mov_i32(tmp, cpu_gpr[rA(ctx->opcode)]);
7315 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7316 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], tmp);
7317 tcg_temp_free_i32(tmp);
7318 } else {
7319 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7320 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7322 #endif
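/* evsplati sign-extends the 5-bit immediate in the rA field (the shift
 * left/arithmetic shift right by 27 below) and replicates it into both
 * 32-bit halves; evsplatfi instead places the field in the top 5 bits of
 * each half. */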
7324 static inline void gen_evsplati(DisasContext *ctx)
7326 uint64_t imm = ((int32_t)(rA(ctx->opcode) << 27)) >> 27;
7328 #if defined(TARGET_PPC64)
7329 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
7330 #else
7331 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
7332 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
7333 #endif
7335 static inline void gen_evsplatfi(DisasContext *ctx)
7337 uint64_t imm = rA(ctx->opcode) << 27;
7339 #if defined(TARGET_PPC64)
7340 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
7341 #else
7342 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
7343 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
7344 #endif
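/* evsel selects each half of rD independently: the CR bit tested with
 * (1 << 3) picks rA or rB for the high word, the (1 << 2) bit for the low
 * word. gen_evsel0..3 are just the four opcode encodings, all expanding to
 * the same gen_evsel(). */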
7347 static inline void gen_evsel(DisasContext *ctx)
7349 int l1 = gen_new_label();
7350 int l2 = gen_new_label();
7351 int l3 = gen_new_label();
7352 int l4 = gen_new_label();
7353 TCGv_i32 t0 = tcg_temp_local_new_i32();
7354 #if defined(TARGET_PPC64)
7355 TCGv t1 = tcg_temp_local_new();
7356 TCGv t2 = tcg_temp_local_new();
7357 #endif
7358 tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
7359 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
7360 #if defined(TARGET_PPC64)
7361 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7362 #else
7363 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7364 #endif
7365 tcg_gen_br(l2);
7366 gen_set_label(l1);
7367 #if defined(TARGET_PPC64)
7368 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7369 #else
7370 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7371 #endif
7372 gen_set_label(l2);
7373 tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
7374 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
7375 #if defined(TARGET_PPC64)
7376 tcg_gen_ext32u_tl(t2, cpu_gpr[rA(ctx->opcode)]);
7377 #else
7378 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7379 #endif
7380 tcg_gen_br(l4);
7381 gen_set_label(l3);
7382 #if defined(TARGET_PPC64)
7383 tcg_gen_ext32u_tl(t2, cpu_gpr[rB(ctx->opcode)]);
7384 #else
7385 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7386 #endif
7387 gen_set_label(l4);
7388 tcg_temp_free_i32(t0);
7389 #if defined(TARGET_PPC64)
7390 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
7391 tcg_temp_free(t1);
7392 tcg_temp_free(t2);
7393 #endif
7396 static void gen_evsel0(DisasContext *ctx)
7398 gen_evsel(ctx);
7401 static void gen_evsel1(DisasContext *ctx)
7403 gen_evsel(ctx);
7406 static void gen_evsel2(DisasContext *ctx)
7408 gen_evsel(ctx);
7411 static void gen_evsel3(DisasContext *ctx)
7413 gen_evsel(ctx);
7416 /* Multiply */
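/* evmwumi/evmwsmi form the full 64-bit product of the low words of rA and
 * rB (unsigned resp. signed). The 'a' variants also latch the product into
 * the SPE accumulator, and the 'aa' variants add it to the accumulator
 * before writing rD. */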
7418 static inline void gen_evmwumi(DisasContext *ctx)
7420 TCGv_i64 t0, t1;
7422 if (unlikely(!ctx->spe_enabled)) {
7423 gen_exception(ctx, POWERPC_EXCP_SPEU);
7424 return;
7427 t0 = tcg_temp_new_i64();
7428 t1 = tcg_temp_new_i64();
7430 /* t0 := rA; t1 := rB */
7431 #if defined(TARGET_PPC64)
7432 tcg_gen_ext32u_tl(t0, cpu_gpr[rA(ctx->opcode)]);
7433 tcg_gen_ext32u_tl(t1, cpu_gpr[rB(ctx->opcode)]);
7434 #else
7435 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
7436 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
7437 #endif
7439 tcg_gen_mul_i64(t0, t0, t1); /* t0 := rA * rB */
7441 gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */
7443 tcg_temp_free_i64(t0);
7444 tcg_temp_free_i64(t1);
7447 static inline void gen_evmwumia(DisasContext *ctx)
7449 TCGv_i64 tmp;
7451 if (unlikely(!ctx->spe_enabled)) {
7452 gen_exception(ctx, POWERPC_EXCP_SPEU);
7453 return;
7456 gen_evmwumi(ctx); /* rD := rA * rB */
7458 tmp = tcg_temp_new_i64();
7460 /* acc := rD */
7461 gen_load_gpr64(tmp, rD(ctx->opcode));
7462 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));
7463 tcg_temp_free_i64(tmp);
7466 static inline void gen_evmwumiaa(DisasContext *ctx)
7468 TCGv_i64 acc;
7469 TCGv_i64 tmp;
7471 if (unlikely(!ctx->spe_enabled)) {
7472 gen_exception(ctx, POWERPC_EXCP_SPEU);
7473 return;
7476 gen_evmwumi(ctx); /* rD := rA * rB */
7478 acc = tcg_temp_new_i64();
7479 tmp = tcg_temp_new_i64();
7481 /* tmp := rD */
7482 gen_load_gpr64(tmp, rD(ctx->opcode));
7484 /* Load acc */
7485 tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));
7487 /* acc := tmp + acc */
7488 tcg_gen_add_i64(acc, acc, tmp);
7490 /* Store acc */
7491 tcg_gen_st_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));
7493 /* rD := acc */
7494 gen_store_gpr64(rD(ctx->opcode), acc);
7496 tcg_temp_free_i64(acc);
7497 tcg_temp_free_i64(tmp);
7500 static inline void gen_evmwsmi(DisasContext *ctx)
7502 TCGv_i64 t0, t1;
7504 if (unlikely(!ctx->spe_enabled)) {
7505 gen_exception(ctx, POWERPC_EXCP_SPEU);
7506 return;
7509 t0 = tcg_temp_new_i64();
7510 t1 = tcg_temp_new_i64();
7512 /* t0 := rA; t1 := rB */
7513 #if defined(TARGET_PPC64)
7514 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
7515 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
7516 #else
7517 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
7518 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
7519 #endif
7521 tcg_gen_mul_i64(t0, t0, t1); /* t0 := rA * rB */
7523 gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */
7525 tcg_temp_free_i64(t0);
7526 tcg_temp_free_i64(t1);
7529 static inline void gen_evmwsmia(DisasContext *ctx)
7531 TCGv_i64 tmp;
7533 gen_evmwsmi(ctx); /* rD := rA * rB */
7535 tmp = tcg_temp_new_i64();
7537 /* acc := rD */
7538 gen_load_gpr64(tmp, rD(ctx->opcode));
7539 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));
7541 tcg_temp_free_i64(tmp);
7544 static inline void gen_evmwsmiaa(DisasContext *ctx)
7546 TCGv_i64 acc;
7547 TCGv_i64 tmp;
7549 gen_evmwsmi(ctx); /* rD := rA * rB */
7551 acc = tcg_temp_new_i64();
7552 tmp = tcg_temp_new_i64();
7554 /* tmp := rD */
7555 gen_load_gpr64(tmp, rD(ctx->opcode));
7557 /* Load acc */
7558 tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));
7560 /* acc := tmp + acc */
7561 tcg_gen_add_i64(acc, acc, tmp);
7563 /* Store acc */
7564 tcg_gen_st_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));
7566 /* rD := acc */
7567 gen_store_gpr64(rD(ctx->opcode), acc);
7569 tcg_temp_free_i64(acc);
7570 tcg_temp_free_i64(tmp);
7573 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
7574 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
7575 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
7576 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
7577 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
7578 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
7579 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
7580 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x0000F800, 0x00000000, PPC_SPE); //
7581 GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, 0xFFFFFFFF, PPC_SPE);
7582 GEN_SPE(speundef, evand, 0x08, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE); ////
7583 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
7584 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
7585 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
7586 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, 0x00000000, PPC_SPE);
7587 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, 0x00000000, PPC_SPE);
7588 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, 0x00000000, PPC_SPE);
7589 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE); ////
7590 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
7591 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
7592 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, 0x00000000, PPC_SPE);
7593 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
7594 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
7595 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, 0x0000F800, PPC_SPE); //
7596 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, 0x0000F800, PPC_SPE);
7597 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
7598 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
7599 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, 0x00600000, PPC_SPE); ////
7600 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, 0x00600000, PPC_SPE); ////
7601 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, 0xFFFFFFFF, PPC_SPE); ////
7603 /* SPE load and stores */
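/* For the D-form encodings (Rc set in GEN_SPEOP_LDST below) the 5-bit
 * offset in the rB field is scaled by the access size via the 'sh'
 * parameter; rA == 0 means a zero base, and in narrow mode the EA is
 * truncated to 32 bits. */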
7604 static inline void gen_addr_spe_imm_index(DisasContext *ctx, TCGv EA, int sh)
7606 target_ulong uimm = rB(ctx->opcode);
7608 if (rA(ctx->opcode) == 0) {
7609 tcg_gen_movi_tl(EA, uimm << sh);
7610 } else {
7611 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh);
7612 if (NARROW_MODE(ctx)) {
7613 tcg_gen_ext32u_tl(EA, EA);
7618 static inline void gen_op_evldd(DisasContext *ctx, TCGv addr)
7620 #if defined(TARGET_PPC64)
7621 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7622 #else
7623 TCGv_i64 t0 = tcg_temp_new_i64();
7624 gen_qemu_ld64(ctx, t0, addr);
7625 tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0);
7626 tcg_gen_shri_i64(t0, t0, 32);
7627 tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0);
7628 tcg_temp_free_i64(t0);
7629 #endif
7632 static inline void gen_op_evldw(DisasContext *ctx, TCGv addr)
7634 #if defined(TARGET_PPC64)
7635 TCGv t0 = tcg_temp_new();
7636 gen_qemu_ld32u(ctx, t0, addr);
7637 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7638 gen_addr_add(ctx, addr, addr, 4);
7639 gen_qemu_ld32u(ctx, t0, addr);
7640 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7641 tcg_temp_free(t0);
7642 #else
7643 gen_qemu_ld32u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7644 gen_addr_add(ctx, addr, addr, 4);
7645 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7646 #endif
7649 static inline void gen_op_evldh(DisasContext *ctx, TCGv addr)
7651 TCGv t0 = tcg_temp_new();
7652 #if defined(TARGET_PPC64)
7653 gen_qemu_ld16u(ctx, t0, addr);
7654 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7655 gen_addr_add(ctx, addr, addr, 2);
7656 gen_qemu_ld16u(ctx, t0, addr);
7657 tcg_gen_shli_tl(t0, t0, 32);
7658 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7659 gen_addr_add(ctx, addr, addr, 2);
7660 gen_qemu_ld16u(ctx, t0, addr);
7661 tcg_gen_shli_tl(t0, t0, 16);
7662 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7663 gen_addr_add(ctx, addr, addr, 2);
7664 gen_qemu_ld16u(ctx, t0, addr);
7665 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7666 #else
7667 gen_qemu_ld16u(ctx, t0, addr);
7668 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7669 gen_addr_add(ctx, addr, addr, 2);
7670 gen_qemu_ld16u(ctx, t0, addr);
7671 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
7672 gen_addr_add(ctx, addr, addr, 2);
7673 gen_qemu_ld16u(ctx, t0, addr);
7674 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7675 gen_addr_add(ctx, addr, addr, 2);
7676 gen_qemu_ld16u(ctx, t0, addr);
7677 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7678 #endif
7679 tcg_temp_free(t0);
7682 static inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr)
7684 TCGv t0 = tcg_temp_new();
7685 gen_qemu_ld16u(ctx, t0, addr);
7686 #if defined(TARGET_PPC64)
7687 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7688 tcg_gen_shli_tl(t0, t0, 16);
7689 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7690 #else
7691 tcg_gen_shli_tl(t0, t0, 16);
7692 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7693 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7694 #endif
7695 tcg_temp_free(t0);
7698 static inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr)
7700 TCGv t0 = tcg_temp_new();
7701 gen_qemu_ld16u(ctx, t0, addr);
7702 #if defined(TARGET_PPC64)
7703 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7704 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7705 #else
7706 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7707 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7708 #endif
7709 tcg_temp_free(t0);
7712 static inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr)
7714 TCGv t0 = tcg_temp_new();
7715 gen_qemu_ld16s(ctx, t0, addr);
7716 #if defined(TARGET_PPC64)
7717 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7718 tcg_gen_ext32u_tl(t0, t0);
7719 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7720 #else
7721 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7722 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7723 #endif
7724 tcg_temp_free(t0);
7727 static inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr)
7729 TCGv t0 = tcg_temp_new();
7730 #if defined(TARGET_PPC64)
7731 gen_qemu_ld16u(ctx, t0, addr);
7732 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7733 gen_addr_add(ctx, addr, addr, 2);
7734 gen_qemu_ld16u(ctx, t0, addr);
7735 tcg_gen_shli_tl(t0, t0, 16);
7736 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7737 #else
7738 gen_qemu_ld16u(ctx, t0, addr);
7739 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7740 gen_addr_add(ctx, addr, addr, 2);
7741 gen_qemu_ld16u(ctx, t0, addr);
7742 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7743 #endif
7744 tcg_temp_free(t0);
7747 static inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr)
7749 #if defined(TARGET_PPC64)
7750 TCGv t0 = tcg_temp_new();
7751 gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7752 gen_addr_add(ctx, addr, addr, 2);
7753 gen_qemu_ld16u(ctx, t0, addr);
7754 tcg_gen_shli_tl(t0, t0, 32);
7755 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7756 tcg_temp_free(t0);
7757 #else
7758 gen_qemu_ld16u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7759 gen_addr_add(ctx, addr, addr, 2);
7760 gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7761 #endif
7764 static inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr)
7766 #if defined(TARGET_PPC64)
7767 TCGv t0 = tcg_temp_new();
7768 gen_qemu_ld16s(ctx, t0, addr);
7769 tcg_gen_ext32u_tl(cpu_gpr[rD(ctx->opcode)], t0);
7770 gen_addr_add(ctx, addr, addr, 2);
7771 gen_qemu_ld16s(ctx, t0, addr);
7772 tcg_gen_shli_tl(t0, t0, 32);
7773 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7774 tcg_temp_free(t0);
7775 #else
7776 gen_qemu_ld16s(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7777 gen_addr_add(ctx, addr, addr, 2);
7778 gen_qemu_ld16s(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7779 #endif
7782 static inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr)
7784 TCGv t0 = tcg_temp_new();
7785 gen_qemu_ld32u(ctx, t0, addr);
7786 #if defined(TARGET_PPC64)
7787 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7788 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7789 #else
7790 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7791 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7792 #endif
7793 tcg_temp_free(t0);
7796 static inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr)
7798 TCGv t0 = tcg_temp_new();
7799 #if defined(TARGET_PPC64)
7800 gen_qemu_ld16u(ctx, t0, addr);
7801 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7802 tcg_gen_shli_tl(t0, t0, 32);
7803 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7804 gen_addr_add(ctx, addr, addr, 2);
7805 gen_qemu_ld16u(ctx, t0, addr);
7806 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7807 tcg_gen_shli_tl(t0, t0, 16);
7808 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7809 #else
7810 gen_qemu_ld16u(ctx, t0, addr);
7811 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7812 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
7813 gen_addr_add(ctx, addr, addr, 2);
7814 gen_qemu_ld16u(ctx, t0, addr);
7815 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7816 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7817 #endif
7818 tcg_temp_free(t0);
7821 static inline void gen_op_evstdd(DisasContext *ctx, TCGv addr)
7823 #if defined(TARGET_PPC64)
7824 gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7825 #else
7826 TCGv_i64 t0 = tcg_temp_new_i64();
7827 tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)], cpu_gprh[rS(ctx->opcode)]);
7828 gen_qemu_st64(ctx, t0, addr);
7829 tcg_temp_free_i64(t0);
7830 #endif
7833 static inline void gen_op_evstdw(DisasContext *ctx, TCGv addr)
7835 #if defined(TARGET_PPC64)
7836 TCGv t0 = tcg_temp_new();
7837 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7838 gen_qemu_st32(ctx, t0, addr);
7839 tcg_temp_free(t0);
7840 #else
7841 gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7842 #endif
7843 gen_addr_add(ctx, addr, addr, 4);
7844 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7847 static inline void gen_op_evstdh(DisasContext *ctx, TCGv addr)
7849 TCGv t0 = tcg_temp_new();
7850 #if defined(TARGET_PPC64)
7851 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
7852 #else
7853 tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
7854 #endif
7855 gen_qemu_st16(ctx, t0, addr);
7856 gen_addr_add(ctx, addr, addr, 2);
7857 #if defined(TARGET_PPC64)
7858 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7859 gen_qemu_st16(ctx, t0, addr);
7860 #else
7861 gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7862 #endif
7863 gen_addr_add(ctx, addr, addr, 2);
7864 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
7865 gen_qemu_st16(ctx, t0, addr);
7866 tcg_temp_free(t0);
7867 gen_addr_add(ctx, addr, addr, 2);
7868 gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7871 static inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr)
7873 TCGv t0 = tcg_temp_new();
7874 #if defined(TARGET_PPC64)
7875 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
7876 #else
7877 tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
7878 #endif
7879 gen_qemu_st16(ctx, t0, addr);
7880 gen_addr_add(ctx, addr, addr, 2);
7881 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
7882 gen_qemu_st16(ctx, t0, addr);
7883 tcg_temp_free(t0);
7886 static inline void gen_op_evstwho(DisasContext *ctx, TCGv addr)
7888 #if defined(TARGET_PPC64)
7889 TCGv t0 = tcg_temp_new();
7890 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7891 gen_qemu_st16(ctx, t0, addr);
7892 tcg_temp_free(t0);
7893 #else
7894 gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7895 #endif
7896 gen_addr_add(ctx, addr, addr, 2);
7897 gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7900 static inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr)
7902 #if defined(TARGET_PPC64)
7903 TCGv t0 = tcg_temp_new();
7904 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7905 gen_qemu_st32(ctx, t0, addr);
7906 tcg_temp_free(t0);
7907 #else
7908 gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7909 #endif
7912 static inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr)
7914 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
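/* Common wrapper for the SPE vector load/store helpers above: check that SPE
   is enabled, compute the effective address (immediate-indexed when Rc is
   set, register-indexed otherwise) and dispatch to the gen_op_ handler. */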
7917 #define GEN_SPEOP_LDST(name, opc2, sh) \
7918 static void glue(gen_, name)(DisasContext *ctx) \
7920 TCGv t0; \
7921 if (unlikely(!ctx->spe_enabled)) { \
7922 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7923 return; \
7925 gen_set_access_type(ctx, ACCESS_INT); \
7926 t0 = tcg_temp_new(); \
7927 if (Rc(ctx->opcode)) { \
7928 gen_addr_spe_imm_index(ctx, t0, sh); \
7929 } else { \
7930 gen_addr_reg_index(ctx, t0); \
7932 gen_op_##name(ctx, t0); \
7933 tcg_temp_free(t0); \
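/* The last argument is the log2 of the access size, used to scale the
   instruction's immediate offset in the Rc=1 addressing form. */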
7936 GEN_SPEOP_LDST(evldd, 0x00, 3);
7937 GEN_SPEOP_LDST(evldw, 0x01, 3);
7938 GEN_SPEOP_LDST(evldh, 0x02, 3);
7939 GEN_SPEOP_LDST(evlhhesplat, 0x04, 1);
7940 GEN_SPEOP_LDST(evlhhousplat, 0x06, 1);
7941 GEN_SPEOP_LDST(evlhhossplat, 0x07, 1);
7942 GEN_SPEOP_LDST(evlwhe, 0x08, 2);
7943 GEN_SPEOP_LDST(evlwhou, 0x0A, 2);
7944 GEN_SPEOP_LDST(evlwhos, 0x0B, 2);
7945 GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2);
7946 GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2);
7948 GEN_SPEOP_LDST(evstdd, 0x10, 3);
7949 GEN_SPEOP_LDST(evstdw, 0x11, 3);
7950 GEN_SPEOP_LDST(evstdh, 0x12, 3);
7951 GEN_SPEOP_LDST(evstwhe, 0x18, 2);
7952 GEN_SPEOP_LDST(evstwho, 0x1A, 2);
7953 GEN_SPEOP_LDST(evstwwe, 0x1C, 2);
7954 GEN_SPEOP_LDST(evstwwo, 0x1E, 2);
7956 /* Multiply and add - TODO */
7957 #if 0
7958 GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);//
7959 GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7960 GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, 0x00000000, PPC_SPE);
7961 GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7962 GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, 0x00000000, PPC_SPE);
7963 GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7964 GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7965 GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7966 GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, 0x00000000, PPC_SPE);
7967 GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7968 GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, 0x00000000, PPC_SPE);
7969 GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7971 GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7972 GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, 0xFFFFFFFF, PPC_SPE);
7973 GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, 0x00000000, PPC_SPE);
7974 GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7975 GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7976 GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7977 GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7978 GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, 0xFFFFFFFF, PPC_SPE);
7979 GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, 0x00000000, PPC_SPE);
7980 GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7981 GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7982 GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7984 GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
7985 GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
7986 GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
7987 GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
7988 GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, 0x00000000, PPC_SPE);
7990 GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, 0x00000000, PPC_SPE);
7991 GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7992 GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, 0x00000000, PPC_SPE);
7993 GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7994 GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, 0x00000000, PPC_SPE);
7995 GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7996 GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, 0x00000000, PPC_SPE);
7997 GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
7998 GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, 0x00000000, PPC_SPE);
7999 GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8000 GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8001 GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8003 GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, 0x00000000, PPC_SPE);
8004 GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, 0x00000000, PPC_SPE);
8005 GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8006 GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8008 GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8009 GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8010 GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8011 GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8012 GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8013 GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8014 GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8015 GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8016 GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8017 GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8018 GEN_SPE(evmhogumian, evmhogsmian, 0x16, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8019 GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8021 GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, 0x00000000, PPC_SPE);
8022 GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, 0x00000000, PPC_SPE);
8023 GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8024 GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, 0x00000000, PPC_SPE);
8025 GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8026 #endif
8028 /*** SPE floating-point extension ***/
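/* Two sets of helper-invocation macros follow: on 64-bit targets the SPE
   register pair lives in one 64-bit GPR, so 32-bit operands are extracted
   from and merged back into its low word; on 32-bit targets 64-bit operands
   are assembled from cpu_gpr/cpu_gprh via gen_load_gpr64/gen_store_gpr64. */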
8029 #if defined(TARGET_PPC64)
8030 #define GEN_SPEFPUOP_CONV_32_32(name) \
8031 static inline void gen_##name(DisasContext *ctx) \
8033 TCGv_i32 t0; \
8034 TCGv t1; \
8035 t0 = tcg_temp_new_i32(); \
8036 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
8037 gen_helper_##name(t0, cpu_env, t0); \
8038 t1 = tcg_temp_new(); \
8039 tcg_gen_extu_i32_tl(t1, t0); \
8040 tcg_temp_free_i32(t0); \
8041 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
8042 0xFFFFFFFF00000000ULL); \
8043 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
8044 tcg_temp_free(t1); \
8046 #define GEN_SPEFPUOP_CONV_32_64(name) \
8047 static inline void gen_##name(DisasContext *ctx) \
8049 TCGv_i32 t0; \
8050 TCGv t1; \
8051 t0 = tcg_temp_new_i32(); \
8052 gen_helper_##name(t0, cpu_env, cpu_gpr[rB(ctx->opcode)]); \
8053 t1 = tcg_temp_new(); \
8054 tcg_gen_extu_i32_tl(t1, t0); \
8055 tcg_temp_free_i32(t0); \
8056 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
8057 0xFFFFFFFF00000000ULL); \
8058 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
8059 tcg_temp_free(t1); \
8061 #define GEN_SPEFPUOP_CONV_64_32(name) \
8062 static inline void gen_##name(DisasContext *ctx) \
8064 TCGv_i32 t0 = tcg_temp_new_i32(); \
8065 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
8066 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); \
8067 tcg_temp_free_i32(t0); \
8069 #define GEN_SPEFPUOP_CONV_64_64(name) \
8070 static inline void gen_##name(DisasContext *ctx) \
8072 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, \
8073 cpu_gpr[rB(ctx->opcode)]); \
8075 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
8076 static inline void gen_##name(DisasContext *ctx) \
8078 TCGv_i32 t0, t1; \
8079 TCGv_i64 t2; \
8080 if (unlikely(!ctx->spe_enabled)) { \
8081 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8082 return; \
8084 t0 = tcg_temp_new_i32(); \
8085 t1 = tcg_temp_new_i32(); \
8086 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
8087 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
8088 gen_helper_##name(t0, cpu_env, t0, t1); \
8089 tcg_temp_free_i32(t1); \
8090 t2 = tcg_temp_new(); \
8091 tcg_gen_extu_i32_tl(t2, t0); \
8092 tcg_temp_free_i32(t0); \
8093 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
8094 0xFFFFFFFF00000000ULL); \
8095 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t2); \
8096 tcg_temp_free(t2); \
8098 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
8099 static inline void gen_##name(DisasContext *ctx) \
8101 if (unlikely(!ctx->spe_enabled)) { \
8102 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8103 return; \
8105 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, \
8106 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
8108 #define GEN_SPEFPUOP_COMP_32(name) \
8109 static inline void gen_##name(DisasContext *ctx) \
8111 TCGv_i32 t0, t1; \
8112 if (unlikely(!ctx->spe_enabled)) { \
8113 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8114 return; \
8116 t0 = tcg_temp_new_i32(); \
8117 t1 = tcg_temp_new_i32(); \
8118 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
8119 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
8120 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, t0, t1); \
8121 tcg_temp_free_i32(t0); \
8122 tcg_temp_free_i32(t1); \
8124 #define GEN_SPEFPUOP_COMP_64(name) \
8125 static inline void gen_##name(DisasContext *ctx) \
8127 if (unlikely(!ctx->spe_enabled)) { \
8128 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8129 return; \
8131 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, \
8132 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
8134 #else
8135 #define GEN_SPEFPUOP_CONV_32_32(name) \
8136 static inline void gen_##name(DisasContext *ctx) \
8138 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, \
8139 cpu_gpr[rB(ctx->opcode)]); \
8141 #define GEN_SPEFPUOP_CONV_32_64(name) \
8142 static inline void gen_##name(DisasContext *ctx) \
8144 TCGv_i64 t0 = tcg_temp_new_i64(); \
8145 gen_load_gpr64(t0, rB(ctx->opcode)); \
8146 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); \
8147 tcg_temp_free_i64(t0); \
8149 #define GEN_SPEFPUOP_CONV_64_32(name) \
8150 static inline void gen_##name(DisasContext *ctx) \
8152 TCGv_i64 t0 = tcg_temp_new_i64(); \
8153 gen_helper_##name(t0, cpu_env, cpu_gpr[rB(ctx->opcode)]); \
8154 gen_store_gpr64(rD(ctx->opcode), t0); \
8155 tcg_temp_free_i64(t0); \
8157 #define GEN_SPEFPUOP_CONV_64_64(name) \
8158 static inline void gen_##name(DisasContext *ctx) \
8160 TCGv_i64 t0 = tcg_temp_new_i64(); \
8161 gen_load_gpr64(t0, rB(ctx->opcode)); \
8162 gen_helper_##name(t0, cpu_env, t0); \
8163 gen_store_gpr64(rD(ctx->opcode), t0); \
8164 tcg_temp_free_i64(t0); \
8166 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
8167 static inline void gen_##name(DisasContext *ctx) \
8169 if (unlikely(!ctx->spe_enabled)) { \
8170 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8171 return; \
8173 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, \
8174 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
8176 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
8177 static inline void gen_##name(DisasContext *ctx) \
8179 TCGv_i64 t0, t1; \
8180 if (unlikely(!ctx->spe_enabled)) { \
8181 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8182 return; \
8184 t0 = tcg_temp_new_i64(); \
8185 t1 = tcg_temp_new_i64(); \
8186 gen_load_gpr64(t0, rA(ctx->opcode)); \
8187 gen_load_gpr64(t1, rB(ctx->opcode)); \
8188 gen_helper_##name(t0, cpu_env, t0, t1); \
8189 gen_store_gpr64(rD(ctx->opcode), t0); \
8190 tcg_temp_free_i64(t0); \
8191 tcg_temp_free_i64(t1); \
8193 #define GEN_SPEFPUOP_COMP_32(name) \
8194 static inline void gen_##name(DisasContext *ctx) \
8196 if (unlikely(!ctx->spe_enabled)) { \
8197 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8198 return; \
8200 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, \
8201 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
8203 #define GEN_SPEFPUOP_COMP_64(name) \
8204 static inline void gen_##name(DisasContext *ctx) \
8206 TCGv_i64 t0, t1; \
8207 if (unlikely(!ctx->spe_enabled)) { \
8208 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8209 return; \
8211 t0 = tcg_temp_new_i64(); \
8212 t1 = tcg_temp_new_i64(); \
8213 gen_load_gpr64(t0, rA(ctx->opcode)); \
8214 gen_load_gpr64(t1, rB(ctx->opcode)); \
8215 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, t0, t1); \
8216 tcg_temp_free_i64(t0); \
8217 tcg_temp_free_i64(t1); \
8219 #endif
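/* The macro suffix _<dest>_<src> gives the destination and source operand
   widths in bits. */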
8221 /* Single precision floating-point vectors operations */
8222 /* Arithmetic */
8223 GEN_SPEFPUOP_ARITH2_64_64(evfsadd);
8224 GEN_SPEFPUOP_ARITH2_64_64(evfssub);
8225 GEN_SPEFPUOP_ARITH2_64_64(evfsmul);
8226 GEN_SPEFPUOP_ARITH2_64_64(evfsdiv);
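/* abs, nabs and neg only touch the sign bits, so they are open-coded with
   and/or/xor immediates instead of calling a helper. */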
8227 static inline void gen_evfsabs(DisasContext *ctx)
8229 if (unlikely(!ctx->spe_enabled)) {
8230 gen_exception(ctx, POWERPC_EXCP_SPEU);
8231 return;
8233 #if defined(TARGET_PPC64)
8234 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000080000000LL);
8235 #else
8236 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x80000000);
8237 tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
8238 #endif
8240 static inline void gen_evfsnabs(DisasContext *ctx)
8242 if (unlikely(!ctx->spe_enabled)) {
8243 gen_exception(ctx, POWERPC_EXCP_SPEU);
8244 return;
8246 #if defined(TARGET_PPC64)
8247 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
8248 #else
8249 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8250 tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8251 #endif
8253 static inline void gen_evfsneg(DisasContext *ctx)
8255 if (unlikely(!ctx->spe_enabled)) {
8256 gen_exception(ctx, POWERPC_EXCP_SPEU);
8257 return;
8259 #if defined(TARGET_PPC64)
8260 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
8261 #else
8262 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8263 tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8264 #endif
8267 /* Conversion */
8268 GEN_SPEFPUOP_CONV_64_64(evfscfui);
8269 GEN_SPEFPUOP_CONV_64_64(evfscfsi);
8270 GEN_SPEFPUOP_CONV_64_64(evfscfuf);
8271 GEN_SPEFPUOP_CONV_64_64(evfscfsf);
8272 GEN_SPEFPUOP_CONV_64_64(evfsctui);
8273 GEN_SPEFPUOP_CONV_64_64(evfsctsi);
8274 GEN_SPEFPUOP_CONV_64_64(evfsctuf);
8275 GEN_SPEFPUOP_CONV_64_64(evfsctsf);
8276 GEN_SPEFPUOP_CONV_64_64(evfsctuiz);
8277 GEN_SPEFPUOP_CONV_64_64(evfsctsiz);
8279 /* Comparison */
8280 GEN_SPEFPUOP_COMP_64(evfscmpgt);
8281 GEN_SPEFPUOP_COMP_64(evfscmplt);
8282 GEN_SPEFPUOP_COMP_64(evfscmpeq);
8283 GEN_SPEFPUOP_COMP_64(evfststgt);
8284 GEN_SPEFPUOP_COMP_64(evfststlt);
8285 GEN_SPEFPUOP_COMP_64(evfststeq);
8287 /* Opcodes definitions */
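/* GEN_SPE registers two SPE instructions as a pair, each with its own
   invalid-bits mask; speundef marks an encoding with no implementation. */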
8288 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
8289 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE); //
8290 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8291 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
8292 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
8293 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8294 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8295 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8296 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8297 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8298 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8299 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8300 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
8301 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8303 /* Single precision floating-point operations */
8304 /* Arithmetic */
8305 GEN_SPEFPUOP_ARITH2_32_32(efsadd);
8306 GEN_SPEFPUOP_ARITH2_32_32(efssub);
8307 GEN_SPEFPUOP_ARITH2_32_32(efsmul);
8308 GEN_SPEFPUOP_ARITH2_32_32(efsdiv);
8309 static inline void gen_efsabs(DisasContext *ctx)
8311 if (unlikely(!ctx->spe_enabled)) {
8312 gen_exception(ctx, POWERPC_EXCP_SPEU);
8313 return;
8315 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], (target_long)~0x80000000LL);
8317 static inline void gen_efsnabs(DisasContext *ctx)
8319 if (unlikely(!ctx->spe_enabled)) {
8320 gen_exception(ctx, POWERPC_EXCP_SPEU);
8321 return;
8323 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8325 static inline void gen_efsneg(DisasContext *ctx)
8327 if (unlikely(!ctx->spe_enabled)) {
8328 gen_exception(ctx, POWERPC_EXCP_SPEU);
8329 return;
8331 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8334 /* Conversion */
8335 GEN_SPEFPUOP_CONV_32_32(efscfui);
8336 GEN_SPEFPUOP_CONV_32_32(efscfsi);
8337 GEN_SPEFPUOP_CONV_32_32(efscfuf);
8338 GEN_SPEFPUOP_CONV_32_32(efscfsf);
8339 GEN_SPEFPUOP_CONV_32_32(efsctui);
8340 GEN_SPEFPUOP_CONV_32_32(efsctsi);
8341 GEN_SPEFPUOP_CONV_32_32(efsctuf);
8342 GEN_SPEFPUOP_CONV_32_32(efsctsf);
8343 GEN_SPEFPUOP_CONV_32_32(efsctuiz);
8344 GEN_SPEFPUOP_CONV_32_32(efsctsiz);
8345 GEN_SPEFPUOP_CONV_32_64(efscfd);
8347 /* Comparison */
8348 GEN_SPEFPUOP_COMP_32(efscmpgt);
8349 GEN_SPEFPUOP_COMP_32(efscmplt);
8350 GEN_SPEFPUOP_COMP_32(efscmpeq);
8351 GEN_SPEFPUOP_COMP_32(efststgt);
8352 GEN_SPEFPUOP_COMP_32(efststlt);
8353 GEN_SPEFPUOP_COMP_32(efststeq);
8355 /* Opcodes definitions */
8356 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
8357 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE); //
8358 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8359 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
8360 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
8361 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, 0x00180000, PPC_SPE_SINGLE); //
8362 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8363 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8364 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8365 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
8366 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8367 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8368 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
8369 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
8371 /* Double precision floating-point operations */
8372 /* Arithmetic */
8373 GEN_SPEFPUOP_ARITH2_64_64(efdadd);
8374 GEN_SPEFPUOP_ARITH2_64_64(efdsub);
8375 GEN_SPEFPUOP_ARITH2_64_64(efdmul);
8376 GEN_SPEFPUOP_ARITH2_64_64(efddiv);
8377 static inline void gen_efdabs(DisasContext *ctx)
8379 if (unlikely(!ctx->spe_enabled)) {
8380 gen_exception(ctx, POWERPC_EXCP_SPEU);
8381 return;
8383 #if defined(TARGET_PPC64)
8384 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000000000000LL);
8385 #else
8386 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8387 tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
8388 #endif
8390 static inline void gen_efdnabs(DisasContext *ctx)
8392 if (unlikely(!ctx->spe_enabled)) {
8393 gen_exception(ctx, POWERPC_EXCP_SPEU);
8394 return;
8396 #if defined(TARGET_PPC64)
8397 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
8398 #else
8399 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8400 tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8401 #endif
8403 static inline void gen_efdneg(DisasContext *ctx)
8405 if (unlikely(!ctx->spe_enabled)) {
8406 gen_exception(ctx, POWERPC_EXCP_SPEU);
8407 return;
8409 #if defined(TARGET_PPC64)
8410 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
8411 #else
8412 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8413 tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8414 #endif
8417 /* Conversion */
8418 GEN_SPEFPUOP_CONV_64_32(efdcfui);
8419 GEN_SPEFPUOP_CONV_64_32(efdcfsi);
8420 GEN_SPEFPUOP_CONV_64_32(efdcfuf);
8421 GEN_SPEFPUOP_CONV_64_32(efdcfsf);
8422 GEN_SPEFPUOP_CONV_32_64(efdctui);
8423 GEN_SPEFPUOP_CONV_32_64(efdctsi);
8424 GEN_SPEFPUOP_CONV_32_64(efdctuf);
8425 GEN_SPEFPUOP_CONV_32_64(efdctsf);
8426 GEN_SPEFPUOP_CONV_32_64(efdctuiz);
8427 GEN_SPEFPUOP_CONV_32_64(efdctsiz);
8428 GEN_SPEFPUOP_CONV_64_32(efdcfs);
8429 GEN_SPEFPUOP_CONV_64_64(efdcfuid);
8430 GEN_SPEFPUOP_CONV_64_64(efdcfsid);
8431 GEN_SPEFPUOP_CONV_64_64(efdctuidz);
8432 GEN_SPEFPUOP_CONV_64_64(efdctsidz);
8434 /* Comparison */
8435 GEN_SPEFPUOP_COMP_64(efdcmpgt);
8436 GEN_SPEFPUOP_COMP_64(efdcmplt);
8437 GEN_SPEFPUOP_COMP_64(efdcmpeq);
8438 GEN_SPEFPUOP_COMP_64(efdtstgt);
8439 GEN_SPEFPUOP_COMP_64(efdtstlt);
8440 GEN_SPEFPUOP_COMP_64(efdtsteq);
8442 /* Opcodes definitions */
8443 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE); //
8444 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8445 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_DOUBLE); //
8446 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
8447 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE); //
8448 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8449 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE); //
8450 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, 0x00180000, PPC_SPE_DOUBLE); //
8451 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8452 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8453 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8454 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
8455 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
8456 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
8457 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE); //
8458 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
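/* Opcode table.  GEN_HANDLER(name, opc1, opc2, opc3, inval, type) records the
   primary and extended opcode fields, a mask of bits that must be zero in a
   valid encoding, and the insns_flags bit required for the handler to be
   registered.  GEN_HANDLER2 supplies an explicit mnemonic string, and the
   _E variants add a second (insns_flags2) requirement. */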
8460 static opcode_t opcodes[] = {
8461 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
8462 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
8463 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8464 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER),
8465 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8466 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
8467 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8468 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8469 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8470 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8471 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
8472 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
8473 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
8474 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
8475 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8476 #if defined(TARGET_PPC64)
8477 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
8478 #endif
8479 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
8480 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
8481 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8482 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8483 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8484 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
8485 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
8486 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
8487 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8488 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8489 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8490 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8491 GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB),
8492 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
8493 #if defined(TARGET_PPC64)
8494 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
8495 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
8496 #endif
8497 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8498 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8499 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8500 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
8501 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
8502 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
8503 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
8504 #if defined(TARGET_PPC64)
8505 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
8506 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
8507 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
8508 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
8509 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
8510 #endif
8511 GEN_HANDLER(frsqrtes, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES),
8512 GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8513 GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8514 GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT),
8515 GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT),
8516 GEN_HANDLER(fabs, 0x3F, 0x08, 0x08, 0x001F0000, PPC_FLOAT),
8517 GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT),
8518 GEN_HANDLER(fnabs, 0x3F, 0x08, 0x04, 0x001F0000, PPC_FLOAT),
8519 GEN_HANDLER(fneg, 0x3F, 0x08, 0x01, 0x001F0000, PPC_FLOAT),
8520 GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT),
8521 GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT),
8522 GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT),
8523 GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT),
8524 GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x00010000, PPC_FLOAT),
8525 GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT),
8526 #if defined(TARGET_PPC64)
8527 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
8528 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
8529 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
8530 #endif
8531 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8532 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8533 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
8534 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
8535 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
8536 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
8537 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO),
8538 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
8539 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
8540 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
8541 #if defined(TARGET_PPC64)
8542 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
8543 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
8544 #endif
8545 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
8546 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
8547 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8548 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8549 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
8550 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
8551 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
8552 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
8553 #if defined(TARGET_PPC64)
8554 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
8555 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
8556 #endif
8557 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW),
8558 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
8559 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8560 #if defined(TARGET_PPC64)
8561 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
8562 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
8563 #endif
8564 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
8565 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
8566 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
8567 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
8568 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
8569 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
8570 #if defined(TARGET_PPC64)
8571 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
8572 #endif
8573 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC),
8574 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC),
8575 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
8576 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
8577 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
8578 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE),
8579 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE),
8580 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
8581 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
8582 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC),
8583 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
8584 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
8585 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
8586 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
8587 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
8588 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
8589 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
8590 #if defined(TARGET_PPC64)
8591 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
8592 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
8593 PPC_SEGMENT_64B),
8594 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
8595 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
8596 PPC_SEGMENT_64B),
8597 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
8598 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
8599 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
8600 #endif
8601 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
8602 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x03FF0001, PPC_MEM_TLBIE),
8603 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE),
8604 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
8605 #if defined(TARGET_PPC64)
8606 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI),
8607 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
8608 #endif
8609 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
8610 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
8611 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
8612 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
8613 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
8614 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
8615 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
8616 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
8617 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
8618 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
8619 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
8620 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8621 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
8622 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
8623 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
8624 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
8625 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
8626 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
8627 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
8628 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8629 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
8630 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
8631 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
8632 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
8633 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
8634 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
8635 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
8636 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
8637 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
8638 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
8639 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
8640 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
8641 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
8642 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
8643 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
8644 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
8645 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
8646 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
8647 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
8648 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
8649 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
8650 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
8651 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
8652 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
8653 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
8654 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
8655 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
8656 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
8657 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
8658 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8659 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8660 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
8661 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
8662 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8663 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8664 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
8665 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
8666 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
8667 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
8668 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
8669 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
8670 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
8671 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
8672 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
8673 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
8674 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
8675 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
8676 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
8677 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
8678 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
8679 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
8680 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
8681 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
8682 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
8683 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
8684 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
8685 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
8686 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
8687 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
8688 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
8689 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
8690 PPC_NONE, PPC2_BOOKE206),
8691 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
8692 PPC_NONE, PPC2_BOOKE206),
8693 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
8694 PPC_NONE, PPC2_BOOKE206),
8695 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
8696 PPC_NONE, PPC2_BOOKE206),
8697 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
8698 PPC_NONE, PPC2_BOOKE206),
8699 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001,
8700 PPC_NONE, PPC2_PRCNTL),
8701 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001,
8702 PPC_NONE, PPC2_PRCNTL),
8703 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
8704 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
8705 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
8706 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
8707 PPC_BOOKE, PPC2_BOOKE206),
8708 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE),
8709 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
8710 PPC_BOOKE, PPC2_BOOKE206),
8711 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
8712 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
8713 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
8714 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
8715 GEN_HANDLER(vsldoi, 0x04, 0x16, 0xFF, 0x00000400, PPC_ALTIVEC),
8716 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
8717 GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE),
8718 GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE),
8719 GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE),
8720 GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE),
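/* The remaining entries reuse the generator macro names: each GEN_* macro is
   re-#defined to expand to a GEN_HANDLER entry, so the instruction lists
   below mirror the ones that emitted the translation functions earlier in
   the file without duplicating them by hand. */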
8722 #undef GEN_INT_ARITH_ADD
8723 #undef GEN_INT_ARITH_ADD_CONST
8724 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
8725 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
8726 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
8727 add_ca, compute_ca, compute_ov) \
8728 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
8729 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
8730 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
8731 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
8732 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
8733 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
8734 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
8735 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
8736 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
8737 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
8738 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
8740 #undef GEN_INT_ARITH_DIVW
8741 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
8742 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
8743 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
8744 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
8745 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
8746 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
8748 #if defined(TARGET_PPC64)
8749 #undef GEN_INT_ARITH_DIVD
8750 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
8751 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8752 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
8753 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
8754 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
8755 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
8757 #undef GEN_INT_ARITH_MUL_HELPER
8758 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
8759 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8760 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
8761 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
8762 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
8763 #endif
8765 #undef GEN_INT_ARITH_SUBF
8766 #undef GEN_INT_ARITH_SUBF_CONST
8767 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
8768 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
8769 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
8770 add_ca, compute_ca, compute_ov) \
8771 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
8772 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
8773 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
8774 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
8775 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
8776 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
8777 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
8778 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
8779 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
8780 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
8781 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
8783 #undef GEN_LOGICAL1
8784 #undef GEN_LOGICAL2
8785 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
8786 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
8787 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
8788 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
8789 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
8790 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
8791 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
8792 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
8793 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
8794 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
8795 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
8796 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
8797 #if defined(TARGET_PPC64)
8798 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
8799 #endif
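/* The 64-bit rotate instructions need two or four table entries because part
   of their shift/mask immediates overlaps the bits used here as the extended
   opcode. */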
8801 #if defined(TARGET_PPC64)
8802 #undef GEN_PPC64_R2
8803 #undef GEN_PPC64_R4
8804 #define GEN_PPC64_R2(name, opc1, opc2) \
8805 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8806 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8807 PPC_64B)
8808 #define GEN_PPC64_R4(name, opc1, opc2) \
8809 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8810 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
8811 PPC_64B), \
8812 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8813 PPC_64B), \
8814 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
8815 PPC_64B)
8816 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
8817 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
8818 GEN_PPC64_R4(rldic, 0x1E, 0x04),
8819 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
8820 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
8821 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
8822 #endif
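/* Floating-point arithmetic and rounding entries: the ACB/AB/AC macros
   register both the double-precision (opcode 0x3F) and single-precision
   (opcode 0x3B) forms of each operation. */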
8824 #undef _GEN_FLOAT_ACB
8825 #undef GEN_FLOAT_ACB
8826 #undef _GEN_FLOAT_AB
8827 #undef GEN_FLOAT_AB
8828 #undef _GEN_FLOAT_AC
8829 #undef GEN_FLOAT_AC
8830 #undef GEN_FLOAT_B
8831 #undef GEN_FLOAT_BS
8832 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
8833 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type)
8834 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
8835 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type), \
8836 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type)
8837 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
8838 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
8839 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
8840 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
8841 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
8842 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
8843 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
8844 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
8845 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
8846 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
8847 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
8848 GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type)
8849 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
8850 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type)
8852 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT),
8853 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT),
8854 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT),
8855 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT),
8856 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES),
8857 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE),
8858 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL),
8859 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT),
8860 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT),
8861 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT),
8862 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT),
8863 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT),
8864 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT),
8865 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT),
8866 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT),
8867 #if defined(TARGET_PPC64)
8868 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B),
8869 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B),
8870 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B),
8871 #endif
8872 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT),
8873 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT),
8874 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT),
8875 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT),
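/* GEN_LDS registers the four addressing forms of each integer load: D-form,
   D-form with update, X-form with update and X-form; the store and
   floating-point load/store macros below follow the same pattern. */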
8877 #undef GEN_LD
8878 #undef GEN_LDU
8879 #undef GEN_LDUX
8880 #undef GEN_LDX_E
8881 #undef GEN_LDS
8882 #define GEN_LD(name, ldop, opc, type) \
8883 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8884 #define GEN_LDU(name, ldop, opc, type) \
8885 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8886 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
8887 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8888 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2) \
8889 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
8890 #define GEN_LDS(name, ldop, op, type) \
8891 GEN_LD(name, ldop, op | 0x20, type) \
8892 GEN_LDU(name, ldop, op | 0x21, type) \
8893 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
8894 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
8896 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
8897 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
8898 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
8899 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
8900 #if defined(TARGET_PPC64)
8901 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
8902 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
8903 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B)
8904 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B)
8905 GEN_LDX_E(ldbr, ld64ur, 0x14, 0x10, PPC_NONE, PPC2_DBRX)
8906 #endif
8907 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
8908 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
8910 #undef GEN_ST
8911 #undef GEN_STU
8912 #undef GEN_STUX
8913 #undef GEN_STX_E
8914 #undef GEN_STS
8915 #define GEN_ST(name, stop, opc, type) \
8916 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8917 #define GEN_STU(name, stop, opc, type) \
8918 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
8919 #define GEN_STUX(name, stop, opc2, opc3, type) \
8920 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8921 #define GEN_STX_E(name, stop, opc2, opc3, type, type2) \
8922 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
8923 #define GEN_STS(name, stop, op, type) \
8924 GEN_ST(name, stop, op | 0x20, type) \
8925 GEN_STU(name, stop, op | 0x21, type) \
8926 GEN_STUX(name, stop, 0x17, op | 0x01, type) \
8927 GEN_STX(name, stop, 0x17, op | 0x00, type)
8929 GEN_STS(stb, st8, 0x06, PPC_INTEGER)
8930 GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
8931 GEN_STS(stw, st32, 0x04, PPC_INTEGER)
8932 #if defined(TARGET_PPC64)
8933 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B)
8934 GEN_STX(std, st64, 0x15, 0x04, PPC_64B)
8935 GEN_STX_E(stdbr, st64r, 0x14, 0x14, PPC_NONE, PPC2_DBRX)
8936 #endif
8937 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
8938 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
8940 #undef GEN_LDF
8941 #undef GEN_LDUF
8942 #undef GEN_LDUXF
8943 #undef GEN_LDXF
8944 #undef GEN_LDFS
8945 #define GEN_LDF(name, ldop, opc, type) \
8946 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8947 #define GEN_LDUF(name, ldop, opc, type) \
8948 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8949 #define GEN_LDUXF(name, ldop, opc, type) \
8950 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
8951 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
8952 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8953 #define GEN_LDFS(name, ldop, op, type) \
8954 GEN_LDF(name, ldop, op | 0x20, type) \
8955 GEN_LDUF(name, ldop, op | 0x21, type) \
8956 GEN_LDUXF(name, ldop, op | 0x01, type) \
8957 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
8959 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT)
8960 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT)
8962 #undef GEN_STF
8963 #undef GEN_STUF
8964 #undef GEN_STUXF
8965 #undef GEN_STXF
8966 #undef GEN_STFS
8967 #define GEN_STF(name, stop, opc, type) \
8968 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8969 #define GEN_STUF(name, stop, opc, type) \
8970 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8971 #define GEN_STUXF(name, stop, opc, type) \
8972 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
8973 #define GEN_STXF(name, stop, opc2, opc3, type) \
8974 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8975 #define GEN_STFS(name, stop, op, type) \
8976 GEN_STF(name, stop, op | 0x20, type) \
8977 GEN_STUF(name, stop, op | 0x21, type) \
8978 GEN_STUXF(name, stop, op | 0x01, type) \
8979 GEN_STXF(name, stop, 0x17, op | 0x00, type)
8981 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT)
8982 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT)
8983 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX)
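/* Condition register logical operations map one-to-one onto 32-bit TCG
   logical ops applied to the selected CR bits. */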
8985 #undef GEN_CRLOGIC
8986 #define GEN_CRLOGIC(name, tcg_op, opc) \
8987 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
8988 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
8989 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
8990 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
8991 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
8992 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
8993 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
8994 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
8995 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
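/* PowerPC 405 multiply-accumulate instructions; the opc3 variants select the
   unsigned, saturating and overflow-recording forms of each operation. */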
8997 #undef GEN_MAC_HANDLER
8998 #define GEN_MAC_HANDLER(name, opc2, opc3) \
8999 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
9000 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
9001 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
9002 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
9003 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
9004 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
9005 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
9006 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
9007 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
9008 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
9009 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
9010 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
9011 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
9012 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
9013 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
9014 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
9015 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
9016 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
9017 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
9018 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
9019 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
9020 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
9021 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
9022 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
9023 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
9024 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
9025 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
9026 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
9027 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
9028 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
9029 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
9030 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
9031 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
9032 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
9033 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
9034 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
9035 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
9036 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
9037 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
9038 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
9039 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
9040 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
9041 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
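/* AltiVec memory access entries under primary opcode 0x1F: full vector
   loads/stores (including the LRU variants) and the load/store-vector-element
   forms. */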
9043 #undef GEN_VR_LDX
9044 #undef GEN_VR_STX
9045 #undef GEN_VR_LVE
9046 #undef GEN_VR_STVE
9047 #define GEN_VR_LDX(name, opc2, opc3) \
9048 GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
9049 #define GEN_VR_STX(name, opc2, opc3) \
9050 GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
9051 #define GEN_VR_LVE(name, opc2, opc3) \
9052 GEN_HANDLER(lve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
9053 #define GEN_VR_STVE(name, opc2, opc3) \
9054 GEN_HANDLER(stve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
9055 GEN_VR_LDX(lvx, 0x07, 0x03),
9056 GEN_VR_LDX(lvxl, 0x07, 0x0B),
9057 GEN_VR_LVE(bx, 0x07, 0x00),
9058 GEN_VR_LVE(hx, 0x07, 0x01),
9059 GEN_VR_LVE(wx, 0x07, 0x02),
9060 GEN_VR_STX(svx, 0x07, 0x07),
9061 GEN_VR_STX(svxl, 0x07, 0x0F),
9062 GEN_VR_STVE(bx, 0x07, 0x04),
9063 GEN_VR_STVE(hx, 0x07, 0x05),
9064 GEN_VR_STVE(wx, 0x07, 0x06),
9066 #undef GEN_VX_LOGICAL
9067 #define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
9068 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9069 GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16),
9070 GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17),
9071 GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18),
9072 GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19),
9073 GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20),
9075 #undef GEN_VXFORM
9076 #define GEN_VXFORM(name, opc2, opc3) \
9077 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9078 GEN_VXFORM(vaddubm, 0, 0),
9079 GEN_VXFORM(vadduhm, 0, 1),
9080 GEN_VXFORM(vadduwm, 0, 2),
9081 GEN_VXFORM(vsububm, 0, 16),
9082 GEN_VXFORM(vsubuhm, 0, 17),
9083 GEN_VXFORM(vsubuwm, 0, 18),
9084 GEN_VXFORM(vmaxub, 1, 0),
9085 GEN_VXFORM(vmaxuh, 1, 1),
9086 GEN_VXFORM(vmaxuw, 1, 2),
9087 GEN_VXFORM(vmaxsb, 1, 4),
9088 GEN_VXFORM(vmaxsh, 1, 5),
9089 GEN_VXFORM(vmaxsw, 1, 6),
9090 GEN_VXFORM(vminub, 1, 8),
9091 GEN_VXFORM(vminuh, 1, 9),
9092 GEN_VXFORM(vminuw, 1, 10),
9093 GEN_VXFORM(vminsb, 1, 12),
9094 GEN_VXFORM(vminsh, 1, 13),
9095 GEN_VXFORM(vminsw, 1, 14),
9096 GEN_VXFORM(vavgub, 1, 16),
9097 GEN_VXFORM(vavguh, 1, 17),
9098 GEN_VXFORM(vavguw, 1, 18),
9099 GEN_VXFORM(vavgsb, 1, 20),
9100 GEN_VXFORM(vavgsh, 1, 21),
9101 GEN_VXFORM(vavgsw, 1, 22),
9102 GEN_VXFORM(vmrghb, 6, 0),
9103 GEN_VXFORM(vmrghh, 6, 1),
9104 GEN_VXFORM(vmrghw, 6, 2),
9105 GEN_VXFORM(vmrglb, 6, 4),
9106 GEN_VXFORM(vmrglh, 6, 5),
9107 GEN_VXFORM(vmrglw, 6, 6),
9108 GEN_VXFORM(vmuloub, 4, 0),
9109 GEN_VXFORM(vmulouh, 4, 1),
9110 GEN_VXFORM(vmulosb, 4, 4),
9111 GEN_VXFORM(vmulosh, 4, 5),
9112 GEN_VXFORM(vmuleub, 4, 8),
9113 GEN_VXFORM(vmuleuh, 4, 9),
9114 GEN_VXFORM(vmulesb, 4, 12),
9115 GEN_VXFORM(vmulesh, 4, 13),
9116 GEN_VXFORM(vslb, 2, 4),
9117 GEN_VXFORM(vslh, 2, 5),
9118 GEN_VXFORM(vslw, 2, 6),
9119 GEN_VXFORM(vsrb, 2, 8),
9120 GEN_VXFORM(vsrh, 2, 9),
9121 GEN_VXFORM(vsrw, 2, 10),
9122 GEN_VXFORM(vsrab, 2, 12),
9123 GEN_VXFORM(vsrah, 2, 13),
9124 GEN_VXFORM(vsraw, 2, 14),
9125 GEN_VXFORM(vslo, 6, 16),
9126 GEN_VXFORM(vsro, 6, 17),
9127 GEN_VXFORM(vaddcuw, 0, 6),
9128 GEN_VXFORM(vsubcuw, 0, 22),
9129 GEN_VXFORM(vaddubs, 0, 8),
9130 GEN_VXFORM(vadduhs, 0, 9),
9131 GEN_VXFORM(vadduws, 0, 10),
9132 GEN_VXFORM(vaddsbs, 0, 12),
9133 GEN_VXFORM(vaddshs, 0, 13),
9134 GEN_VXFORM(vaddsws, 0, 14),
9135 GEN_VXFORM(vsububs, 0, 24),
9136 GEN_VXFORM(vsubuhs, 0, 25),
9137 GEN_VXFORM(vsubuws, 0, 26),
9138 GEN_VXFORM(vsubsbs, 0, 28),
9139 GEN_VXFORM(vsubshs, 0, 29),
9140 GEN_VXFORM(vsubsws, 0, 30),
9141 GEN_VXFORM(vrlb, 2, 0),
9142 GEN_VXFORM(vrlh, 2, 1),
9143 GEN_VXFORM(vrlw, 2, 2),
9144 GEN_VXFORM(vsl, 2, 7),
9145 GEN_VXFORM(vsr, 2, 11),
9146 GEN_VXFORM(vpkuhum, 7, 0),
9147 GEN_VXFORM(vpkuwum, 7, 1),
9148 GEN_VXFORM(vpkuhus, 7, 2),
9149 GEN_VXFORM(vpkuwus, 7, 3),
9150 GEN_VXFORM(vpkshus, 7, 4),
9151 GEN_VXFORM(vpkswus, 7, 5),
9152 GEN_VXFORM(vpkshss, 7, 6),
9153 GEN_VXFORM(vpkswss, 7, 7),
9154 GEN_VXFORM(vpkpx, 7, 12),
9155 GEN_VXFORM(vsum4ubs, 4, 24),
9156 GEN_VXFORM(vsum4sbs, 4, 28),
9157 GEN_VXFORM(vsum4shs, 4, 25),
9158 GEN_VXFORM(vsum2sws, 4, 26),
9159 GEN_VXFORM(vsumsws, 4, 30),
9160 GEN_VXFORM(vaddfp, 5, 0),
9161 GEN_VXFORM(vsubfp, 5, 1),
9162 GEN_VXFORM(vmaxfp, 5, 16),
9163 GEN_VXFORM(vminfp, 5, 17),
9165 #undef GEN_VXRFORM1
9166 #undef GEN_VXRFORM
9167 #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
9168 GEN_HANDLER2(name, str, 0x4, opc2, opc3, 0x00000000, PPC_ALTIVEC),
9169 #define GEN_VXRFORM(name, opc2, opc3) \
9170 GEN_VXRFORM1(name, name, #name, opc2, opc3) \
9171 GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
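/* Each VXR-form compare is registered twice: the plain form and the record
 * form (mnemonic with a trailing dot), distinguished by bit 4 of opc3. */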
9172 GEN_VXRFORM(vcmpequb, 3, 0)
9173 GEN_VXRFORM(vcmpequh, 3, 1)
9174 GEN_VXRFORM(vcmpequw, 3, 2)
9175 GEN_VXRFORM(vcmpgtsb, 3, 12)
9176 GEN_VXRFORM(vcmpgtsh, 3, 13)
9177 GEN_VXRFORM(vcmpgtsw, 3, 14)
9178 GEN_VXRFORM(vcmpgtub, 3, 8)
9179 GEN_VXRFORM(vcmpgtuh, 3, 9)
9180 GEN_VXRFORM(vcmpgtuw, 3, 10)
9181 GEN_VXRFORM(vcmpeqfp, 3, 3)
9182 GEN_VXRFORM(vcmpgefp, 3, 7)
9183 GEN_VXRFORM(vcmpgtfp, 3, 11)
9184 GEN_VXRFORM(vcmpbfp, 3, 15)
9186 #undef GEN_VXFORM_SIMM
9187 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
9188 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9189 GEN_VXFORM_SIMM(vspltisb, 6, 12),
9190 GEN_VXFORM_SIMM(vspltish, 6, 13),
9191 GEN_VXFORM_SIMM(vspltisw, 6, 14),
9193 #undef GEN_VXFORM_NOA
9194 #define GEN_VXFORM_NOA(name, opc2, opc3) \
9195 GEN_HANDLER(name, 0x04, opc2, opc3, 0x001f0000, PPC_ALTIVEC)
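/* VX forms with no vA operand: the 0x001f0000 invalid-bits mask forces the
 * unused register field (bits 11-15) to be zero. */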
9196 GEN_VXFORM_NOA(vupkhsb, 7, 8),
9197 GEN_VXFORM_NOA(vupkhsh, 7, 9),
9198 GEN_VXFORM_NOA(vupklsb, 7, 10),
9199 GEN_VXFORM_NOA(vupklsh, 7, 11),
9200 GEN_VXFORM_NOA(vupkhpx, 7, 13),
9201 GEN_VXFORM_NOA(vupklpx, 7, 15),
9202 GEN_VXFORM_NOA(vrefp, 5, 4),
9203 GEN_VXFORM_NOA(vrsqrtefp, 5, 5),
9204 GEN_VXFORM_NOA(vexptefp, 5, 6),
9205 GEN_VXFORM_NOA(vlogefp, 5, 7),
9206 GEN_VXFORM_NOA(vrfim, 5, 8),
9207 GEN_VXFORM_NOA(vrfin, 5, 9),
9208 GEN_VXFORM_NOA(vrfip, 5, 10),
9209 GEN_VXFORM_NOA(vrfiz, 5, 11),
9211 #undef GEN_VXFORM_UIMM
9212 #define GEN_VXFORM_UIMM(name, opc2, opc3) \
9213 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9214 GEN_VXFORM_UIMM(vspltb, 6, 8),
9215 GEN_VXFORM_UIMM(vsplth, 6, 9),
9216 GEN_VXFORM_UIMM(vspltw, 6, 10),
9217 GEN_VXFORM_UIMM(vcfux, 5, 12),
9218 GEN_VXFORM_UIMM(vcfsx, 5, 13),
9219 GEN_VXFORM_UIMM(vctuxs, 5, 14),
9220 GEN_VXFORM_UIMM(vctsxs, 5, 15),
9222 #undef GEN_VAFORM_PAIRED
9223 #define GEN_VAFORM_PAIRED(name0, name1, opc2) \
9224 GEN_HANDLER(name0##_##name1, 0x04, opc2, 0xFF, 0x00000000, PPC_ALTIVEC)
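/* VA-form entries are registered in pairs that share one handler; which of
 * the two instructions is generated is chosen from the opcode at translation
 * time by the paired gen_* handlers defined earlier in this file. */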
9225 GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16),
9226 GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18),
9227 GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19),
9228 GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20),
9229 GEN_VAFORM_PAIRED(vsel, vperm, 21),
9230 GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23),
9232 #undef GEN_SPE
9233 #define GEN_SPE(name0, name1, opc2, opc3, inval0, inval1, type) \
9234 GEN_OPCODE_DUAL(name0##_##name1, 0x04, opc2, opc3, inval0, inval1, type, PPC_NONE)
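/* GEN_OPCODE_DUAL registers two SPE instructions per entry; the Rc bit of
 * the opcode selects which of the pair is generated and which invalid-bits
 * mask (inval0 or inval1) is checked in the decode loop below. */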
9235 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9236 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9237 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9238 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9239 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
9240 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
9241 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
9242 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x0000F800, 0x00000000, PPC_SPE),
9243 GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, 0xFFFFFFFF, PPC_SPE),
9244 GEN_SPE(speundef, evand, 0x08, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE),
9245 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9246 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9247 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9248 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, 0x00000000, PPC_SPE),
9249 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, 0x00000000, PPC_SPE),
9250 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, 0x00000000, PPC_SPE),
9251 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE),
9252 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9253 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9254 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9255 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9256 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
9257 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, 0x0000F800, PPC_SPE),
9258 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, 0x0000F800, PPC_SPE),
9259 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9260 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, 0x00000000, PPC_SPE),
9261 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, 0x00600000, PPC_SPE),
9262 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, 0x00600000, PPC_SPE),
9263 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, 0xFFFFFFFF, PPC_SPE),
9265 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
9266 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE),
9267 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE),
9268 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
9269 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
9270 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9271 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9272 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9273 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9274 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9275 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9276 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9277 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
9278 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9280 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
9281 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE),
9282 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE),
9283 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
9284 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
9285 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, 0x00180000, PPC_SPE_SINGLE),
9286 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9287 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9288 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9289 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
9290 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9291 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9292 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
9293 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
9295 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE),
9296 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9297 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_DOUBLE),
9298 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_DOUBLE),
9299 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE),
9300 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9301 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE),
9302 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, 0x00180000, PPC_SPE_DOUBLE),
9303 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9304 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9305 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9306 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
9307 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
9308 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
9309 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE),
9310 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
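/* SPE vector load/store instructions; the last GEN_SPEOP_LDST argument is
 * presumably log2 of the access size (3 = doubleword, 2 = word, 1 = halfword). */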
9312 #undef GEN_SPEOP_LDST
9313 #define GEN_SPEOP_LDST(name, opc2, sh) \
9314 GEN_HANDLER(name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE)
9315 GEN_SPEOP_LDST(evldd, 0x00, 3),
9316 GEN_SPEOP_LDST(evldw, 0x01, 3),
9317 GEN_SPEOP_LDST(evldh, 0x02, 3),
9318 GEN_SPEOP_LDST(evlhhesplat, 0x04, 1),
9319 GEN_SPEOP_LDST(evlhhousplat, 0x06, 1),
9320 GEN_SPEOP_LDST(evlhhossplat, 0x07, 1),
9321 GEN_SPEOP_LDST(evlwhe, 0x08, 2),
9322 GEN_SPEOP_LDST(evlwhou, 0x0A, 2),
9323 GEN_SPEOP_LDST(evlwhos, 0x0B, 2),
9324 GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2),
9325 GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2),
9327 GEN_SPEOP_LDST(evstdd, 0x10, 3),
9328 GEN_SPEOP_LDST(evstdw, 0x11, 3),
9329 GEN_SPEOP_LDST(evstdh, 0x12, 3),
9330 GEN_SPEOP_LDST(evstwhe, 0x18, 2),
9331 GEN_SPEOP_LDST(evstwho, 0x1A, 2),
9332 GEN_SPEOP_LDST(evstwwe, 0x1C, 2),
9333 GEN_SPEOP_LDST(evstwwo, 0x1E, 2),
9336 #include "helper_regs.h"
9337 #include "translate_init.c"
9339 /*****************************************************************************/
9340 /* Misc PowerPC helpers */
9341 void cpu_dump_state (CPUPPCState *env, FILE *f, fprintf_function cpu_fprintf,
9342 int flags)
9344 #define RGPL 4
9345 #define RFPL 4
9347 int i;
9349 cpu_synchronize_state(env);
9351 cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
9352 TARGET_FMT_lx " XER " TARGET_FMT_lx "\n",
9353 env->nip, env->lr, env->ctr, cpu_read_xer(env));
9354 cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
9355 TARGET_FMT_lx " idx %d\n", env->msr, env->spr[SPR_HID0],
9356 env->hflags, env->mmu_idx);
9357 #if !defined(NO_TIMER_DUMP)
9358 cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
9359 #if !defined(CONFIG_USER_ONLY)
9360 " DECR %08" PRIu32
9361 #endif
9362 "\n",
9363 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
9364 #if !defined(CONFIG_USER_ONLY)
9365 , cpu_ppc_load_decr(env)
9366 #endif
9368 #endif
9369 for (i = 0; i < 32; i++) {
9370 if ((i & (RGPL - 1)) == 0)
9371 cpu_fprintf(f, "GPR%02d", i);
9372 cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
9373 if ((i & (RGPL - 1)) == (RGPL - 1))
9374 cpu_fprintf(f, "\n");
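/* Dump the CR fields as hex nibbles, then decoded (L = LT, G = GT, E = EQ,
 * O = SO) */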
9376 cpu_fprintf(f, "CR ");
9377 for (i = 0; i < 8; i++)
9378 cpu_fprintf(f, "%01x", env->crf[i]);
9379 cpu_fprintf(f, " [");
9380 for (i = 0; i < 8; i++) {
9381 char a = '-';
9382 if (env->crf[i] & 0x08)
9383 a = 'L';
9384 else if (env->crf[i] & 0x04)
9385 a = 'G';
9386 else if (env->crf[i] & 0x02)
9387 a = 'E';
9388 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
9390 cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
9391 env->reserve_addr);
9392 for (i = 0; i < 32; i++) {
9393 if ((i & (RFPL - 1)) == 0)
9394 cpu_fprintf(f, "FPR%02d", i);
9395 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
9396 if ((i & (RFPL - 1)) == (RFPL - 1))
9397 cpu_fprintf(f, "\n");
9399 cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr);
9400 #if !defined(CONFIG_USER_ONLY)
9401 cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx
9402 " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n",
9403 env->spr[SPR_SRR0], env->spr[SPR_SRR1],
9404 env->spr[SPR_PVR], env->spr[SPR_VRSAVE]);
9406 cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx
9407 " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n",
9408 env->spr[SPR_SPRG0], env->spr[SPR_SPRG1],
9409 env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]);
9411 cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx
9412 " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n",
9413 env->spr[SPR_SPRG4], env->spr[SPR_SPRG5],
9414 env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]);
9416 if (env->excp_model == POWERPC_EXCP_BOOKE) {
9417 cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx
9418 " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n",
9419 env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1],
9420 env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]);
9422 cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx
9423 " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n",
9424 env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR],
9425 env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]);
9427 cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx
9428 " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n",
9429 env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR],
9430 env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]);
9432 cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx
9433 " EPR " TARGET_FMT_lx "\n",
9434 env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8],
9435 env->spr[SPR_BOOKE_EPR]);
9437 /* FSL-specific */
9438 cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx
9439 " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n",
9440 env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1],
9441 env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]);
9444 /* IVORs are left out as they are large and do not change often --
9445  * they can be read with "p $ivor0", "p $ivor1", etc. */
9449 #if defined(TARGET_PPC64)
9450 if (env->flags & POWERPC_FLAG_CFAR) {
9451 cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar);
9453 #endif
9455 switch (env->mmu_model) {
9456 case POWERPC_MMU_32B:
9457 case POWERPC_MMU_601:
9458 case POWERPC_MMU_SOFT_6xx:
9459 case POWERPC_MMU_SOFT_74xx:
9460 #if defined(TARGET_PPC64)
9461 case POWERPC_MMU_64B:
9462 #endif
9463 cpu_fprintf(f, " SDR1 " TARGET_FMT_lx "\n", env->spr[SPR_SDR1]);
9464 break;
9465 case POWERPC_MMU_BOOKE206:
9466 cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx
9467 " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n",
9468 env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1],
9469 env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]);
9471 cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx
9472 " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n",
9473 env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6],
9474 env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]);
9476 cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx
9477 " TLB1CFG " TARGET_FMT_lx "\n",
9478 env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG],
9479 env->spr[SPR_BOOKE_TLB1CFG]);
9480 break;
9481 default:
9482 break;
9484 #endif
9486 #undef RGPL
9487 #undef RFPL
9490 void cpu_dump_statistics (CPUPPCState *env, FILE*f, fprintf_function cpu_fprintf,
9491 int flags)
9493 #if defined(DO_PPC_STATISTICS)
9494 opc_handler_t **t1, **t2, **t3, *handler;
9495 int op1, op2, op3;
9497 t1 = env->opcodes;
9498 for (op1 = 0; op1 < 64; op1++) {
9499 handler = t1[op1];
9500 if (is_indirect_opcode(handler)) {
9501 t2 = ind_table(handler);
9502 for (op2 = 0; op2 < 32; op2++) {
9503 handler = t2[op2];
9504 if (is_indirect_opcode(handler)) {
9505 t3 = ind_table(handler);
9506 for (op3 = 0; op3 < 32; op3++) {
9507 handler = t3[op3];
9508 if (handler->count == 0)
9509 continue;
9510 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
9511 "%016" PRIx64 " %" PRId64 "\n",
9512 op1, op2, op3, op1, (op3 << 5) | op2,
9513 handler->oname,
9514 handler->count, handler->count);
9516 } else {
9517 if (handler->count == 0)
9518 continue;
9519 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
9520 "%016" PRIx64 " %" PRId64 "\n",
9521 op1, op2, op1, op2, handler->oname,
9522 handler->count, handler->count);
9525 } else {
9526 if (handler->count == 0)
9527 continue;
9528 cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
9529 " %" PRId64 "\n",
9530 op1, op1, handler->oname,
9531 handler->count, handler->count);
9534 #endif
9537 /*****************************************************************************/
9538 static inline void gen_intermediate_code_internal(CPUPPCState *env,
9539 TranslationBlock *tb,
9540 int search_pc)
9542 DisasContext ctx, *ctxp = &ctx;
9543 opc_handler_t **table, *handler;
9544 target_ulong pc_start;
9545 uint16_t *gen_opc_end;
9546 CPUBreakpoint *bp;
9547 int j, lj = -1;
9548 int num_insns;
9549 int max_insns;
9551 pc_start = tb->pc;
9552 gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
9553 ctx.nip = pc_start;
9554 ctx.tb = tb;
9555 ctx.exception = POWERPC_EXCP_NONE;
9556 ctx.spr_cb = env->spr_cb;
9557 ctx.mem_idx = env->mmu_idx;
9558 ctx.access_type = -1;
9559 ctx.le_mode = env->hflags & (1 << MSR_LE) ? 1 : 0;
9560 #if defined(TARGET_PPC64)
9561 ctx.sf_mode = msr_is_64bit(env, env->msr);
9562 ctx.has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
9563 #endif
9564 ctx.fpu_enabled = msr_fp;
9565 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
9566 ctx.spe_enabled = msr_spe;
9567 else
9568 ctx.spe_enabled = 0;
9569 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
9570 ctx.altivec_enabled = msr_vr;
9571 else
9572 ctx.altivec_enabled = 0;
9573 if ((env->flags & POWERPC_FLAG_SE) && msr_se)
9574 ctx.singlestep_enabled = CPU_SINGLE_STEP;
9575 else
9576 ctx.singlestep_enabled = 0;
9577 if ((env->flags & POWERPC_FLAG_BE) && msr_be)
9578 ctx.singlestep_enabled |= CPU_BRANCH_STEP;
9579 if (unlikely(env->singlestep_enabled))
9580 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
9581 #if defined (DO_SINGLE_STEP) && 0
9582 /* Single step trace mode */
9583 msr_se = 1;
9584 #endif
9585 num_insns = 0;
9586 max_insns = tb->cflags & CF_COUNT_MASK;
9587 if (max_insns == 0)
9588 max_insns = CF_COUNT_MASK;
9590 gen_tb_start();
9591 /* Set env in case of segfault during code fetch */
9592 while (ctx.exception == POWERPC_EXCP_NONE
9593 && tcg_ctx.gen_opc_ptr < gen_opc_end) {
9594 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
9595 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
9596 if (bp->pc == ctx.nip) {
9597 gen_debug_exception(ctxp);
9598 break;
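/* In search_pc mode (gen_intermediate_code_pc), record the guest PC and
 * instruction count for each TCG op so the CPU state can later be rebuilt
 * from a host PC (see restore_state_to_opc below). */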
9602 if (unlikely(search_pc)) {
9603 j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
9604 if (lj < j) {
9605 lj++;
9606 while (lj < j)
9607 tcg_ctx.gen_opc_instr_start[lj++] = 0;
9609 tcg_ctx.gen_opc_pc[lj] = ctx.nip;
9610 tcg_ctx.gen_opc_instr_start[lj] = 1;
9611 tcg_ctx.gen_opc_icount[lj] = num_insns;
9613 LOG_DISAS("----------------\n");
9614 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
9615 ctx.nip, ctx.mem_idx, (int)msr_ir);
9616 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
9617 gen_io_start();
9618 if (unlikely(ctx.le_mode)) {
9619 ctx.opcode = bswap32(cpu_ldl_code(env, ctx.nip));
9620 } else {
9621 ctx.opcode = cpu_ldl_code(env, ctx.nip);
9623 LOG_DISAS("translate opcode %08x (%02x %02x %02x) (%s)\n",
9624 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
9625 opc3(ctx.opcode), ctx.le_mode ? "little" : "big");
9626 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
9627 tcg_gen_debug_insn_start(ctx.nip);
9629 ctx.nip += 4;
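/* Decode: opc1 indexes the top-level opcode table; indirect entries are
 * followed through the opc2 and opc3 sub-tables. */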
9630 table = env->opcodes;
9631 num_insns++;
9632 handler = table[opc1(ctx.opcode)];
9633 if (is_indirect_opcode(handler)) {
9634 table = ind_table(handler);
9635 handler = table[opc2(ctx.opcode)];
9636 if (is_indirect_opcode(handler)) {
9637 table = ind_table(handler);
9638 handler = table[opc3(ctx.opcode)];
9641 /* Is opcode *REALLY* valid ? */
9642 if (unlikely(handler->handler == &gen_invalid)) {
9643 if (qemu_log_enabled()) {
9644 qemu_log("invalid/unsupported opcode: "
9645 "%02x - %02x - %02x (%08x) " TARGET_FMT_lx " %d\n",
9646 opc1(ctx.opcode), opc2(ctx.opcode),
9647 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
9649 } else {
9650 uint32_t inval;
9652 if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE) && Rc(ctx.opcode))) {
9653 inval = handler->inval2;
9654 } else {
9655 inval = handler->inval1;
9658 if (unlikely((ctx.opcode & inval) != 0)) {
9659 if (qemu_log_enabled()) {
9660 qemu_log("invalid bits: %08x for opcode: "
9661 "%02x - %02x - %02x (%08x) " TARGET_FMT_lx "\n",
9662 ctx.opcode & inval, opc1(ctx.opcode),
9663 opc2(ctx.opcode), opc3(ctx.opcode),
9664 ctx.opcode, ctx.nip - 4);
9666 gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL);
9667 break;
9670 (*(handler->handler))(&ctx);
9671 #if defined(DO_PPC_STATISTICS)
9672 handler->count++;
9673 #endif
9674 /* Check trace mode exceptions */
9675 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
9676 (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
9677 ctx.exception != POWERPC_SYSCALL &&
9678 ctx.exception != POWERPC_EXCP_TRAP &&
9679 ctx.exception != POWERPC_EXCP_BRANCH)) {
9680 gen_exception(ctxp, POWERPC_EXCP_TRACE);
9681 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
9682 (env->singlestep_enabled) ||
9683 singlestep ||
9684 num_insns >= max_insns)) {
9685 /* if we reach a page boundary or are single stepping, stop
9686 * generation */
9688 break;
9691 if (tb->cflags & CF_LAST_IO)
9692 gen_io_end();
9693 if (ctx.exception == POWERPC_EXCP_NONE) {
9694 gen_goto_tb(&ctx, 0, ctx.nip);
9695 } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
9696 if (unlikely(env->singlestep_enabled)) {
9697 gen_debug_exception(ctxp);
9699 /* Generate the return instruction */
9700 tcg_gen_exit_tb(0);
9702 gen_tb_end(tb, num_insns);
9703 *tcg_ctx.gen_opc_ptr = INDEX_op_end;
9704 if (unlikely(search_pc)) {
9705 j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
9706 lj++;
9707 while (lj <= j)
9708 tcg_ctx.gen_opc_instr_start[lj++] = 0;
9709 } else {
9710 tb->size = ctx.nip - pc_start;
9711 tb->icount = num_insns;
9713 #if defined(DEBUG_DISAS)
9714 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
9715 int flags;
9716 flags = env->bfd_mach;
9717 flags |= ctx.le_mode << 16;
9718 qemu_log("IN: %s\n", lookup_symbol(pc_start));
9719 log_target_disas(env, pc_start, ctx.nip - pc_start, flags);
9720 qemu_log("\n");
9722 #endif
9725 void gen_intermediate_code (CPUPPCState *env, struct TranslationBlock *tb)
9727 gen_intermediate_code_internal(env, tb, 0);
9730 void gen_intermediate_code_pc (CPUPPCState *env, struct TranslationBlock *tb)
9732 gen_intermediate_code_internal(env, tb, 1);
9735 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb, int pc_pos)
9737 env->nip = tcg_ctx.gen_opc_pc[pc_pos];