[qemu.git] / target-ppc / translate.c
1 /*
2 * PowerPC emulation for qemu: main translation routines.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
18 */
19 #include <stdarg.h>
20 #include <stdlib.h>
21 #include <stdio.h>
22 #include <string.h>
23 #include <inttypes.h>
25 #include "cpu.h"
26 #include "exec-all.h"
27 #include "disas.h"
28 #include "tcg-op.h"
29 #include "qemu-common.h"
30 #include "host-utils.h"
32 #include "helper.h"
33 #define GEN_HELPER 1
34 #include "helper.h"
36 #define CPU_SINGLE_STEP 0x1
37 #define CPU_BRANCH_STEP 0x2
38 #define GDBSTUB_SINGLE_STEP 0x4
40 /* Include definitions for instruction classes and implementation flags */
41 //#define PPC_DEBUG_DISAS
42 //#define DO_PPC_STATISTICS
44 #ifdef PPC_DEBUG_DISAS
45 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
46 #else
47 # define LOG_DISAS(...) do { } while (0)
48 #endif
49 /*****************************************************************************/
50 /* Code translation helpers */
52 /* global register indexes */
53 static TCGv_ptr cpu_env;
54 static char cpu_reg_names[10*3 + 22*4 /* GPR */
55 #if !defined(TARGET_PPC64)
56 + 10*4 + 22*5 /* SPE GPRh */
57 #endif
58 + 10*4 + 22*5 /* FPR */
59 + 2*(10*6 + 22*7) /* AVRh, AVRl */
60 + 8*5 /* CRF */];
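/*
 * Editor's sketch (not part of the original file): the size of
 * cpu_reg_names[] above is just the sum of the NUL-terminated strings that
 * the snprintf() calls in ppc_translate_init() will write -- "r0".."r9"
 * need 3 bytes each and "r10".."r31" need 4, hence 10*3 + 22*4 for the
 * GPRs, and likewise for the "fp%d", "avr%dH"/"avr%dL" and "crf%d"
 * patterns.  The hypothetical helper below restates that arithmetic.
 */
static inline size_t example_reg_name_bytes(const char *prefix, int nregs,
                                            int suffix_len)
{
    size_t one_digit = strlen(prefix) + 1 + suffix_len + 1; /* "+1" for NUL */
    size_t two_digit = strlen(prefix) + 2 + suffix_len + 1;
    int n_one = nregs < 10 ? nregs : 10;
    int n_two = nregs > 10 ? nregs - 10 : 0;
    return n_one * one_digit + n_two * two_digit;
}
/* example_reg_name_bytes("r", 32, 0) == 10*3 + 22*4 == 118 bytes */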
61 static TCGv cpu_gpr[32];
62 #if !defined(TARGET_PPC64)
63 static TCGv cpu_gprh[32];
64 #endif
65 static TCGv_i64 cpu_fpr[32];
66 static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
67 static TCGv_i32 cpu_crf[8];
68 static TCGv cpu_nip;
69 static TCGv cpu_msr;
70 static TCGv cpu_ctr;
71 static TCGv cpu_lr;
72 static TCGv cpu_xer;
73 static TCGv cpu_reserve;
74 static TCGv_i32 cpu_fpscr;
75 static TCGv_i32 cpu_access_type;
77 #include "gen-icount.h"
79 void ppc_translate_init(void)
81 int i;
82 char* p;
83 size_t cpu_reg_names_size;
84 static int done_init = 0;
86 if (done_init)
87 return;
89 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
91 p = cpu_reg_names;
92 cpu_reg_names_size = sizeof(cpu_reg_names);
94 for (i = 0; i < 8; i++) {
95 snprintf(p, cpu_reg_names_size, "crf%d", i);
96 cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
97 offsetof(CPUState, crf[i]), p);
98 p += 5;
99 cpu_reg_names_size -= 5;
102 for (i = 0; i < 32; i++) {
103 snprintf(p, cpu_reg_names_size, "r%d", i);
104 cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
105 offsetof(CPUState, gpr[i]), p);
106 p += (i < 10) ? 3 : 4;
107 cpu_reg_names_size -= (i < 10) ? 3 : 4;
108 #if !defined(TARGET_PPC64)
109 snprintf(p, cpu_reg_names_size, "r%dH", i);
110 cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
111 offsetof(CPUState, gprh[i]), p);
112 p += (i < 10) ? 4 : 5;
113 cpu_reg_names_size -= (i < 10) ? 4 : 5;
114 #endif
116 snprintf(p, cpu_reg_names_size, "fp%d", i);
117 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
118 offsetof(CPUState, fpr[i]), p);
119 p += (i < 10) ? 4 : 5;
120 cpu_reg_names_size -= (i < 10) ? 4 : 5;
122 snprintf(p, cpu_reg_names_size, "avr%dH", i);
123 #ifdef HOST_WORDS_BIGENDIAN
124 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
125 offsetof(CPUState, avr[i].u64[0]), p);
126 #else
127 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
128 offsetof(CPUState, avr[i].u64[1]), p);
129 #endif
130 p += (i < 10) ? 6 : 7;
131 cpu_reg_names_size -= (i < 10) ? 6 : 7;
133 snprintf(p, cpu_reg_names_size, "avr%dL", i);
134 #ifdef HOST_WORDS_BIGENDIAN
135 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
136 offsetof(CPUState, avr[i].u64[1]), p);
137 #else
138 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
139 offsetof(CPUState, avr[i].u64[0]), p);
140 #endif
141 p += (i < 10) ? 6 : 7;
142 cpu_reg_names_size -= (i < 10) ? 6 : 7;
145 cpu_nip = tcg_global_mem_new(TCG_AREG0,
146 offsetof(CPUState, nip), "nip");
148 cpu_msr = tcg_global_mem_new(TCG_AREG0,
149 offsetof(CPUState, msr), "msr");
151 cpu_ctr = tcg_global_mem_new(TCG_AREG0,
152 offsetof(CPUState, ctr), "ctr");
154 cpu_lr = tcg_global_mem_new(TCG_AREG0,
155 offsetof(CPUState, lr), "lr");
157 cpu_xer = tcg_global_mem_new(TCG_AREG0,
158 offsetof(CPUState, xer), "xer");
160 cpu_reserve = tcg_global_mem_new(TCG_AREG0,
161 offsetof(CPUState, reserve_addr),
162 "reserve_addr");
164 cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
165 offsetof(CPUState, fpscr), "fpscr");
167 cpu_access_type = tcg_global_mem_new_i32(TCG_AREG0,
168 offsetof(CPUState, access_type), "access_type");
170 /* register helpers */
171 #define GEN_HELPER 2
172 #include "helper.h"
174 done_init = 1;
177 /* internal defines */
178 typedef struct DisasContext {
179 struct TranslationBlock *tb;
180 target_ulong nip;
181 uint32_t opcode;
182 uint32_t exception;
183 /* Routine used to access memory */
184 int mem_idx;
185 int access_type;
186 /* Translation flags */
187 int le_mode;
188 #if defined(TARGET_PPC64)
189 int sf_mode;
190 #endif
191 int fpu_enabled;
192 int altivec_enabled;
193 int spe_enabled;
194 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
195 int singlestep_enabled;
196 } DisasContext;
198 struct opc_handler_t {
199 /* invalid bits */
200 uint32_t inval;
201 /* instruction type */
202 uint64_t type;
203 /* handler */
204 void (*handler)(DisasContext *ctx);
205 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
206 const char *oname;
207 #endif
208 #if defined(DO_PPC_STATISTICS)
209 uint64_t count;
210 #endif
213 static inline void gen_reset_fpstatus(void)
215 #ifdef CONFIG_SOFTFLOAT
216 gen_helper_reset_fpstatus();
217 #endif
220 static inline void gen_compute_fprf(TCGv_i64 arg, int set_fprf, int set_rc)
222 TCGv_i32 t0 = tcg_temp_new_i32();
224 if (set_fprf != 0) {
225 /* This case might be optimized later */
226 tcg_gen_movi_i32(t0, 1);
227 gen_helper_compute_fprf(t0, arg, t0);
228 if (unlikely(set_rc)) {
229 tcg_gen_mov_i32(cpu_crf[1], t0);
231 gen_helper_float_check_status();
232 } else if (unlikely(set_rc)) {
233 /* We always need to compute fpcc */
234 tcg_gen_movi_i32(t0, 0);
235 gen_helper_compute_fprf(t0, arg, t0);
236 tcg_gen_mov_i32(cpu_crf[1], t0);
239 tcg_temp_free_i32(t0);
242 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
244 if (ctx->access_type != access_type) {
245 tcg_gen_movi_i32(cpu_access_type, access_type);
246 ctx->access_type = access_type;
250 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
252 #if defined(TARGET_PPC64)
253 if (ctx->sf_mode)
254 tcg_gen_movi_tl(cpu_nip, nip);
255 else
256 #endif
257 tcg_gen_movi_tl(cpu_nip, (uint32_t)nip);
260 static inline void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
262 TCGv_i32 t0, t1;
263 if (ctx->exception == POWERPC_EXCP_NONE) {
264 gen_update_nip(ctx, ctx->nip);
266 t0 = tcg_const_i32(excp);
267 t1 = tcg_const_i32(error);
268 gen_helper_raise_exception_err(t0, t1);
269 tcg_temp_free_i32(t0);
270 tcg_temp_free_i32(t1);
271 ctx->exception = (excp);
274 static inline void gen_exception(DisasContext *ctx, uint32_t excp)
276 TCGv_i32 t0;
277 if (ctx->exception == POWERPC_EXCP_NONE) {
278 gen_update_nip(ctx, ctx->nip);
280 t0 = tcg_const_i32(excp);
281 gen_helper_raise_exception(t0);
282 tcg_temp_free_i32(t0);
283 ctx->exception = (excp);
286 static inline void gen_debug_exception(DisasContext *ctx)
288 TCGv_i32 t0;
290 if (ctx->exception != POWERPC_EXCP_BRANCH)
291 gen_update_nip(ctx, ctx->nip);
292 t0 = tcg_const_i32(EXCP_DEBUG);
293 gen_helper_raise_exception(t0);
294 tcg_temp_free_i32(t0);
297 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
299 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_INVAL | error);
302 /* Stop translation */
303 static inline void gen_stop_exception(DisasContext *ctx)
305 gen_update_nip(ctx, ctx->nip);
306 ctx->exception = POWERPC_EXCP_STOP;
309 /* No need to update nip here, as execution flow will change */
310 static inline void gen_sync_exception(DisasContext *ctx)
312 ctx->exception = POWERPC_EXCP_SYNC;
315 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
316 GEN_OPCODE(name, opc1, opc2, opc3, inval, type)
318 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
319 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type)
321 typedef struct opcode_t {
322 unsigned char opc1, opc2, opc3;
323 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
324 unsigned char pad[5];
325 #else
326 unsigned char pad[1];
327 #endif
328 opc_handler_t handler;
329 const char *oname;
330 } opcode_t;
332 /*****************************************************************************/
333 /*** Instruction decoding ***/
334 #define EXTRACT_HELPER(name, shift, nb) \
335 static inline uint32_t name(uint32_t opcode) \
337 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
340 #define EXTRACT_SHELPER(name, shift, nb) \
341 static inline int32_t name(uint32_t opcode) \
343 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
346 /* Opcode part 1 */
347 EXTRACT_HELPER(opc1, 26, 6);
348 /* Opcode part 2 */
349 EXTRACT_HELPER(opc2, 1, 5);
350 /* Opcode part 3 */
351 EXTRACT_HELPER(opc3, 6, 5);
352 /* Update Cr0 flags */
353 EXTRACT_HELPER(Rc, 0, 1);
354 /* Destination */
355 EXTRACT_HELPER(rD, 21, 5);
356 /* Source */
357 EXTRACT_HELPER(rS, 21, 5);
358 /* First operand */
359 EXTRACT_HELPER(rA, 16, 5);
360 /* Second operand */
361 EXTRACT_HELPER(rB, 11, 5);
362 /* Third operand */
363 EXTRACT_HELPER(rC, 6, 5);
364 /*** Get CRn ***/
365 EXTRACT_HELPER(crfD, 23, 3);
366 EXTRACT_HELPER(crfS, 18, 3);
367 EXTRACT_HELPER(crbD, 21, 5);
368 EXTRACT_HELPER(crbA, 16, 5);
369 EXTRACT_HELPER(crbB, 11, 5);
370 /* SPR / TBL */
371 EXTRACT_HELPER(_SPR, 11, 10);
372 static inline uint32_t SPR(uint32_t opcode)
374 uint32_t sprn = _SPR(opcode);
376 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
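/*
 * Editor's sketch (not part of the original file): the SPR number is
 * encoded in the instruction with its two 5-bit halves swapped, which is
 * why SPR() swaps them back.  For example "mfspr rD,LR" (architected
 * SPR 8) carries the raw field value 0x100; the hypothetical check below
 * shows the swap recovering 8.
 */
static inline int example_spr_swap_ok(void)
{
    uint32_t raw = 0x100;  /* SPR field bits as encoded for the link register */
    return (((raw >> 5) & 0x1F) | ((raw & 0x1F) << 5)) == 8;
}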
378 /*** Get constants ***/
379 EXTRACT_HELPER(IMM, 12, 8);
380 /* 16 bits signed immediate value */
381 EXTRACT_SHELPER(SIMM, 0, 16);
382 /* 16 bits unsigned immediate value */
383 EXTRACT_HELPER(UIMM, 0, 16);
384 /* 5 bits signed immediate value */
385 EXTRACT_HELPER(SIMM5, 16, 5);
386 /* 5 bits unsigned immediate value */
387 EXTRACT_HELPER(UIMM5, 16, 5);
388 /* Bit count */
389 EXTRACT_HELPER(NB, 11, 5);
390 /* Shift count */
391 EXTRACT_HELPER(SH, 11, 5);
392 /* Vector shift count */
393 EXTRACT_HELPER(VSH, 6, 4);
394 /* Mask start */
395 EXTRACT_HELPER(MB, 6, 5);
396 /* Mask end */
397 EXTRACT_HELPER(ME, 1, 5);
398 /* Trap operand */
399 EXTRACT_HELPER(TO, 21, 5);
401 EXTRACT_HELPER(CRM, 12, 8);
402 EXTRACT_HELPER(FM, 17, 8);
403 EXTRACT_HELPER(SR, 16, 4);
404 EXTRACT_HELPER(FPIMM, 12, 4);
406 /*** Jump target decoding ***/
407 /* Displacement */
408 EXTRACT_SHELPER(d, 0, 16);
409 /* Immediate address */
410 static inline target_ulong LI(uint32_t opcode)
412 return (opcode >> 0) & 0x03FFFFFC;
415 static inline uint32_t BD(uint32_t opcode)
417 return (opcode >> 0) & 0xFFFC;
420 EXTRACT_HELPER(BO, 21, 5);
421 EXTRACT_HELPER(BI, 16, 5);
422 /* Absolute/relative address */
423 EXTRACT_HELPER(AA, 1, 1);
424 /* Link */
425 EXTRACT_HELPER(LK, 0, 1);
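/*
 * Editor's sketch (not part of the original file): each EXTRACT_HELPER()
 * above expands to an accessor that shifts the field down and masks it to
 * 'nb' bits; EXTRACT_SHELPER() additionally sign-extends through the
 * int16_t cast.  The hypothetical check below decodes the word 0x38600001
 * ("addi r3,r0,1") with the generated helpers.
 */
static inline int example_decode_ok(void)
{
    const uint32_t insn = 0x38600001;
    return opc1(insn) == 14 &&      /* primary opcode of addi */
           rD(insn) == 3 &&
           rA(insn) == 0 &&         /* rA == 0, so this is "li r3,1" */
           SIMM(insn) == 1;
}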
427 /* Create a mask between <start> and <end> bits */
428 static inline target_ulong MASK(uint32_t start, uint32_t end)
430 target_ulong ret;
432 #if defined(TARGET_PPC64)
433 if (likely(start == 0)) {
434 ret = UINT64_MAX << (63 - end);
435 } else if (likely(end == 63)) {
436 ret = UINT64_MAX >> start;
438 #else
439 if (likely(start == 0)) {
440 ret = UINT32_MAX << (31 - end);
441 } else if (likely(end == 31)) {
442 ret = UINT32_MAX >> start;
444 #endif
445 else {
446 ret = (((target_ulong)(-1ULL)) >> (start)) ^
447 (((target_ulong)(-1ULL) >> (end)) >> 1);
448 if (unlikely(start > end))
449 return ~ret;
452 return ret;
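/*
 * Editor's sketch (not part of the original file): MASK() uses PowerPC
 * bit numbering, i.e. bit 0 is the most significant bit, and a start
 * greater than the end produces the wrap-around masks needed by rlwnm and
 * friends.  A hypothetical 32-bit reference model with worked values:
 */
static inline uint32_t example_mask32(unsigned start, unsigned end)
{
    uint32_t ret = (0xFFFFFFFFu >> start) ^ ((0xFFFFFFFFu >> end) >> 1);
    return start > end ? ~ret : ret;
}
/* example_mask32(0, 31) == 0xFFFFFFFF
 * example_mask32(24, 27) == 0x000000F0
 * example_mask32(28, 3)  == 0xF000000F   (wrap-around case) */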
455 /*****************************************************************************/
456 /* PowerPC instructions table */
458 #if defined(DO_PPC_STATISTICS)
459 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \
461 .opc1 = op1, \
462 .opc2 = op2, \
463 .opc3 = op3, \
464 .pad = { 0, }, \
465 .handler = { \
466 .inval = invl, \
467 .type = _typ, \
468 .handler = &gen_##name, \
469 .oname = stringify(name), \
470 }, \
471 .oname = stringify(name), \
473 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \
475 .opc1 = op1, \
476 .opc2 = op2, \
477 .opc3 = op3, \
478 .pad = { 0, }, \
479 .handler = { \
480 .inval = invl, \
481 .type = _typ, \
482 .handler = &gen_##name, \
483 .oname = onam, \
484 }, \
485 .oname = onam, \
487 #else
488 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \
490 .opc1 = op1, \
491 .opc2 = op2, \
492 .opc3 = op3, \
493 .pad = { 0, }, \
494 .handler = { \
495 .inval = invl, \
496 .type = _typ, \
497 .handler = &gen_##name, \
498 }, \
499 .oname = stringify(name), \
501 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \
503 .opc1 = op1, \
504 .opc2 = op2, \
505 .opc3 = op3, \
506 .pad = { 0, }, \
507 .handler = { \
508 .inval = invl, \
509 .type = _typ, \
510 .handler = &gen_##name, \
511 }, \
512 .oname = onam, \
514 #endif
516 /* SPR load/store helpers */
517 static inline void gen_load_spr(TCGv t, int reg)
519 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
522 static inline void gen_store_spr(int reg, TCGv t)
524 tcg_gen_st_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
527 /* Invalid instruction */
528 static void gen_invalid(DisasContext *ctx)
530 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
533 static opc_handler_t invalid_handler = {
534 .inval = 0xFFFFFFFF,
535 .type = PPC_NONE,
536 .handler = gen_invalid,
539 /*** Integer comparison ***/
541 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
543 int l1, l2, l3;
545 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_xer);
546 tcg_gen_shri_i32(cpu_crf[crf], cpu_crf[crf], XER_SO);
547 tcg_gen_andi_i32(cpu_crf[crf], cpu_crf[crf], 1);
549 l1 = gen_new_label();
550 l2 = gen_new_label();
551 l3 = gen_new_label();
552 if (s) {
553 tcg_gen_brcond_tl(TCG_COND_LT, arg0, arg1, l1);
554 tcg_gen_brcond_tl(TCG_COND_GT, arg0, arg1, l2);
555 } else {
556 tcg_gen_brcond_tl(TCG_COND_LTU, arg0, arg1, l1);
557 tcg_gen_brcond_tl(TCG_COND_GTU, arg0, arg1, l2);
559 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_EQ);
560 tcg_gen_br(l3);
561 gen_set_label(l1);
562 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_LT);
563 tcg_gen_br(l3);
564 gen_set_label(l2);
565 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_GT);
566 gen_set_label(l3);
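/*
 * Editor's sketch (not part of the original file): gen_op_cmp() builds a
 * 4-bit CR field whose SO bit is copied from XER and whose LT/GT/EQ bit is
 * chosen by the two conditional branches above.  A plain-C model of the
 * signed case, assuming QEMU's usual CRF_LT=3, CRF_GT=2, CRF_EQ=1,
 * CRF_SO=0 bit positions:
 */
static inline uint32_t example_cmp_signed(int32_t a, int32_t b, int so)
{
    uint32_t crf = so & 1;          /* SO comes straight from XER[SO] */
    if (a < b) {
        crf |= 1 << 3;              /* LT */
    } else if (a > b) {
        crf |= 1 << 2;              /* GT */
    } else {
        crf |= 1 << 1;              /* EQ */
    }
    return crf;
}
/* example_cmp_signed(-1, 0, 0) == 0x8 (LT); example_cmp_signed(5, 5, 1) == 0x3 (EQ|SO) */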
569 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
571 TCGv t0 = tcg_const_local_tl(arg1);
572 gen_op_cmp(arg0, t0, s, crf);
573 tcg_temp_free(t0);
576 #if defined(TARGET_PPC64)
577 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
579 TCGv t0, t1;
580 t0 = tcg_temp_local_new();
581 t1 = tcg_temp_local_new();
582 if (s) {
583 tcg_gen_ext32s_tl(t0, arg0);
584 tcg_gen_ext32s_tl(t1, arg1);
585 } else {
586 tcg_gen_ext32u_tl(t0, arg0);
587 tcg_gen_ext32u_tl(t1, arg1);
589 gen_op_cmp(t0, t1, s, crf);
590 tcg_temp_free(t1);
591 tcg_temp_free(t0);
594 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
596 TCGv t0 = tcg_const_local_tl(arg1);
597 gen_op_cmp32(arg0, t0, s, crf);
598 tcg_temp_free(t0);
600 #endif
602 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
604 #if defined(TARGET_PPC64)
605 if (!(ctx->sf_mode))
606 gen_op_cmpi32(reg, 0, 1, 0);
607 else
608 #endif
609 gen_op_cmpi(reg, 0, 1, 0);
612 /* cmp */
613 static void gen_cmp(DisasContext *ctx)
615 #if defined(TARGET_PPC64)
616 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
617 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
618 1, crfD(ctx->opcode));
619 else
620 #endif
621 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
622 1, crfD(ctx->opcode));
625 /* cmpi */
626 static void gen_cmpi(DisasContext *ctx)
628 #if defined(TARGET_PPC64)
629 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
630 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
631 1, crfD(ctx->opcode));
632 else
633 #endif
634 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
635 1, crfD(ctx->opcode));
638 /* cmpl */
639 static void gen_cmpl(DisasContext *ctx)
641 #if defined(TARGET_PPC64)
642 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
643 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
644 0, crfD(ctx->opcode));
645 else
646 #endif
647 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
648 0, crfD(ctx->opcode));
651 /* cmpli */
652 static void gen_cmpli(DisasContext *ctx)
654 #if defined(TARGET_PPC64)
655 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
656 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
657 0, crfD(ctx->opcode));
658 else
659 #endif
660 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
661 0, crfD(ctx->opcode));
664 /* isel (PowerPC 2.03 specification) */
665 static void gen_isel(DisasContext *ctx)
667 int l1, l2;
668 uint32_t bi = rC(ctx->opcode);
669 uint32_t mask;
670 TCGv_i32 t0;
672 l1 = gen_new_label();
673 l2 = gen_new_label();
675 mask = 1 << (3 - (bi & 0x03));
676 t0 = tcg_temp_new_i32();
677 tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
678 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
679 if (rA(ctx->opcode) == 0)
680 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
681 else
682 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
683 tcg_gen_br(l2);
684 gen_set_label(l1);
685 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
686 gen_set_label(l2);
687 tcg_temp_free_i32(t0);
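/*
 * Editor's sketch (not part of the original file): isel copies rA (or a
 * literal zero when the RA field is 0) into rD when CR bit 'bi' is set,
 * and rB otherwise.  The mask computed above, 1 << (3 - (bi & 3)) inside
 * field bi >> 2, selects the same bit a full 32-bit CR view would:
 */
static inline uint32_t example_isel(uint32_t cr, unsigned bi,
                                    uint32_t ra_or_zero, uint32_t rb)
{
    /* bit 'bi' counts from the most significant end of CR, as in the ISA */
    return (cr & (0x80000000u >> bi)) ? ra_or_zero : rb;
}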
690 /*** Integer arithmetic ***/
692 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
693 TCGv arg1, TCGv arg2, int sub)
695 int l1;
696 TCGv t0;
698 l1 = gen_new_label();
699 /* Start with XER OV disabled, the most likely case */
700 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
701 t0 = tcg_temp_local_new();
702 tcg_gen_xor_tl(t0, arg0, arg1);
703 #if defined(TARGET_PPC64)
704 if (!ctx->sf_mode)
705 tcg_gen_ext32s_tl(t0, t0);
706 #endif
707 if (sub)
708 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
709 else
710 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
711 tcg_gen_xor_tl(t0, arg1, arg2);
712 #if defined(TARGET_PPC64)
713 if (!ctx->sf_mode)
714 tcg_gen_ext32s_tl(t0, t0);
715 #endif
716 if (sub)
717 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
718 else
719 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
720 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
721 gen_set_label(l1);
722 tcg_temp_free(t0);
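/*
 * Editor's sketch (not part of the original file): the branches above
 * implement the classic sign test -- for an addition, overflow happens
 * exactly when both operands share a sign and the result does not (the
 * 'sub' flag flips the first comparison for subtraction).  A hypothetical
 * 32-bit restatement of the addition case:
 */
static inline int example_add_overflows(int32_t a, int32_t b)
{
    int32_t r = (int32_t)((uint32_t)a + (uint32_t)b);
    /* sign(r) != sign(a) while sign(a) == sign(b) */
    return ((r ^ a) & ~(a ^ b)) < 0;
}
/* example_add_overflows(INT32_MAX, 1) == 1, example_add_overflows(-1, 1) == 0 */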
725 static inline void gen_op_arith_compute_ca(DisasContext *ctx, TCGv arg1,
726 TCGv arg2, int sub)
728 int l1 = gen_new_label();
730 #if defined(TARGET_PPC64)
731 if (!(ctx->sf_mode)) {
732 TCGv t0, t1;
733 t0 = tcg_temp_new();
734 t1 = tcg_temp_new();
736 tcg_gen_ext32u_tl(t0, arg1);
737 tcg_gen_ext32u_tl(t1, arg2);
738 if (sub) {
739 tcg_gen_brcond_tl(TCG_COND_GTU, t0, t1, l1);
740 } else {
741 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
743 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
744 gen_set_label(l1);
745 tcg_temp_free(t0);
746 tcg_temp_free(t1);
747 } else
748 #endif
750 if (sub) {
751 tcg_gen_brcond_tl(TCG_COND_GTU, arg1, arg2, l1);
752 } else {
753 tcg_gen_brcond_tl(TCG_COND_GEU, arg1, arg2, l1);
755 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
756 gen_set_label(l1);
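/*
 * Editor's sketch (not part of the original file): for an addition the
 * helper above sets XER[CA] when the (possibly zero-extended) sum is
 * unsigned-less-than one of the addends, and for a subtraction CA means
 * "no borrow", i.e. the difference did not wrap above the minuend.  A
 * hypothetical 32-bit restatement:
 */
static inline int example_add_carry(uint32_t a, uint32_t b)
{
    return (uint32_t)(a + b) < a;       /* carry out of bit 31 */
}

static inline int example_sub_carry(uint32_t a, uint32_t b)
{
    return (uint32_t)(a - b) <= a;      /* equivalently: a >= b */
}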
760 /* Common add function */
761 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
762 TCGv arg2, int add_ca, int compute_ca,
763 int compute_ov)
765 TCGv t0, t1;
767 if ((!compute_ca && !compute_ov) ||
768 (!TCGV_EQUAL(ret,arg1) && !TCGV_EQUAL(ret, arg2))) {
769 t0 = ret;
770 } else {
771 t0 = tcg_temp_local_new();
774 if (add_ca) {
775 t1 = tcg_temp_local_new();
776 tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
777 tcg_gen_shri_tl(t1, t1, XER_CA);
778 } else {
779 TCGV_UNUSED(t1);
782 if (compute_ca && compute_ov) {
783 /* Start with XER CA and OV disabled, the most likely case */
784 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
785 } else if (compute_ca) {
786 /* Start with XER CA disabled, the most likely case */
787 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
788 } else if (compute_ov) {
789 /* Start with XER OV disabled, the most likely case */
790 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
793 tcg_gen_add_tl(t0, arg1, arg2);
795 if (compute_ca) {
796 gen_op_arith_compute_ca(ctx, t0, arg1, 0);
798 if (add_ca) {
799 tcg_gen_add_tl(t0, t0, t1);
800 gen_op_arith_compute_ca(ctx, t0, t1, 0);
801 tcg_temp_free(t1);
803 if (compute_ov) {
804 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
807 if (unlikely(Rc(ctx->opcode) != 0))
808 gen_set_Rc0(ctx, t0);
810 if (!TCGV_EQUAL(t0, ret)) {
811 tcg_gen_mov_tl(ret, t0);
812 tcg_temp_free(t0);
815 /* Add functions with two operands */
816 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
817 static void glue(gen_, name)(DisasContext *ctx) \
819 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
820 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
821 add_ca, compute_ca, compute_ov); \
823 /* Add functions with one operand and one immediate */
824 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
825 add_ca, compute_ca, compute_ov) \
826 static void glue(gen_, name)(DisasContext *ctx) \
828 TCGv t0 = tcg_const_local_tl(const_val); \
829 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
830 cpu_gpr[rA(ctx->opcode)], t0, \
831 add_ca, compute_ca, compute_ov); \
832 tcg_temp_free(t0); \
835 /* add add. addo addo. */
836 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
837 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
838 /* addc addc. addco addco. */
839 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
840 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
841 /* adde adde. addeo addeo. */
842 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
843 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
844 /* addme addme. addmeo addmeo. */
845 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
846 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
847 /* addze addze. addzeo addzeo. */
848 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
849 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
850 /* addi */
851 static void gen_addi(DisasContext *ctx)
853 target_long simm = SIMM(ctx->opcode);
855 if (rA(ctx->opcode) == 0) {
856 /* li case */
857 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
858 } else {
859 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm);
862 /* addic addic.*/
863 static inline void gen_op_addic(DisasContext *ctx, TCGv ret, TCGv arg1,
864 int compute_Rc0)
866 target_long simm = SIMM(ctx->opcode);
868 /* Start with XER CA disabled, the most likely case */
869 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
871 if (likely(simm != 0)) {
872 TCGv t0 = tcg_temp_local_new();
873 tcg_gen_addi_tl(t0, arg1, simm);
874 gen_op_arith_compute_ca(ctx, t0, arg1, 0);
875 tcg_gen_mov_tl(ret, t0);
876 tcg_temp_free(t0);
877 } else {
878 tcg_gen_mov_tl(ret, arg1);
880 if (compute_Rc0) {
881 gen_set_Rc0(ctx, ret);
885 static void gen_addic(DisasContext *ctx)
887 gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
890 static void gen_addic_(DisasContext *ctx)
892 gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
895 /* addis */
896 static void gen_addis(DisasContext *ctx)
898 target_long simm = SIMM(ctx->opcode);
900 if (rA(ctx->opcode) == 0) {
901 /* lis case */
902 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
903 } else {
904 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm << 16);
908 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
909 TCGv arg2, int sign, int compute_ov)
911 int l1 = gen_new_label();
912 int l2 = gen_new_label();
913 TCGv_i32 t0 = tcg_temp_local_new_i32();
914 TCGv_i32 t1 = tcg_temp_local_new_i32();
916 tcg_gen_trunc_tl_i32(t0, arg1);
917 tcg_gen_trunc_tl_i32(t1, arg2);
918 tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1);
919 if (sign) {
920 int l3 = gen_new_label();
921 tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3);
922 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1);
923 gen_set_label(l3);
924 tcg_gen_div_i32(t0, t0, t1);
925 } else {
926 tcg_gen_divu_i32(t0, t0, t1);
928 if (compute_ov) {
929 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
931 tcg_gen_br(l2);
932 gen_set_label(l1);
933 if (sign) {
934 tcg_gen_sari_i32(t0, t0, 31);
935 } else {
936 tcg_gen_movi_i32(t0, 0);
938 if (compute_ov) {
939 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
941 gen_set_label(l2);
942 tcg_gen_extu_i32_tl(ret, t0);
943 tcg_temp_free_i32(t0);
944 tcg_temp_free_i32(t1);
945 if (unlikely(Rc(ctx->opcode) != 0))
946 gen_set_Rc0(ctx, ret);
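/*
 * Editor's sketch (not part of the original file): the branches above give
 * divw/divwu a defined result for the two cases the architecture leaves
 * undefined -- division by zero and INT32_MIN / -1 -- and let divwo flag
 * them through XER[OV].  A hypothetical restatement of the signed case:
 */
static inline int32_t example_divw(int32_t a, int32_t b, int *ov)
{
    if (b == 0 || (a == INT32_MIN && b == -1)) {
        *ov = 1;                        /* divwo would also set XER[OV]/[SO] */
        return a < 0 ? -1 : 0;          /* mirrors the sari-by-31 path above */
    }
    *ov = 0;
    return a / b;
}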
948 /* Div functions */
949 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
950 static void glue(gen_, name)(DisasContext *ctx) \
952 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
953 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
954 sign, compute_ov); \
956 /* divwu divwu. divwuo divwuo. */
957 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
958 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
959 /* divw divw. divwo divwo. */
960 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
961 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
962 #if defined(TARGET_PPC64)
963 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
964 TCGv arg2, int sign, int compute_ov)
966 int l1 = gen_new_label();
967 int l2 = gen_new_label();
969 tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1);
970 if (sign) {
971 int l3 = gen_new_label();
972 tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3);
973 tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1);
974 gen_set_label(l3);
975 tcg_gen_div_i64(ret, arg1, arg2);
976 } else {
977 tcg_gen_divu_i64(ret, arg1, arg2);
979 if (compute_ov) {
980 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
982 tcg_gen_br(l2);
983 gen_set_label(l1);
984 if (sign) {
985 tcg_gen_sari_i64(ret, arg1, 63);
986 } else {
987 tcg_gen_movi_i64(ret, 0);
989 if (compute_ov) {
990 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
992 gen_set_label(l2);
993 if (unlikely(Rc(ctx->opcode) != 0))
994 gen_set_Rc0(ctx, ret);
996 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
997 static void glue(gen_, name)(DisasContext *ctx) \
999 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
1000 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1001 sign, compute_ov); \
1003 /* divdu divdu. divduo divduo. */
1004 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1005 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1006 /* divd divd. divdo divdo. */
1007 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1008 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1009 #endif
1011 /* mulhw mulhw. */
1012 static void gen_mulhw(DisasContext *ctx)
1014 TCGv_i64 t0, t1;
1016 t0 = tcg_temp_new_i64();
1017 t1 = tcg_temp_new_i64();
1018 #if defined(TARGET_PPC64)
1019 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
1020 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
1021 tcg_gen_mul_i64(t0, t0, t1);
1022 tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
1023 #else
1024 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1025 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1026 tcg_gen_mul_i64(t0, t0, t1);
1027 tcg_gen_shri_i64(t0, t0, 32);
1028 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1029 #endif
1030 tcg_temp_free_i64(t0);
1031 tcg_temp_free_i64(t1);
1032 if (unlikely(Rc(ctx->opcode) != 0))
1033 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
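/*
 * Editor's sketch (not part of the original file): mulhw simply keeps the
 * high word of the 64-bit signed product, which is why both variants above
 * widen the operands, multiply, and shift right by 32.  A one-line model:
 */
static inline int32_t example_mulhw(int32_t a, int32_t b)
{
    return (int32_t)(((int64_t)a * b) >> 32);
}
/* example_mulhw(0x40000000, 4) == 1 */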
1036 /* mulhwu mulhwu. */
1037 static void gen_mulhwu(DisasContext *ctx)
1039 TCGv_i64 t0, t1;
1041 t0 = tcg_temp_new_i64();
1042 t1 = tcg_temp_new_i64();
1043 #if defined(TARGET_PPC64)
1044 tcg_gen_ext32u_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1045 tcg_gen_ext32u_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1046 tcg_gen_mul_i64(t0, t0, t1);
1047 tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
1048 #else
1049 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1050 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1051 tcg_gen_mul_i64(t0, t0, t1);
1052 tcg_gen_shri_i64(t0, t0, 32);
1053 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1054 #endif
1055 tcg_temp_free_i64(t0);
1056 tcg_temp_free_i64(t1);
1057 if (unlikely(Rc(ctx->opcode) != 0))
1058 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1061 /* mullw mullw. */
1062 static void gen_mullw(DisasContext *ctx)
1064 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1065 cpu_gpr[rB(ctx->opcode)]);
1066 tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]);
1067 if (unlikely(Rc(ctx->opcode) != 0))
1068 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1071 /* mullwo mullwo. */
1072 static void gen_mullwo(DisasContext *ctx)
1074 int l1;
1075 TCGv_i64 t0, t1;
1077 t0 = tcg_temp_new_i64();
1078 t1 = tcg_temp_new_i64();
1079 l1 = gen_new_label();
1080 /* Start with XER OV disabled, the most likely case */
1081 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1082 #if defined(TARGET_PPC64)
1083 tcg_gen_ext32s_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1084 tcg_gen_ext32s_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1085 #else
1086 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1087 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1088 #endif
1089 tcg_gen_mul_i64(t0, t0, t1);
1090 #if defined(TARGET_PPC64)
1091 tcg_gen_ext32s_i64(cpu_gpr[rD(ctx->opcode)], t0);
1092 tcg_gen_brcond_i64(TCG_COND_EQ, t0, cpu_gpr[rD(ctx->opcode)], l1);
1093 #else
1094 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1095 tcg_gen_ext32s_i64(t1, t0);
1096 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
1097 #endif
1098 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1099 gen_set_label(l1);
1100 tcg_temp_free_i64(t0);
1101 tcg_temp_free_i64(t1);
1102 if (unlikely(Rc(ctx->opcode) != 0))
1103 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
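/*
 * Editor's sketch (not part of the original file): mullwo detects overflow
 * by checking whether sign-extending the low 32 bits of the full 64-bit
 * product gives the product back -- exactly what the ext32s/brcond pair
 * above does.  A hypothetical restatement:
 */
static inline int example_mullwo_overflows(int32_t a, int32_t b)
{
    int64_t prod = (int64_t)a * b;
    return (int64_t)(int32_t)prod != prod;
}
/* example_mullwo_overflows(0x10000, 0x10000) == 1, example_mullwo_overflows(3, 4) == 0 */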
1106 /* mulli */
1107 static void gen_mulli(DisasContext *ctx)
1109 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1110 SIMM(ctx->opcode));
1112 #if defined(TARGET_PPC64)
1113 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
1114 static void glue(gen_, name)(DisasContext *ctx) \
1116 gen_helper_##name (cpu_gpr[rD(ctx->opcode)], \
1117 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
1118 if (unlikely(Rc(ctx->opcode) != 0)) \
1119 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
1121 /* mulhdu mulhdu. */
1122 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00);
1123 /* mulhd mulhd. */
1124 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02);
1126 /* mulld mulld. */
1127 static void gen_mulld(DisasContext *ctx)
1129 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1130 cpu_gpr[rB(ctx->opcode)]);
1131 if (unlikely(Rc(ctx->opcode) != 0))
1132 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1134 /* mulldo mulldo. */
1135 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17);
1136 #endif
1138 /* neg neg. nego nego. */
1139 static inline void gen_op_arith_neg(DisasContext *ctx, TCGv ret, TCGv arg1,
1140 int ov_check)
1142 int l1 = gen_new_label();
1143 int l2 = gen_new_label();
1144 TCGv t0 = tcg_temp_local_new();
1145 #if defined(TARGET_PPC64)
1146 if (ctx->sf_mode) {
1147 tcg_gen_mov_tl(t0, arg1);
1148 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT64_MIN, l1);
1149 } else
1150 #endif
1152 tcg_gen_ext32s_tl(t0, arg1);
1153 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT32_MIN, l1);
1155 tcg_gen_neg_tl(ret, arg1);
1156 if (ov_check) {
1157 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1159 tcg_gen_br(l2);
1160 gen_set_label(l1);
1161 tcg_gen_mov_tl(ret, t0);
1162 if (ov_check) {
1163 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1165 gen_set_label(l2);
1166 tcg_temp_free(t0);
1167 if (unlikely(Rc(ctx->opcode) != 0))
1168 gen_set_Rc0(ctx, ret);
1171 static void gen_neg(DisasContext *ctx)
1173 gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
1176 static void gen_nego(DisasContext *ctx)
1178 gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
1181 /* Common subf function */
1182 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1183 TCGv arg2, int add_ca, int compute_ca,
1184 int compute_ov)
1186 TCGv t0, t1;
1188 if ((!compute_ca && !compute_ov) ||
1189 (!TCGV_EQUAL(ret, arg1) && !TCGV_EQUAL(ret, arg2))) {
1190 t0 = ret;
1191 } else {
1192 t0 = tcg_temp_local_new();
1195 if (add_ca) {
1196 t1 = tcg_temp_local_new();
1197 tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
1198 tcg_gen_shri_tl(t1, t1, XER_CA);
1199 } else {
1200 TCGV_UNUSED(t1);
1203 if (compute_ca && compute_ov) {
1204 /* Start with XER CA and OV disabled, the most likely case */
1205 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
1206 } else if (compute_ca) {
1207 /* Start with XER CA disabled, the most likely case */
1208 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1209 } else if (compute_ov) {
1210 /* Start with XER OV disabled, the most likely case */
1211 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1214 if (add_ca) {
1215 tcg_gen_not_tl(t0, arg1);
1216 tcg_gen_add_tl(t0, t0, arg2);
1217 gen_op_arith_compute_ca(ctx, t0, arg2, 0);
1218 tcg_gen_add_tl(t0, t0, t1);
1219 gen_op_arith_compute_ca(ctx, t0, t1, 0);
1220 tcg_temp_free(t1);
1221 } else {
1222 tcg_gen_sub_tl(t0, arg2, arg1);
1223 if (compute_ca) {
1224 gen_op_arith_compute_ca(ctx, t0, arg2, 1);
1227 if (compute_ov) {
1228 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1231 if (unlikely(Rc(ctx->opcode) != 0))
1232 gen_set_Rc0(ctx, t0);
1234 if (!TCGV_EQUAL(t0, ret)) {
1235 tcg_gen_mov_tl(ret, t0);
1236 tcg_temp_free(t0);
1239 /* Sub functions with two operands */
1240 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
1241 static void glue(gen_, name)(DisasContext *ctx) \
1243 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1244 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1245 add_ca, compute_ca, compute_ov); \
1247 /* Sub functions with one operand and one immediate */
1248 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
1249 add_ca, compute_ca, compute_ov) \
1250 static void glue(gen_, name)(DisasContext *ctx) \
1252 TCGv t0 = tcg_const_local_tl(const_val); \
1253 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1254 cpu_gpr[rA(ctx->opcode)], t0, \
1255 add_ca, compute_ca, compute_ov); \
1256 tcg_temp_free(t0); \
1258 /* subf subf. subfo subfo. */
1259 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
1260 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
1261 /* subfc subfc. subfco subfco. */
1262 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
1263 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
1265 /* subfe subfe. subfeo subfeo. */
1265 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
1266 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
1267 /* subfme subfme. subfmeo subfmeo. */
1268 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
1269 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
1270 /* subfze subfze. subfzeo subfzeo.*/
1271 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
1272 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
1274 /* subfic */
1275 static void gen_subfic(DisasContext *ctx)
1277 /* Start with XER CA disabled, the most likely case */
1278 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1279 TCGv t0 = tcg_temp_local_new();
1280 TCGv t1 = tcg_const_local_tl(SIMM(ctx->opcode));
1281 tcg_gen_sub_tl(t0, t1, cpu_gpr[rA(ctx->opcode)]);
1282 gen_op_arith_compute_ca(ctx, t0, t1, 1);
1283 tcg_temp_free(t1);
1284 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
1285 tcg_temp_free(t0);
1288 /*** Integer logical ***/
1289 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
1290 static void glue(gen_, name)(DisasContext *ctx) \
1292 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
1293 cpu_gpr[rB(ctx->opcode)]); \
1294 if (unlikely(Rc(ctx->opcode) != 0)) \
1295 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1298 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
1299 static void glue(gen_, name)(DisasContext *ctx) \
1301 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
1302 if (unlikely(Rc(ctx->opcode) != 0)) \
1303 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1306 /* and & and. */
1307 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
1308 /* andc & andc. */
1309 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
1311 /* andi. */
1312 static void gen_andi_(DisasContext *ctx)
1314 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
1315 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1318 /* andis. */
1319 static void gen_andis_(DisasContext *ctx)
1321 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
1322 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1325 /* cntlzw */
1326 static void gen_cntlzw(DisasContext *ctx)
1328 gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1329 if (unlikely(Rc(ctx->opcode) != 0))
1330 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1332 /* eqv & eqv. */
1333 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
1334 /* extsb & extsb. */
1335 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
1336 /* extsh & extsh. */
1337 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
1338 /* nand & nand. */
1339 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
1340 /* nor & nor. */
1341 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
1343 /* or & or. */
1344 static void gen_or(DisasContext *ctx)
1346 int rs, ra, rb;
1348 rs = rS(ctx->opcode);
1349 ra = rA(ctx->opcode);
1350 rb = rB(ctx->opcode);
1351 /* Optimisation for the mr case */
1352 if (rs != ra || rs != rb) {
1353 if (rs != rb)
1354 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
1355 else
1356 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
1357 if (unlikely(Rc(ctx->opcode) != 0))
1358 gen_set_Rc0(ctx, cpu_gpr[ra]);
1359 } else if (unlikely(Rc(ctx->opcode) != 0)) {
1360 gen_set_Rc0(ctx, cpu_gpr[rs]);
1361 #if defined(TARGET_PPC64)
1362 } else {
1363 int prio = 0;
1365 switch (rs) {
1366 case 1:
1367 /* Set process priority to low */
1368 prio = 2;
1369 break;
1370 case 6:
1371 /* Set process priority to medium-low */
1372 prio = 3;
1373 break;
1374 case 2:
1375 /* Set process priority to normal */
1376 prio = 4;
1377 break;
1378 #if !defined(CONFIG_USER_ONLY)
1379 case 31:
1380 if (ctx->mem_idx > 0) {
1381 /* Set process priority to very low */
1382 prio = 1;
1384 break;
1385 case 5:
1386 if (ctx->mem_idx > 0) {
1387 /* Set process priority to medium-high */
1388 prio = 5;
1390 break;
1391 case 3:
1392 if (ctx->mem_idx > 0) {
1393 /* Set process priority to high */
1394 prio = 6;
1396 break;
1397 case 7:
1398 if (ctx->mem_idx > 1) {
1399 /* Set process priority to very high */
1400 prio = 7;
1402 break;
1403 #endif
1404 default:
1405 /* nop */
1406 break;
1408 if (prio) {
1409 TCGv t0 = tcg_temp_new();
1410 gen_load_spr(t0, SPR_PPR);
1411 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
1412 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
1413 gen_store_spr(SPR_PPR, t0);
1414 tcg_temp_free(t0);
1416 #endif
1419 /* orc & orc. */
1420 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
1422 /* xor & xor. */
1423 static void gen_xor(DisasContext *ctx)
1425 /* Optimisation for "set to zero" case */
1426 if (rS(ctx->opcode) != rB(ctx->opcode))
1427 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1428 else
1429 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1430 if (unlikely(Rc(ctx->opcode) != 0))
1431 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1434 /* ori */
1435 static void gen_ori(DisasContext *ctx)
1437 target_ulong uimm = UIMM(ctx->opcode);
1439 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1440 /* NOP */
1441 /* XXX: should handle special NOPs for POWER series */
1442 return;
1444 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1447 /* oris */
1448 static void gen_oris(DisasContext *ctx)
1450 target_ulong uimm = UIMM(ctx->opcode);
1452 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1453 /* NOP */
1454 return;
1456 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1459 /* xori */
1460 static void gen_xori(DisasContext *ctx)
1462 target_ulong uimm = UIMM(ctx->opcode);
1464 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1465 /* NOP */
1466 return;
1468 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1471 /* xoris */
1472 static void gen_xoris(DisasContext *ctx)
1474 target_ulong uimm = UIMM(ctx->opcode);
1476 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1477 /* NOP */
1478 return;
1480 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1483 /* popcntb : PowerPC 2.03 specification */
1484 static void gen_popcntb(DisasContext *ctx)
1486 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1489 static void gen_popcntw(DisasContext *ctx)
1491 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1494 #if defined(TARGET_PPC64)
1495 /* popcntd: PowerPC 2.06 specification */
1496 static void gen_popcntd(DisasContext *ctx)
1498 gen_helper_popcntd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1500 #endif
1502 #if defined(TARGET_PPC64)
1503 /* extsw & extsw. */
1504 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
1506 /* cntlzd */
1507 static void gen_cntlzd(DisasContext *ctx)
1509 gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1510 if (unlikely(Rc(ctx->opcode) != 0))
1511 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1513 #endif
1515 /*** Integer rotate ***/
1517 /* rlwimi & rlwimi. */
1518 static void gen_rlwimi(DisasContext *ctx)
1520 uint32_t mb, me, sh;
1522 mb = MB(ctx->opcode);
1523 me = ME(ctx->opcode);
1524 sh = SH(ctx->opcode);
1525 if (likely(sh == 0 && mb == 0 && me == 31)) {
1526 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1527 } else {
1528 target_ulong mask;
1529 TCGv t1;
1530 TCGv t0 = tcg_temp_new();
1531 #if defined(TARGET_PPC64)
1532 TCGv_i32 t2 = tcg_temp_new_i32();
1533 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
1534 tcg_gen_rotli_i32(t2, t2, sh);
1535 tcg_gen_extu_i32_i64(t0, t2);
1536 tcg_temp_free_i32(t2);
1537 #else
1538 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1539 #endif
1540 #if defined(TARGET_PPC64)
1541 mb += 32;
1542 me += 32;
1543 #endif
1544 mask = MASK(mb, me);
1545 t1 = tcg_temp_new();
1546 tcg_gen_andi_tl(t0, t0, mask);
1547 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1548 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1549 tcg_temp_free(t0);
1550 tcg_temp_free(t1);
1552 if (unlikely(Rc(ctx->opcode) != 0))
1553 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
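/*
 * Editor's sketch (not part of the original file): rlwimi rotates rS left
 * by SH and inserts the result into rA under MASK(MB, ME); on 64-bit
 * targets the mask is simply shifted into the low word (mb/me += 32).  A
 * hypothetical 32-bit model of the operation:
 */
static inline uint32_t example_rotl32(uint32_t x, unsigned n)
{
    n &= 31;
    return n ? (x << n) | (x >> (32 - n)) : x;
}

static inline uint32_t example_rlwimi(uint32_t ra, uint32_t rs, unsigned sh,
                                      uint32_t mask /* MASK(mb, me) */)
{
    return (example_rotl32(rs, sh) & mask) | (ra & ~mask);
}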
1556 /* rlwinm & rlwinm. */
1557 static void gen_rlwinm(DisasContext *ctx)
1559 uint32_t mb, me, sh;
1561 sh = SH(ctx->opcode);
1562 mb = MB(ctx->opcode);
1563 me = ME(ctx->opcode);
1565 if (likely(mb == 0 && me == (31 - sh))) {
1566 if (likely(sh == 0)) {
1567 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1568 } else {
1569 TCGv t0 = tcg_temp_new();
1570 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1571 tcg_gen_shli_tl(t0, t0, sh);
1572 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1573 tcg_temp_free(t0);
1575 } else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
1576 TCGv t0 = tcg_temp_new();
1577 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1578 tcg_gen_shri_tl(t0, t0, mb);
1579 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1580 tcg_temp_free(t0);
1581 } else {
1582 TCGv t0 = tcg_temp_new();
1583 #if defined(TARGET_PPC64)
1584 TCGv_i32 t1 = tcg_temp_new_i32();
1585 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1586 tcg_gen_rotli_i32(t1, t1, sh);
1587 tcg_gen_extu_i32_i64(t0, t1);
1588 tcg_temp_free_i32(t1);
1589 #else
1590 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1591 #endif
1592 #if defined(TARGET_PPC64)
1593 mb += 32;
1594 me += 32;
1595 #endif
1596 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1597 tcg_temp_free(t0);
1599 if (unlikely(Rc(ctx->opcode) != 0))
1600 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1603 /* rlwnm & rlwnm. */
1604 static void gen_rlwnm(DisasContext *ctx)
1606 uint32_t mb, me;
1607 TCGv t0;
1608 #if defined(TARGET_PPC64)
1609 TCGv_i32 t1, t2;
1610 #endif
1612 mb = MB(ctx->opcode);
1613 me = ME(ctx->opcode);
1614 t0 = tcg_temp_new();
1615 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
1616 #if defined(TARGET_PPC64)
1617 t1 = tcg_temp_new_i32();
1618 t2 = tcg_temp_new_i32();
1619 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1620 tcg_gen_trunc_i64_i32(t2, t0);
1621 tcg_gen_rotl_i32(t1, t1, t2);
1622 tcg_gen_extu_i32_i64(t0, t1);
1623 tcg_temp_free_i32(t1);
1624 tcg_temp_free_i32(t2);
1625 #else
1626 tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
1627 #endif
1628 if (unlikely(mb != 0 || me != 31)) {
1629 #if defined(TARGET_PPC64)
1630 mb += 32;
1631 me += 32;
1632 #endif
1633 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1634 } else {
1635 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1637 tcg_temp_free(t0);
1638 if (unlikely(Rc(ctx->opcode) != 0))
1639 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1642 #if defined(TARGET_PPC64)
1643 #define GEN_PPC64_R2(name, opc1, opc2) \
1644 static void glue(gen_, name##0)(DisasContext *ctx) \
1646 gen_##name(ctx, 0); \
1649 static void glue(gen_, name##1)(DisasContext *ctx) \
1651 gen_##name(ctx, 1); \
1653 #define GEN_PPC64_R4(name, opc1, opc2) \
1654 static void glue(gen_, name##0)(DisasContext *ctx) \
1656 gen_##name(ctx, 0, 0); \
1659 static void glue(gen_, name##1)(DisasContext *ctx) \
1661 gen_##name(ctx, 0, 1); \
1664 static void glue(gen_, name##2)(DisasContext *ctx) \
1666 gen_##name(ctx, 1, 0); \
1669 static void glue(gen_, name##3)(DisasContext *ctx) \
1671 gen_##name(ctx, 1, 1); \
1674 static inline void gen_rldinm(DisasContext *ctx, uint32_t mb, uint32_t me,
1675 uint32_t sh)
1677 if (likely(sh != 0 && mb == 0 && me == (63 - sh))) {
1678 tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
1679 } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
1680 tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
1681 } else {
1682 TCGv t0 = tcg_temp_new();
1683 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1684 if (likely(mb == 0 && me == 63)) {
1685 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1686 } else {
1687 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1689 tcg_temp_free(t0);
1691 if (unlikely(Rc(ctx->opcode) != 0))
1692 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1694 /* rldicl - rldicl. */
1695 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
1697 uint32_t sh, mb;
1699 sh = SH(ctx->opcode) | (shn << 5);
1700 mb = MB(ctx->opcode) | (mbn << 5);
1701 gen_rldinm(ctx, mb, 63, sh);
1703 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1704 /* rldicr - rldicr. */
1705 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
1707 uint32_t sh, me;
1709 sh = SH(ctx->opcode) | (shn << 5);
1710 me = MB(ctx->opcode) | (men << 5);
1711 gen_rldinm(ctx, 0, me, sh);
1713 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1714 /* rldic - rldic. */
1715 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
1717 uint32_t sh, mb;
1719 sh = SH(ctx->opcode) | (shn << 5);
1720 mb = MB(ctx->opcode) | (mbn << 5);
1721 gen_rldinm(ctx, mb, 63 - sh, sh);
1723 GEN_PPC64_R4(rldic, 0x1E, 0x04);
1725 static inline void gen_rldnm(DisasContext *ctx, uint32_t mb, uint32_t me)
1727 TCGv t0;
1729 mb = MB(ctx->opcode);
1730 me = ME(ctx->opcode);
1731 t0 = tcg_temp_new();
1732 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1733 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1734 if (unlikely(mb != 0 || me != 63)) {
1735 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1736 } else {
1737 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1739 tcg_temp_free(t0);
1740 if (unlikely(Rc(ctx->opcode) != 0))
1741 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1744 /* rldcl - rldcl. */
1745 static inline void gen_rldcl(DisasContext *ctx, int mbn)
1747 uint32_t mb;
1749 mb = MB(ctx->opcode) | (mbn << 5);
1750 gen_rldnm(ctx, mb, 63);
1752 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1753 /* rldcr - rldcr. */
1754 static inline void gen_rldcr(DisasContext *ctx, int men)
1756 uint32_t me;
1758 me = MB(ctx->opcode) | (men << 5);
1759 gen_rldnm(ctx, 0, me);
1761 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1762 /* rldimi - rldimi. */
1763 static inline void gen_rldimi(DisasContext *ctx, int mbn, int shn)
1765 uint32_t sh, mb, me;
1767 sh = SH(ctx->opcode) | (shn << 5);
1768 mb = MB(ctx->opcode) | (mbn << 5);
1769 me = 63 - sh;
1770 if (unlikely(sh == 0 && mb == 0)) {
1771 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1772 } else {
1773 TCGv t0, t1;
1774 target_ulong mask;
1776 t0 = tcg_temp_new();
1777 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1778 t1 = tcg_temp_new();
1779 mask = MASK(mb, me);
1780 tcg_gen_andi_tl(t0, t0, mask);
1781 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1782 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1783 tcg_temp_free(t0);
1784 tcg_temp_free(t1);
1786 if (unlikely(Rc(ctx->opcode) != 0))
1787 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1789 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1790 #endif
1792 /*** Integer shift ***/
1794 /* slw & slw. */
1795 static void gen_slw(DisasContext *ctx)
1797 TCGv t0, t1;
1799 t0 = tcg_temp_new();
1800 /* AND rS with a mask that is 0 when rB >= 0x20 */
1801 #if defined(TARGET_PPC64)
1802 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1803 tcg_gen_sari_tl(t0, t0, 0x3f);
1804 #else
1805 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1806 tcg_gen_sari_tl(t0, t0, 0x1f);
1807 #endif
1808 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1809 t1 = tcg_temp_new();
1810 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1811 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1812 tcg_temp_free(t1);
1813 tcg_temp_free(t0);
1814 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
1815 if (unlikely(Rc(ctx->opcode) != 0))
1816 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
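/*
 * Editor's sketch (not part of the original file): the shli/sari pair
 * above smears bit 5 of the shift count into an all-ones or all-zeroes
 * word, and the andc uses it to zero rS whenever the count is 32..63; the
 * actual shift then only needs the low five bits.  The net semantics:
 */
static inline uint32_t example_slw(uint32_t rs, uint32_t rb)
{
    unsigned n = rb & 0x3f;             /* slw looks at six bits of rB */
    return n < 32 ? rs << n : 0;
}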
1819 /* sraw & sraw. */
1820 static void gen_sraw(DisasContext *ctx)
1822 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)],
1823 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1824 if (unlikely(Rc(ctx->opcode) != 0))
1825 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1828 /* srawi & srawi. */
1829 static void gen_srawi(DisasContext *ctx)
1831 int sh = SH(ctx->opcode);
1832 if (sh != 0) {
1833 int l1, l2;
1834 TCGv t0;
1835 l1 = gen_new_label();
1836 l2 = gen_new_label();
1837 t0 = tcg_temp_local_new();
1838 tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1839 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
1840 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
1841 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
1842 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
1843 tcg_gen_br(l2);
1844 gen_set_label(l1);
1845 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1846 gen_set_label(l2);
1847 tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1848 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], t0, sh);
1849 tcg_temp_free(t0);
1850 } else {
1851 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1852 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1854 if (unlikely(Rc(ctx->opcode) != 0))
1855 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
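/*
 * Editor's sketch (not part of the original file): srawi sets XER[CA]
 * only when a negative value loses 1 bits off the right-hand end, which
 * is what the two conditional branches above test before the arithmetic
 * shift.  A hypothetical restatement (assuming arithmetic >> on negative
 * values, as the sari op guarantees):
 */
static inline int32_t example_srawi(int32_t rs, unsigned sh, int *ca)
{
    *ca = sh != 0 && rs < 0 && (rs & ((1u << sh) - 1)) != 0;
    return sh ? rs >> sh : rs;
}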
1858 /* srw & srw. */
1859 static void gen_srw(DisasContext *ctx)
1861 TCGv t0, t1;
1863 t0 = tcg_temp_new();
1864 /* AND rS with a mask that is 0 when rB >= 0x20 */
1865 #if defined(TARGET_PPC64)
1866 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1867 tcg_gen_sari_tl(t0, t0, 0x3f);
1868 #else
1869 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1870 tcg_gen_sari_tl(t0, t0, 0x1f);
1871 #endif
1872 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1873 tcg_gen_ext32u_tl(t0, t0);
1874 t1 = tcg_temp_new();
1875 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1876 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1877 tcg_temp_free(t1);
1878 tcg_temp_free(t0);
1879 if (unlikely(Rc(ctx->opcode) != 0))
1880 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1883 #if defined(TARGET_PPC64)
1884 /* sld & sld. */
1885 static void gen_sld(DisasContext *ctx)
1887 TCGv t0, t1;
1889 t0 = tcg_temp_new();
1890 /* AND rS with a mask that is 0 when rB >= 0x40 */
1891 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
1892 tcg_gen_sari_tl(t0, t0, 0x3f);
1893 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1894 t1 = tcg_temp_new();
1895 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
1896 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1897 tcg_temp_free(t1);
1898 tcg_temp_free(t0);
1899 if (unlikely(Rc(ctx->opcode) != 0))
1900 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1903 /* srad & srad. */
1904 static void gen_srad(DisasContext *ctx)
1906 gen_helper_srad(cpu_gpr[rA(ctx->opcode)],
1907 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1908 if (unlikely(Rc(ctx->opcode) != 0))
1909 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1911 /* sradi & sradi. */
1912 static inline void gen_sradi(DisasContext *ctx, int n)
1914 int sh = SH(ctx->opcode) + (n << 5);
1915 if (sh != 0) {
1916 int l1, l2;
1917 TCGv t0;
1918 l1 = gen_new_label();
1919 l2 = gen_new_label();
1920 t0 = tcg_temp_local_new();
1921 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
1922 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
1923 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
1924 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
1925 tcg_gen_br(l2);
1926 gen_set_label(l1);
1927 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1928 gen_set_label(l2);
1929 tcg_temp_free(t0);
1930 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
1931 } else {
1932 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1933 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1935 if (unlikely(Rc(ctx->opcode) != 0))
1936 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1939 static void gen_sradi0(DisasContext *ctx)
1941 gen_sradi(ctx, 0);
1944 static void gen_sradi1(DisasContext *ctx)
1946 gen_sradi(ctx, 1);
1949 /* srd & srd. */
1950 static void gen_srd(DisasContext *ctx)
1952 TCGv t0, t1;
1954 t0 = tcg_temp_new();
1955 /* AND rS with a mask that is 0 when rB >= 0x40 */
1956 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
1957 tcg_gen_sari_tl(t0, t0, 0x3f);
1958 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1959 t1 = tcg_temp_new();
1960 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
1961 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1962 tcg_temp_free(t1);
1963 tcg_temp_free(t0);
1964 if (unlikely(Rc(ctx->opcode) != 0))
1965 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1967 #endif
1969 /*** Floating-Point arithmetic ***/
1970 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
1971 static void gen_f##name(DisasContext *ctx) \
1973 if (unlikely(!ctx->fpu_enabled)) { \
1974 gen_exception(ctx, POWERPC_EXCP_FPU); \
1975 return; \
1977 /* NIP cannot be restored if the memory exception comes from a helper */ \
1978 gen_update_nip(ctx, ctx->nip - 4); \
1979 gen_reset_fpstatus(); \
1980 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
1981 cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
1982 if (isfloat) { \
1983 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
1985 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], set_fprf, \
1986 Rc(ctx->opcode) != 0); \
1989 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
1990 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
1991 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
1993 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
1994 static void gen_f##name(DisasContext *ctx) \
1996 if (unlikely(!ctx->fpu_enabled)) { \
1997 gen_exception(ctx, POWERPC_EXCP_FPU); \
1998 return; \
2000 /* NIP cannot be restored if the memory exception comes from a helper */ \
2001 gen_update_nip(ctx, ctx->nip - 4); \
2002 gen_reset_fpstatus(); \
2003 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2004 cpu_fpr[rB(ctx->opcode)]); \
2005 if (isfloat) { \
2006 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2008 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2009 set_fprf, Rc(ctx->opcode) != 0); \
2011 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
2012 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2013 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
2015 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
2016 static void gen_f##name(DisasContext *ctx) \
2018 if (unlikely(!ctx->fpu_enabled)) { \
2019 gen_exception(ctx, POWERPC_EXCP_FPU); \
2020 return; \
2022 /* NIP cannot be restored if the memory exception comes from a helper */ \
2023 gen_update_nip(ctx, ctx->nip - 4); \
2024 gen_reset_fpstatus(); \
2025 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2026 cpu_fpr[rC(ctx->opcode)]); \
2027 if (isfloat) { \
2028 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2030 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2031 set_fprf, Rc(ctx->opcode) != 0); \
2033 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
2034 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2035 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
2037 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
2038 static void gen_f##name(DisasContext *ctx) \
2040 if (unlikely(!ctx->fpu_enabled)) { \
2041 gen_exception(ctx, POWERPC_EXCP_FPU); \
2042 return; \
2044 /* NIP cannot be restored if the memory exception comes from a helper */ \
2045 gen_update_nip(ctx, ctx->nip - 4); \
2046 gen_reset_fpstatus(); \
2047 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2048 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2049 set_fprf, Rc(ctx->opcode) != 0); \
2052 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
2053 static void gen_f##name(DisasContext *ctx) \
2055 if (unlikely(!ctx->fpu_enabled)) { \
2056 gen_exception(ctx, POWERPC_EXCP_FPU); \
2057 return; \
2059 /* NIP cannot be restored if the memory exception comes from a helper */ \
2060 gen_update_nip(ctx, ctx->nip - 4); \
2061 gen_reset_fpstatus(); \
2062 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2063 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2064 set_fprf, Rc(ctx->opcode) != 0); \
2067 /* fadd - fadds */
2068 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
2069 /* fdiv - fdivs */
2070 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
2071 /* fmul - fmuls */
2072 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
2074 /* fre */
2075 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
2077 /* fres */
2078 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
2080 /* frsqrte */
2081 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
2083 /* frsqrtes */
2084 static void gen_frsqrtes(DisasContext *ctx)
2086 if (unlikely(!ctx->fpu_enabled)) {
2087 gen_exception(ctx, POWERPC_EXCP_FPU);
2088 return;
2090 /* NIP cannot be restored if the memory exception comes from a helper */
2091 gen_update_nip(ctx, ctx->nip - 4);
2092 gen_reset_fpstatus();
2093 gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2094 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2095 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2098 /* fsel */
2099 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
2100 /* fsub - fsubs */
2101 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
2102 /* Optional: */
2104 /* fsqrt */
2105 static void gen_fsqrt(DisasContext *ctx)
2107 if (unlikely(!ctx->fpu_enabled)) {
2108 gen_exception(ctx, POWERPC_EXCP_FPU);
2109 return;
2111 /* NIP cannot be restored if the memory exception comes from a helper */
2112 gen_update_nip(ctx, ctx->nip - 4);
2113 gen_reset_fpstatus();
2114 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2115 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2118 static void gen_fsqrts(DisasContext *ctx)
2120 if (unlikely(!ctx->fpu_enabled)) {
2121 gen_exception(ctx, POWERPC_EXCP_FPU);
2122 return;
2124 /* NIP cannot be restored if the memory exception comes from a helper */
2125 gen_update_nip(ctx, ctx->nip - 4);
2126 gen_reset_fpstatus();
2127 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2128 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2129 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2132 /*** Floating-Point multiply-and-add ***/
2133 /* fmadd - fmadds */
2134 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
2135 /* fmsub - fmsubs */
2136 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
2137 /* fnmadd - fnmadds */
2138 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
2139 /* fnmsub - fnmsubs */
2140 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
2142 /*** Floating-Point round & convert ***/
2143 /* fctiw */
2144 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
2145 /* fctiwz */
2146 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
2147 /* frsp */
2148 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
2149 #if defined(TARGET_PPC64)
2150 /* fcfid */
2151 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
2152 /* fctid */
2153 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
2154 /* fctidz */
2155 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
2156 #endif
2158 /* frin */
2159 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
2160 /* friz */
2161 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
2162 /* frip */
2163 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
2164 /* frim */
2165 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
2167 /*** Floating-Point compare ***/
2169 /* fcmpo */
2170 static void gen_fcmpo(DisasContext *ctx)
2172 TCGv_i32 crf;
2173 if (unlikely(!ctx->fpu_enabled)) {
2174 gen_exception(ctx, POWERPC_EXCP_FPU);
2175 return;
2177 /* NIP cannot be restored if the memory exception comes from a helper */
2178 gen_update_nip(ctx, ctx->nip - 4);
2179 gen_reset_fpstatus();
2180 crf = tcg_const_i32(crfD(ctx->opcode));
2181 gen_helper_fcmpo(cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)], crf);
2182 tcg_temp_free_i32(crf);
2183 gen_helper_float_check_status();
2186 /* fcmpu */
2187 static void gen_fcmpu(DisasContext *ctx)
2189 TCGv_i32 crf;
2190 if (unlikely(!ctx->fpu_enabled)) {
2191 gen_exception(ctx, POWERPC_EXCP_FPU);
2192 return;
2194 /* NIP cannot be restored if the memory exception comes from a helper */
2195 gen_update_nip(ctx, ctx->nip - 4);
2196 gen_reset_fpstatus();
2197 crf = tcg_const_i32(crfD(ctx->opcode));
2198 gen_helper_fcmpu(cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)], crf);
2199 tcg_temp_free_i32(crf);
2200 gen_helper_float_check_status();
2203 /*** Floating-point move ***/
2204 /* fabs */
2205 /* XXX: beware that fabs never checks for NaNs nor updates FPSCR */
2206 GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT);
2208 /* fmr - fmr. */
2209 /* XXX: beware that fmr never checks for NaNs nor updates FPSCR */
2210 static void gen_fmr(DisasContext *ctx)
2212 if (unlikely(!ctx->fpu_enabled)) {
2213 gen_exception(ctx, POWERPC_EXCP_FPU);
2214 return;
2216 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2217 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2220 /* fnabs */
2221 /* XXX: beware that fnabs never checks for NaNs nor updates FPSCR */
2222 GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT);
2223 /* fneg */
2224 /* XXX: beware that fneg never checks for NaNs nor updates FPSCR */
2225 GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT);
2227 /*** Floating-Point status & ctrl register ***/
2229 /* mcrfs */
2230 static void gen_mcrfs(DisasContext *ctx)
2232 int bfa;
2234 if (unlikely(!ctx->fpu_enabled)) {
2235 gen_exception(ctx, POWERPC_EXCP_FPU);
2236 return;
2238 bfa = 4 * (7 - crfS(ctx->opcode));
2239 tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_fpscr, bfa);
2240 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
2241 tcg_gen_andi_i32(cpu_fpscr, cpu_fpscr, ~(0xF << bfa));
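/*
 * Illustrative sketch, not part of the translator: mcrfs copies FPSCR
 * field crfS into CR field crfD and then clears the copied FPSCR bits;
 * field 0 lives in the most significant nibble, hence the 4 * (7 - crfS)
 * shift above.  Extracting one field in plain C, with hypothetical names:
 *
 *   static uint32_t fpscr_field(uint32_t fpscr, int crfs)
 *   {
 *       return (fpscr >> (4 * (7 - crfs))) & 0xf;
 *   }
 */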
2244 /* mffs */
2245 static void gen_mffs(DisasContext *ctx)
2247 if (unlikely(!ctx->fpu_enabled)) {
2248 gen_exception(ctx, POWERPC_EXCP_FPU);
2249 return;
2251 gen_reset_fpstatus();
2252 tcg_gen_extu_i32_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
2253 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2256 /* mtfsb0 */
2257 static void gen_mtfsb0(DisasContext *ctx)
2259 uint8_t crb;
2261 if (unlikely(!ctx->fpu_enabled)) {
2262 gen_exception(ctx, POWERPC_EXCP_FPU);
2263 return;
2265 crb = 31 - crbD(ctx->opcode);
2266 gen_reset_fpstatus();
2267 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
2268 TCGv_i32 t0;
2269 /* NIP cannot be restored if the memory exception comes from a helper */
2270 gen_update_nip(ctx, ctx->nip - 4);
2271 t0 = tcg_const_i32(crb);
2272 gen_helper_fpscr_clrbit(t0);
2273 tcg_temp_free_i32(t0);
2275 if (unlikely(Rc(ctx->opcode) != 0)) {
2276 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2280 /* mtfsb1 */
2281 static void gen_mtfsb1(DisasContext *ctx)
2283 uint8_t crb;
2285 if (unlikely(!ctx->fpu_enabled)) {
2286 gen_exception(ctx, POWERPC_EXCP_FPU);
2287 return;
2289 crb = 31 - crbD(ctx->opcode);
2290 gen_reset_fpstatus();
2291 /* XXX: we pretend we can only do IEEE floating-point computations */
2292 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
2293 TCGv_i32 t0;
2294 /* NIP cannot be restored if the memory exception comes from a helper */
2295 gen_update_nip(ctx, ctx->nip - 4);
2296 t0 = tcg_const_i32(crb);
2297 gen_helper_fpscr_setbit(t0);
2298 tcg_temp_free_i32(t0);
2300 if (unlikely(Rc(ctx->opcode) != 0)) {
2301 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2303 /* We can raise a deferred exception */
2304 gen_helper_float_check_status();
2307 /* mtfsf */
2308 static void gen_mtfsf(DisasContext *ctx)
2310 TCGv_i32 t0;
2311 int L = ctx->opcode & 0x02000000;
2313 if (unlikely(!ctx->fpu_enabled)) {
2314 gen_exception(ctx, POWERPC_EXCP_FPU);
2315 return;
2317 /* NIP cannot be restored if the memory exception comes from a helper */
2318 gen_update_nip(ctx, ctx->nip - 4);
2319 gen_reset_fpstatus();
2320 if (L)
2321 t0 = tcg_const_i32(0xff);
2322 else
2323 t0 = tcg_const_i32(FM(ctx->opcode));
2324 gen_helper_store_fpscr(cpu_fpr[rB(ctx->opcode)], t0);
2325 tcg_temp_free_i32(t0);
2326 if (unlikely(Rc(ctx->opcode) != 0)) {
2327 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2329 /* We can raise a deferred exception */
2330 gen_helper_float_check_status();
2333 /* mtfsfi */
2334 static void gen_mtfsfi(DisasContext *ctx)
2336 int bf, sh;
2337 TCGv_i64 t0;
2338 TCGv_i32 t1;
2340 if (unlikely(!ctx->fpu_enabled)) {
2341 gen_exception(ctx, POWERPC_EXCP_FPU);
2342 return;
2344 bf = crbD(ctx->opcode) >> 2;
2345 sh = 7 - bf;
2346 /* NIP cannot be restored if the memory exception comes from a helper */
2347 gen_update_nip(ctx, ctx->nip - 4);
2348 gen_reset_fpstatus();
2349 t0 = tcg_const_i64(FPIMM(ctx->opcode) << (4 * sh));
2350 t1 = tcg_const_i32(1 << sh);
2351 gen_helper_store_fpscr(t0, t1);
2352 tcg_temp_free_i64(t0);
2353 tcg_temp_free_i32(t1);
2354 if (unlikely(Rc(ctx->opcode) != 0)) {
2355 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2357 /* We can raise a deferred exception */
2358 gen_helper_float_check_status();
2361 /*** Addressing modes ***/
2362 /* Register indirect with immediate index: EA = (rA|0) + SIMM */
2363 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2364 target_long maskl)
2366 target_long simm = SIMM(ctx->opcode);
2368 simm &= ~maskl;
2369 if (rA(ctx->opcode) == 0) {
2370 #if defined(TARGET_PPC64)
2371 if (!ctx->sf_mode) {
2372 tcg_gen_movi_tl(EA, (uint32_t)simm);
2373 } else
2374 #endif
2375 tcg_gen_movi_tl(EA, simm);
2376 } else if (likely(simm != 0)) {
2377 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2378 #if defined(TARGET_PPC64)
2379 if (!ctx->sf_mode) {
2380 tcg_gen_ext32u_tl(EA, EA);
2382 #endif
2383 } else {
2384 #if defined(TARGET_PPC64)
2385 if (!ctx->sf_mode) {
2386 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2387 } else
2388 #endif
2389 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
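/*
 * Illustrative sketch, not part of the translator: once the maskl low
 * bits of SIMM have been cleared, the effective address computed above
 * is EA = (rA|0) + SIMM, truncated to 32 bits when not in 64-bit mode.
 * In plain C, with hypothetical names:
 *
 *   static uint64_t ea_imm_index(uint64_t ra_val, int ra, int64_t simm,
 *                                int sf_mode)
 *   {
 *       uint64_t ea = (ra == 0 ? 0 : ra_val) + simm;
 *       return sf_mode ? ea : (uint32_t)ea;
 *   }
 */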
2393 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
2395 if (rA(ctx->opcode) == 0) {
2396 #if defined(TARGET_PPC64)
2397 if (!ctx->sf_mode) {
2398 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2399 } else
2400 #endif
2401 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2402 } else {
2403 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2404 #if defined(TARGET_PPC64)
2405 if (!ctx->sf_mode) {
2406 tcg_gen_ext32u_tl(EA, EA);
2408 #endif
2412 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
2414 if (rA(ctx->opcode) == 0) {
2415 tcg_gen_movi_tl(EA, 0);
2416 } else {
2417 #if defined(TARGET_PPC64)
2418 if (!ctx->sf_mode) {
2419 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2420 } else
2421 #endif
2422 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2426 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
2427 target_long val)
2429 tcg_gen_addi_tl(ret, arg1, val);
2430 #if defined(TARGET_PPC64)
2431 if (!ctx->sf_mode) {
2432 tcg_gen_ext32u_tl(ret, ret);
2434 #endif
2437 static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask)
2439 int l1 = gen_new_label();
2440 TCGv t0 = tcg_temp_new();
2441 TCGv_i32 t1, t2;
2442 /* NIP cannot be restored if the memory exception comes from a helper */
2443 gen_update_nip(ctx, ctx->nip - 4);
2444 tcg_gen_andi_tl(t0, EA, mask);
2445 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2446 t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
2447 t2 = tcg_const_i32(0);
2448 gen_helper_raise_exception_err(t1, t2);
2449 tcg_temp_free_i32(t1);
2450 tcg_temp_free_i32(t2);
2451 gen_set_label(l1);
2452 tcg_temp_free(t0);
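/*
 * Illustrative sketch, not part of the translator: gen_check_align simply
 * tests the low bits of the effective address against the required mask
 * and raises POWERPC_EXCP_ALIGN when any of them is set.  In plain C,
 * with hypothetical names:
 *
 *   static int is_misaligned(uint64_t ea, int mask)
 *   {
 *       return (ea & mask) != 0;    /* e.g. mask 0x03 for lwarx below */
 *   }
 */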
2455 /*** Integer load ***/
2456 static inline void gen_qemu_ld8u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2458 tcg_gen_qemu_ld8u(arg1, arg2, ctx->mem_idx);
2461 static inline void gen_qemu_ld8s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2463 tcg_gen_qemu_ld8s(arg1, arg2, ctx->mem_idx);
2466 static inline void gen_qemu_ld16u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2468 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2469 if (unlikely(ctx->le_mode)) {
2470 tcg_gen_bswap16_tl(arg1, arg1);
2474 static inline void gen_qemu_ld16s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2476 if (unlikely(ctx->le_mode)) {
2477 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2478 tcg_gen_bswap16_tl(arg1, arg1);
2479 tcg_gen_ext16s_tl(arg1, arg1);
2480 } else {
2481 tcg_gen_qemu_ld16s(arg1, arg2, ctx->mem_idx);
2485 static inline void gen_qemu_ld32u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2487 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2488 if (unlikely(ctx->le_mode)) {
2489 tcg_gen_bswap32_tl(arg1, arg1);
2493 #if defined(TARGET_PPC64)
2494 static inline void gen_qemu_ld32s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2496 if (unlikely(ctx->le_mode)) {
2497 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2498 tcg_gen_bswap32_tl(arg1, arg1);
2499 tcg_gen_ext32s_tl(arg1, arg1);
2500 } else
2501 tcg_gen_qemu_ld32s(arg1, arg2, ctx->mem_idx);
2503 #endif
2505 static inline void gen_qemu_ld64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2507 tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
2508 if (unlikely(ctx->le_mode)) {
2509 tcg_gen_bswap64_i64(arg1, arg1);
2513 static inline void gen_qemu_st8(DisasContext *ctx, TCGv arg1, TCGv arg2)
2515 tcg_gen_qemu_st8(arg1, arg2, ctx->mem_idx);
2518 static inline void gen_qemu_st16(DisasContext *ctx, TCGv arg1, TCGv arg2)
2520 if (unlikely(ctx->le_mode)) {
2521 TCGv t0 = tcg_temp_new();
2522 tcg_gen_ext16u_tl(t0, arg1);
2523 tcg_gen_bswap16_tl(t0, t0);
2524 tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2525 tcg_temp_free(t0);
2526 } else {
2527 tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2531 static inline void gen_qemu_st32(DisasContext *ctx, TCGv arg1, TCGv arg2)
2533 if (unlikely(ctx->le_mode)) {
2534 TCGv t0 = tcg_temp_new();
2535 tcg_gen_ext32u_tl(t0, arg1);
2536 tcg_gen_bswap32_tl(t0, t0);
2537 tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2538 tcg_temp_free(t0);
2539 } else {
2540 tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2544 static inline void gen_qemu_st64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2546 if (unlikely(ctx->le_mode)) {
2547 TCGv_i64 t0 = tcg_temp_new_i64();
2548 tcg_gen_bswap64_i64(t0, arg1);
2549 tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
2550 tcg_temp_free_i64(t0);
2551 } else
2552 tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx);
2555 #define GEN_LD(name, ldop, opc, type) \
2556 static void glue(gen_, name)(DisasContext *ctx) \
2558 TCGv EA; \
2559 gen_set_access_type(ctx, ACCESS_INT); \
2560 EA = tcg_temp_new(); \
2561 gen_addr_imm_index(ctx, EA, 0); \
2562 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2563 tcg_temp_free(EA); \
2566 #define GEN_LDU(name, ldop, opc, type) \
2567 static void glue(gen_, name##u)(DisasContext *ctx) \
2569 TCGv EA; \
2570 if (unlikely(rA(ctx->opcode) == 0 || \
2571 rA(ctx->opcode) == rD(ctx->opcode))) { \
2572 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2573 return; \
2575 gen_set_access_type(ctx, ACCESS_INT); \
2576 EA = tcg_temp_new(); \
2577 if (type == PPC_64B) \
2578 gen_addr_imm_index(ctx, EA, 0x03); \
2579 else \
2580 gen_addr_imm_index(ctx, EA, 0); \
2581 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2582 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2583 tcg_temp_free(EA); \
2586 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
2587 static void glue(gen_, name##ux)(DisasContext *ctx) \
2589 TCGv EA; \
2590 if (unlikely(rA(ctx->opcode) == 0 || \
2591 rA(ctx->opcode) == rD(ctx->opcode))) { \
2592 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2593 return; \
2595 gen_set_access_type(ctx, ACCESS_INT); \
2596 EA = tcg_temp_new(); \
2597 gen_addr_reg_index(ctx, EA); \
2598 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2599 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2600 tcg_temp_free(EA); \
2603 #define GEN_LDX(name, ldop, opc2, opc3, type) \
2604 static void glue(gen_, name##x)(DisasContext *ctx) \
2606 TCGv EA; \
2607 gen_set_access_type(ctx, ACCESS_INT); \
2608 EA = tcg_temp_new(); \
2609 gen_addr_reg_index(ctx, EA); \
2610 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2611 tcg_temp_free(EA); \
2614 #define GEN_LDS(name, ldop, op, type) \
2615 GEN_LD(name, ldop, op | 0x20, type); \
2616 GEN_LDU(name, ldop, op | 0x21, type); \
2617 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
2618 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
2620 /* lbz lbzu lbzux lbzx */
2621 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
2622 /* lha lhau lhaux lhax */
2623 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
2624 /* lhz lhzu lhzux lhzx */
2625 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
2626 /* lwz lwzu lwzux lwzx */
2627 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
2628 #if defined(TARGET_PPC64)
2629 /* lwaux */
2630 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
2631 /* lwax */
2632 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
2633 /* ldux */
2634 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B);
2635 /* ldx */
2636 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B);
2638 static void gen_ld(DisasContext *ctx)
2640 TCGv EA;
2641 if (Rc(ctx->opcode)) {
2642 if (unlikely(rA(ctx->opcode) == 0 ||
2643 rA(ctx->opcode) == rD(ctx->opcode))) {
2644 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2645 return;
2648 gen_set_access_type(ctx, ACCESS_INT);
2649 EA = tcg_temp_new();
2650 gen_addr_imm_index(ctx, EA, 0x03);
2651 if (ctx->opcode & 0x02) {
2652 /* lwa (lwau is undefined) */
2653 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2654 } else {
2655 /* ld - ldu */
2656 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2658 if (Rc(ctx->opcode))
2659 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2660 tcg_temp_free(EA);
2663 /* lq */
2664 static void gen_lq(DisasContext *ctx)
2666 #if defined(CONFIG_USER_ONLY)
2667 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2668 #else
2669 int ra, rd;
2670 TCGv EA;
2672 /* Restore CPU state */
2673 if (unlikely(ctx->mem_idx == 0)) {
2674 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2675 return;
2677 ra = rA(ctx->opcode);
2678 rd = rD(ctx->opcode);
2679 if (unlikely((rd & 1) || rd == ra)) {
2680 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2681 return;
2683 if (unlikely(ctx->le_mode)) {
2684 /* Little-endian mode is not handled */
2685 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2686 return;
2688 gen_set_access_type(ctx, ACCESS_INT);
2689 EA = tcg_temp_new();
2690 gen_addr_imm_index(ctx, EA, 0x0F);
2691 gen_qemu_ld64(ctx, cpu_gpr[rd], EA);
2692 gen_addr_add(ctx, EA, EA, 8);
2693 gen_qemu_ld64(ctx, cpu_gpr[rd+1], EA);
2694 tcg_temp_free(EA);
2695 #endif
2697 #endif
2699 /*** Integer store ***/
2700 #define GEN_ST(name, stop, opc, type) \
2701 static void glue(gen_, name)(DisasContext *ctx) \
2703 TCGv EA; \
2704 gen_set_access_type(ctx, ACCESS_INT); \
2705 EA = tcg_temp_new(); \
2706 gen_addr_imm_index(ctx, EA, 0); \
2707 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2708 tcg_temp_free(EA); \
2711 #define GEN_STU(name, stop, opc, type) \
2712 static void glue(gen_, stop##u)(DisasContext *ctx) \
2714 TCGv EA; \
2715 if (unlikely(rA(ctx->opcode) == 0)) { \
2716 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2717 return; \
2719 gen_set_access_type(ctx, ACCESS_INT); \
2720 EA = tcg_temp_new(); \
2721 if (type == PPC_64B) \
2722 gen_addr_imm_index(ctx, EA, 0x03); \
2723 else \
2724 gen_addr_imm_index(ctx, EA, 0); \
2725 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2726 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2727 tcg_temp_free(EA); \
2730 #define GEN_STUX(name, stop, opc2, opc3, type) \
2731 static void glue(gen_, name##ux)(DisasContext *ctx) \
2733 TCGv EA; \
2734 if (unlikely(rA(ctx->opcode) == 0)) { \
2735 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2736 return; \
2738 gen_set_access_type(ctx, ACCESS_INT); \
2739 EA = tcg_temp_new(); \
2740 gen_addr_reg_index(ctx, EA); \
2741 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2742 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2743 tcg_temp_free(EA); \
2746 #define GEN_STX(name, stop, opc2, opc3, type) \
2747 static void glue(gen_, name##x)(DisasContext *ctx) \
2749 TCGv EA; \
2750 gen_set_access_type(ctx, ACCESS_INT); \
2751 EA = tcg_temp_new(); \
2752 gen_addr_reg_index(ctx, EA); \
2753 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2754 tcg_temp_free(EA); \
2757 #define GEN_STS(name, stop, op, type) \
2758 GEN_ST(name, stop, op | 0x20, type); \
2759 GEN_STU(name, stop, op | 0x21, type); \
2760 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
2761 GEN_STX(name, stop, 0x17, op | 0x00, type)
2763 /* stb stbu stbux stbx */
2764 GEN_STS(stb, st8, 0x06, PPC_INTEGER);
2765 /* sth sthu sthux sthx */
2766 GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
2767 /* stw stwu stwux stwx */
2768 GEN_STS(stw, st32, 0x04, PPC_INTEGER);
2769 #if defined(TARGET_PPC64)
2770 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B);
2771 GEN_STX(std, st64, 0x15, 0x04, PPC_64B);
2773 static void gen_std(DisasContext *ctx)
2775 int rs;
2776 TCGv EA;
2778 rs = rS(ctx->opcode);
2779 if ((ctx->opcode & 0x3) == 0x2) {
2780 #if defined(CONFIG_USER_ONLY)
2781 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2782 #else
2783 /* stq */
2784 if (unlikely(ctx->mem_idx == 0)) {
2785 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2786 return;
2788 if (unlikely(rs & 1)) {
2789 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2790 return;
2792 if (unlikely(ctx->le_mode)) {
2793 /* Little-endian mode is not handled */
2794 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2795 return;
2797 gen_set_access_type(ctx, ACCESS_INT);
2798 EA = tcg_temp_new();
2799 gen_addr_imm_index(ctx, EA, 0x03);
2800 gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2801 gen_addr_add(ctx, EA, EA, 8);
2802 gen_qemu_st64(ctx, cpu_gpr[rs+1], EA);
2803 tcg_temp_free(EA);
2804 #endif
2805 } else {
2806 /* std / stdu */
2807 if (Rc(ctx->opcode)) {
2808 if (unlikely(rA(ctx->opcode) == 0)) {
2809 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2810 return;
2813 gen_set_access_type(ctx, ACCESS_INT);
2814 EA = tcg_temp_new();
2815 gen_addr_imm_index(ctx, EA, 0x03);
2816 gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2817 if (Rc(ctx->opcode))
2818 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2819 tcg_temp_free(EA);
2822 #endif
2823 /*** Integer load and store with byte reverse ***/
2824 /* lhbrx */
2825 static inline void gen_qemu_ld16ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2827 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2828 if (likely(!ctx->le_mode)) {
2829 tcg_gen_bswap16_tl(arg1, arg1);
2832 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
2834 /* lwbrx */
2835 static inline void gen_qemu_ld32ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2837 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2838 if (likely(!ctx->le_mode)) {
2839 tcg_gen_bswap32_tl(arg1, arg1);
2842 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
2844 /* sthbrx */
2845 static inline void gen_qemu_st16r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2847 if (likely(!ctx->le_mode)) {
2848 TCGv t0 = tcg_temp_new();
2849 tcg_gen_ext16u_tl(t0, arg1);
2850 tcg_gen_bswap16_tl(t0, t0);
2851 tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2852 tcg_temp_free(t0);
2853 } else {
2854 tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2857 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
2859 /* stwbrx */
2860 static inline void gen_qemu_st32r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2862 if (likely(!ctx->le_mode)) {
2863 TCGv t0 = tcg_temp_new();
2864 tcg_gen_ext32u_tl(t0, arg1);
2865 tcg_gen_bswap32_tl(t0, t0);
2866 tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2867 tcg_temp_free(t0);
2868 } else {
2869 tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2872 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
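/*
 * Illustrative sketch, not part of the translator: lhbrx/lwbrx and
 * sthbrx/stwbrx access memory in the byte order opposite to the current
 * endianness, which is why the bswap above runs on the (likely)
 * big-endian path and is skipped in little-endian mode.  The 32-bit
 * reversal in plain C, with a hypothetical name:
 *
 *   static uint32_t byte_reverse32(uint32_t v)
 *   {
 *       return (v >> 24) | ((v >> 8) & 0x0000ff00)
 *            | ((v << 8) & 0x00ff0000) | (v << 24);
 *   }
 */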
2874 /*** Integer load and store multiple ***/
2876 /* lmw */
2877 static void gen_lmw(DisasContext *ctx)
2879 TCGv t0;
2880 TCGv_i32 t1;
2881 gen_set_access_type(ctx, ACCESS_INT);
2882 /* NIP cannot be restored if the memory exception comes from a helper */
2883 gen_update_nip(ctx, ctx->nip - 4);
2884 t0 = tcg_temp_new();
2885 t1 = tcg_const_i32(rD(ctx->opcode));
2886 gen_addr_imm_index(ctx, t0, 0);
2887 gen_helper_lmw(t0, t1);
2888 tcg_temp_free(t0);
2889 tcg_temp_free_i32(t1);
2892 /* stmw */
2893 static void gen_stmw(DisasContext *ctx)
2895 TCGv t0;
2896 TCGv_i32 t1;
2897 gen_set_access_type(ctx, ACCESS_INT);
2898 /* NIP cannot be restored if the memory exception comes from a helper */
2899 gen_update_nip(ctx, ctx->nip - 4);
2900 t0 = tcg_temp_new();
2901 t1 = tcg_const_i32(rS(ctx->opcode));
2902 gen_addr_imm_index(ctx, t0, 0);
2903 gen_helper_stmw(t0, t1);
2904 tcg_temp_free(t0);
2905 tcg_temp_free_i32(t1);
2908 /*** Integer load and store strings ***/
2910 /* lswi */
2911 /* PowerPC32 specification says we must generate an exception if
2912 * rA is in the range of registers to be loaded.
2913 * On the other hand, IBM says this is valid, but rA won't be loaded.
2914 * For now, I'll follow the spec...
2916 static void gen_lswi(DisasContext *ctx)
2918 TCGv t0;
2919 TCGv_i32 t1, t2;
2920 int nb = NB(ctx->opcode);
2921 int start = rD(ctx->opcode);
2922 int ra = rA(ctx->opcode);
2923 int nr;
2925 if (nb == 0)
2926 nb = 32;
2927 nr = nb / 4;
2928 if (unlikely(((start + nr) > 32 &&
2929 start <= ra && (start + nr - 32) > ra) ||
2930 ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
2931 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
2932 return;
2934 gen_set_access_type(ctx, ACCESS_INT);
2935 /* NIP cannot be restored if the memory exception comes from a helper */
2936 gen_update_nip(ctx, ctx->nip - 4);
2937 t0 = tcg_temp_new();
2938 gen_addr_register(ctx, t0);
2939 t1 = tcg_const_i32(nb);
2940 t2 = tcg_const_i32(start);
2941 gen_helper_lsw(t0, t1, t2);
2942 tcg_temp_free(t0);
2943 tcg_temp_free_i32(t1);
2944 tcg_temp_free_i32(t2);
2947 /* lswx */
2948 static void gen_lswx(DisasContext *ctx)
2950 TCGv t0;
2951 TCGv_i32 t1, t2, t3;
2952 gen_set_access_type(ctx, ACCESS_INT);
2953 /* NIP cannot be restored if the memory exception comes from a helper */
2954 gen_update_nip(ctx, ctx->nip - 4);
2955 t0 = tcg_temp_new();
2956 gen_addr_reg_index(ctx, t0);
2957 t1 = tcg_const_i32(rD(ctx->opcode));
2958 t2 = tcg_const_i32(rA(ctx->opcode));
2959 t3 = tcg_const_i32(rB(ctx->opcode));
2960 gen_helper_lswx(t0, t1, t2, t3);
2961 tcg_temp_free(t0);
2962 tcg_temp_free_i32(t1);
2963 tcg_temp_free_i32(t2);
2964 tcg_temp_free_i32(t3);
2967 /* stswi */
2968 static void gen_stswi(DisasContext *ctx)
2970 TCGv t0;
2971 TCGv_i32 t1, t2;
2972 int nb = NB(ctx->opcode);
2973 gen_set_access_type(ctx, ACCESS_INT);
2974 /* NIP cannot be restored if the memory exception comes from a helper */
2975 gen_update_nip(ctx, ctx->nip - 4);
2976 t0 = tcg_temp_new();
2977 gen_addr_register(ctx, t0);
2978 if (nb == 0)
2979 nb = 32;
2980 t1 = tcg_const_i32(nb);
2981 t2 = tcg_const_i32(rS(ctx->opcode));
2982 gen_helper_stsw(t0, t1, t2);
2983 tcg_temp_free(t0);
2984 tcg_temp_free_i32(t1);
2985 tcg_temp_free_i32(t2);
2988 /* stswx */
2989 static void gen_stswx(DisasContext *ctx)
2991 TCGv t0;
2992 TCGv_i32 t1, t2;
2993 gen_set_access_type(ctx, ACCESS_INT);
2994 /* NIP cannot be restored if the memory exception comes from a helper */
2995 gen_update_nip(ctx, ctx->nip - 4);
2996 t0 = tcg_temp_new();
2997 gen_addr_reg_index(ctx, t0);
2998 t1 = tcg_temp_new_i32();
2999 tcg_gen_trunc_tl_i32(t1, cpu_xer);
3000 tcg_gen_andi_i32(t1, t1, 0x7F);
3001 t2 = tcg_const_i32(rS(ctx->opcode));
3002 gen_helper_stsw(t0, t1, t2);
3003 tcg_temp_free(t0);
3004 tcg_temp_free_i32(t1);
3005 tcg_temp_free_i32(t2);
3008 /*** Memory synchronisation ***/
3009 /* eieio */
3010 static void gen_eieio(DisasContext *ctx)
3014 /* isync */
3015 static void gen_isync(DisasContext *ctx)
3017 gen_stop_exception(ctx);
3020 /* lwarx */
3021 static void gen_lwarx(DisasContext *ctx)
3023 TCGv t0;
3024 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3025 gen_set_access_type(ctx, ACCESS_RES);
3026 t0 = tcg_temp_local_new();
3027 gen_addr_reg_index(ctx, t0);
3028 gen_check_align(ctx, t0, 0x03);
3029 gen_qemu_ld32u(ctx, gpr, t0);
3030 tcg_gen_mov_tl(cpu_reserve, t0);
3031 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUState, reserve_val));
3032 tcg_temp_free(t0);
3035 #if defined(CONFIG_USER_ONLY)
3036 static void gen_conditional_store (DisasContext *ctx, TCGv EA,
3037 int reg, int size)
3039 TCGv t0 = tcg_temp_new();
3040 uint32_t save_exception = ctx->exception;
3042 tcg_gen_st_tl(EA, cpu_env, offsetof(CPUState, reserve_ea));
3043 tcg_gen_movi_tl(t0, (size << 5) | reg);
3044 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, reserve_info));
3045 tcg_temp_free(t0);
3046 gen_update_nip(ctx, ctx->nip-4);
3047 ctx->exception = POWERPC_EXCP_BRANCH;
3048 gen_exception(ctx, POWERPC_EXCP_STCX);
3049 ctx->exception = save_exception;
3051 #endif
3053 /* stwcx. */
3054 static void gen_stwcx_(DisasContext *ctx)
3056 TCGv t0;
3057 gen_set_access_type(ctx, ACCESS_RES);
3058 t0 = tcg_temp_local_new();
3059 gen_addr_reg_index(ctx, t0);
3060 gen_check_align(ctx, t0, 0x03);
3061 #if defined(CONFIG_USER_ONLY)
3062 gen_conditional_store(ctx, t0, rS(ctx->opcode), 4);
3063 #else
3065 int l1;
3067 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
3068 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
3069 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
3070 l1 = gen_new_label();
3071 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3072 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3073 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3074 gen_set_label(l1);
3075 tcg_gen_movi_tl(cpu_reserve, -1);
3077 #endif
3078 tcg_temp_free(t0);
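/*
 * Illustrative sketch, not part of the translator: on the system-mode
 * path above, stwcx. copies XER.SO into CR0, performs the store and sets
 * CR0.EQ only when the store address matches the saved reservation, and
 * the reservation is lost in either case.  In plain C, with hypothetical
 * names:
 *
 *   static int stwcx_succeeds(uint64_t ea, uint64_t *reserve)
 *   {
 *       int ok = (ea == *reserve);
 *       *reserve = (uint64_t)-1;    /* reservation always cleared */
 *       return ok;
 *   }
 */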
3081 #if defined(TARGET_PPC64)
3082 /* ldarx */
3083 static void gen_ldarx(DisasContext *ctx)
3085 TCGv t0;
3086 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3087 gen_set_access_type(ctx, ACCESS_RES);
3088 t0 = tcg_temp_local_new();
3089 gen_addr_reg_index(ctx, t0);
3090 gen_check_align(ctx, t0, 0x07);
3091 gen_qemu_ld64(ctx, gpr, t0);
3092 tcg_gen_mov_tl(cpu_reserve, t0);
3093 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUState, reserve_val));
3094 tcg_temp_free(t0);
3097 /* stdcx. */
3098 static void gen_stdcx_(DisasContext *ctx)
3100 TCGv t0;
3101 gen_set_access_type(ctx, ACCESS_RES);
3102 t0 = tcg_temp_local_new();
3103 gen_addr_reg_index(ctx, t0);
3104 gen_check_align(ctx, t0, 0x07);
3105 #if defined(CONFIG_USER_ONLY)
3106 gen_conditional_store(ctx, t0, rS(ctx->opcode), 8);
3107 #else
3109 int l1;
3110 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
3111 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
3112 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
3113 l1 = gen_new_label();
3114 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3115 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3116 gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3117 gen_set_label(l1);
3118 tcg_gen_movi_tl(cpu_reserve, -1);
3120 #endif
3121 tcg_temp_free(t0);
3123 #endif /* defined(TARGET_PPC64) */
3125 /* sync */
3126 static void gen_sync(DisasContext *ctx)
3130 /* wait */
3131 static void gen_wait(DisasContext *ctx)
3133 TCGv_i32 t0 = tcg_temp_new_i32();
3134 tcg_gen_st_i32(t0, cpu_env, offsetof(CPUState, halted));
3135 tcg_temp_free_i32(t0);
3136 /* Stop translation, as the CPU is supposed to sleep from now on */
3137 gen_exception_err(ctx, EXCP_HLT, 1);
3140 /*** Floating-point load ***/
3141 #define GEN_LDF(name, ldop, opc, type) \
3142 static void glue(gen_, name)(DisasContext *ctx) \
3144 TCGv EA; \
3145 if (unlikely(!ctx->fpu_enabled)) { \
3146 gen_exception(ctx, POWERPC_EXCP_FPU); \
3147 return; \
3149 gen_set_access_type(ctx, ACCESS_FLOAT); \
3150 EA = tcg_temp_new(); \
3151 gen_addr_imm_index(ctx, EA, 0); \
3152 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3153 tcg_temp_free(EA); \
3156 #define GEN_LDUF(name, ldop, opc, type) \
3157 static void glue(gen_, name##u)(DisasContext *ctx) \
3159 TCGv EA; \
3160 if (unlikely(!ctx->fpu_enabled)) { \
3161 gen_exception(ctx, POWERPC_EXCP_FPU); \
3162 return; \
3164 if (unlikely(rA(ctx->opcode) == 0)) { \
3165 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3166 return; \
3168 gen_set_access_type(ctx, ACCESS_FLOAT); \
3169 EA = tcg_temp_new(); \
3170 gen_addr_imm_index(ctx, EA, 0); \
3171 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3172 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3173 tcg_temp_free(EA); \
3176 #define GEN_LDUXF(name, ldop, opc, type) \
3177 static void glue(gen_, name##ux)(DisasContext *ctx) \
3179 TCGv EA; \
3180 if (unlikely(!ctx->fpu_enabled)) { \
3181 gen_exception(ctx, POWERPC_EXCP_FPU); \
3182 return; \
3184 if (unlikely(rA(ctx->opcode) == 0)) { \
3185 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3186 return; \
3188 gen_set_access_type(ctx, ACCESS_FLOAT); \
3189 EA = tcg_temp_new(); \
3190 gen_addr_reg_index(ctx, EA); \
3191 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3192 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3193 tcg_temp_free(EA); \
3196 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
3197 static void glue(gen_, name##x)(DisasContext *ctx) \
3199 TCGv EA; \
3200 if (unlikely(!ctx->fpu_enabled)) { \
3201 gen_exception(ctx, POWERPC_EXCP_FPU); \
3202 return; \
3204 gen_set_access_type(ctx, ACCESS_FLOAT); \
3205 EA = tcg_temp_new(); \
3206 gen_addr_reg_index(ctx, EA); \
3207 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3208 tcg_temp_free(EA); \
3211 #define GEN_LDFS(name, ldop, op, type) \
3212 GEN_LDF(name, ldop, op | 0x20, type); \
3213 GEN_LDUF(name, ldop, op | 0x21, type); \
3214 GEN_LDUXF(name, ldop, op | 0x01, type); \
3215 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
3217 static inline void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3219 TCGv t0 = tcg_temp_new();
3220 TCGv_i32 t1 = tcg_temp_new_i32();
3221 gen_qemu_ld32u(ctx, t0, arg2);
3222 tcg_gen_trunc_tl_i32(t1, t0);
3223 tcg_temp_free(t0);
3224 gen_helper_float32_to_float64(arg1, t1);
3225 tcg_temp_free_i32(t1);
3228 /* lfd lfdu lfdux lfdx */
3229 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT);
3230 /* lfs lfsu lfsux lfsx */
3231 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);
3233 /*** Floating-point store ***/
3234 #define GEN_STF(name, stop, opc, type) \
3235 static void glue(gen_, name)(DisasContext *ctx) \
3237 TCGv EA; \
3238 if (unlikely(!ctx->fpu_enabled)) { \
3239 gen_exception(ctx, POWERPC_EXCP_FPU); \
3240 return; \
3242 gen_set_access_type(ctx, ACCESS_FLOAT); \
3243 EA = tcg_temp_new(); \
3244 gen_addr_imm_index(ctx, EA, 0); \
3245 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3246 tcg_temp_free(EA); \
3249 #define GEN_STUF(name, stop, opc, type) \
3250 static void glue(gen_, name##u)(DisasContext *ctx) \
3252 TCGv EA; \
3253 if (unlikely(!ctx->fpu_enabled)) { \
3254 gen_exception(ctx, POWERPC_EXCP_FPU); \
3255 return; \
3257 if (unlikely(rA(ctx->opcode) == 0)) { \
3258 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3259 return; \
3261 gen_set_access_type(ctx, ACCESS_FLOAT); \
3262 EA = tcg_temp_new(); \
3263 gen_addr_imm_index(ctx, EA, 0); \
3264 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3265 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3266 tcg_temp_free(EA); \
3269 #define GEN_STUXF(name, stop, opc, type) \
3270 static void glue(gen_, name##ux)(DisasContext *ctx) \
3272 TCGv EA; \
3273 if (unlikely(!ctx->fpu_enabled)) { \
3274 gen_exception(ctx, POWERPC_EXCP_FPU); \
3275 return; \
3277 if (unlikely(rA(ctx->opcode) == 0)) { \
3278 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3279 return; \
3281 gen_set_access_type(ctx, ACCESS_FLOAT); \
3282 EA = tcg_temp_new(); \
3283 gen_addr_reg_index(ctx, EA); \
3284 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3285 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3286 tcg_temp_free(EA); \
3289 #define GEN_STXF(name, stop, opc2, opc3, type) \
3290 static void glue(gen_, name##x)(DisasContext *ctx) \
3292 TCGv EA; \
3293 if (unlikely(!ctx->fpu_enabled)) { \
3294 gen_exception(ctx, POWERPC_EXCP_FPU); \
3295 return; \
3297 gen_set_access_type(ctx, ACCESS_FLOAT); \
3298 EA = tcg_temp_new(); \
3299 gen_addr_reg_index(ctx, EA); \
3300 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3301 tcg_temp_free(EA); \
3304 #define GEN_STFS(name, stop, op, type) \
3305 GEN_STF(name, stop, op | 0x20, type); \
3306 GEN_STUF(name, stop, op | 0x21, type); \
3307 GEN_STUXF(name, stop, op | 0x01, type); \
3308 GEN_STXF(name, stop, 0x17, op | 0x00, type)
3310 static inline void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3312 TCGv_i32 t0 = tcg_temp_new_i32();
3313 TCGv t1 = tcg_temp_new();
3314 gen_helper_float64_to_float32(t0, arg1);
3315 tcg_gen_extu_i32_tl(t1, t0);
3316 tcg_temp_free_i32(t0);
3317 gen_qemu_st32(ctx, t1, arg2);
3318 tcg_temp_free(t1);
3321 /* stfd stfdu stfdux stfdx */
3322 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT);
3323 /* stfs stfsu stfsux stfsx */
3324 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);
3326 /* Optional: */
3327 static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3329 TCGv t0 = tcg_temp_new();
3330 tcg_gen_trunc_i64_tl(t0, arg1);
3331 gen_qemu_st32(ctx, t0, arg2);
3332 tcg_temp_free(t0);
3334 /* stfiwx */
3335 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
3337 /*** Branch ***/
3338 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
3340 TranslationBlock *tb;
3341 tb = ctx->tb;
3342 #if defined(TARGET_PPC64)
3343 if (!ctx->sf_mode)
3344 dest = (uint32_t) dest;
3345 #endif
3346 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
3347 likely(!ctx->singlestep_enabled)) {
3348 tcg_gen_goto_tb(n);
3349 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3350 tcg_gen_exit_tb((tcg_target_long)tb + n);
3351 } else {
3352 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3353 if (unlikely(ctx->singlestep_enabled)) {
3354 if ((ctx->singlestep_enabled &
3355 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
3356 ctx->exception == POWERPC_EXCP_BRANCH) {
3357 target_ulong tmp = ctx->nip;
3358 ctx->nip = dest;
3359 gen_exception(ctx, POWERPC_EXCP_TRACE);
3360 ctx->nip = tmp;
3362 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3363 gen_debug_exception(ctx);
3366 tcg_gen_exit_tb(0);
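/*
 * Illustrative sketch, not part of the translator: direct TB chaining is
 * only used when the branch target lies in the same guest page as the
 * current TB and single-stepping is off; otherwise the NIP is written
 * back and the TB exits to the main loop.  The page test in plain C,
 * with hypothetical names:
 *
 *   static int same_page(uint64_t tb_pc, uint64_t dest, uint64_t page_mask)
 *   {
 *       return (tb_pc & page_mask) == (dest & page_mask);
 *   }
 */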
3370 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
3372 #if defined(TARGET_PPC64)
3373 if (ctx->sf_mode == 0)
3374 tcg_gen_movi_tl(cpu_lr, (uint32_t)nip);
3375 else
3376 #endif
3377 tcg_gen_movi_tl(cpu_lr, nip);
3380 /* b ba bl bla */
3381 static void gen_b(DisasContext *ctx)
3383 target_ulong li, target;
3385 ctx->exception = POWERPC_EXCP_BRANCH;
3386 /* sign extend LI */
3387 #if defined(TARGET_PPC64)
3388 if (ctx->sf_mode)
3389 li = ((int64_t)LI(ctx->opcode) << 38) >> 38;
3390 else
3391 #endif
3392 li = ((int32_t)LI(ctx->opcode) << 6) >> 6;
3393 if (likely(AA(ctx->opcode) == 0))
3394 target = ctx->nip + li - 4;
3395 else
3396 target = li;
3397 if (LK(ctx->opcode))
3398 gen_setlr(ctx, ctx->nip);
3399 gen_goto_tb(ctx, 0, target);
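/*
 * Illustrative sketch, not part of the translator: the shift pair above
 * sign-extends the 26-bit LI field (the 24-bit displacement with its two
 * zero low bits) by moving its sign bit to the top of the word and
 * shifting back arithmetically; the 64-bit variant does the same with a
 * shift of 38.  The 32-bit case in plain C, with a hypothetical name:
 *
 *   static int32_t sext26(uint32_t li)
 *   {
 *       return (int32_t)(li << 6) >> 6;
 *   }
 */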
3402 #define BCOND_IM 0
3403 #define BCOND_LR 1
3404 #define BCOND_CTR 2
3406 static inline void gen_bcond(DisasContext *ctx, int type)
3408 uint32_t bo = BO(ctx->opcode);
3409 int l1;
3410 TCGv target;
3412 ctx->exception = POWERPC_EXCP_BRANCH;
3413 if (type == BCOND_LR || type == BCOND_CTR) {
3414 target = tcg_temp_local_new();
3415 if (type == BCOND_CTR)
3416 tcg_gen_mov_tl(target, cpu_ctr);
3417 else
3418 tcg_gen_mov_tl(target, cpu_lr);
3419 } else {
3420 TCGV_UNUSED(target);
3422 if (LK(ctx->opcode))
3423 gen_setlr(ctx, ctx->nip);
3424 l1 = gen_new_label();
3425 if ((bo & 0x4) == 0) {
3426 /* Decrement and test CTR */
3427 TCGv temp = tcg_temp_new();
3428 if (unlikely(type == BCOND_CTR)) {
3429 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3430 return;
3432 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3433 #if defined(TARGET_PPC64)
3434 if (!ctx->sf_mode)
3435 tcg_gen_ext32u_tl(temp, cpu_ctr);
3436 else
3437 #endif
3438 tcg_gen_mov_tl(temp, cpu_ctr);
3439 if (bo & 0x2) {
3440 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3441 } else {
3442 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3444 tcg_temp_free(temp);
3446 if ((bo & 0x10) == 0) {
3447 /* Test CR */
3448 uint32_t bi = BI(ctx->opcode);
3449 uint32_t mask = 1 << (3 - (bi & 0x03));
3450 TCGv_i32 temp = tcg_temp_new_i32();
3452 if (bo & 0x8) {
3453 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3454 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3455 } else {
3456 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3457 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3459 tcg_temp_free_i32(temp);
3461 if (type == BCOND_IM) {
3462 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3463 if (likely(AA(ctx->opcode) == 0)) {
3464 gen_goto_tb(ctx, 0, ctx->nip + li - 4);
3465 } else {
3466 gen_goto_tb(ctx, 0, li);
3468 gen_set_label(l1);
3469 gen_goto_tb(ctx, 1, ctx->nip);
3470 } else {
3471 #if defined(TARGET_PPC64)
3472 if (!(ctx->sf_mode))
3473 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3474 else
3475 #endif
3476 tcg_gen_andi_tl(cpu_nip, target, ~3);
3477 tcg_gen_exit_tb(0);
3478 gen_set_label(l1);
3479 #if defined(TARGET_PPC64)
3480 if (!(ctx->sf_mode))
3481 tcg_gen_movi_tl(cpu_nip, (uint32_t)ctx->nip);
3482 else
3483 #endif
3484 tcg_gen_movi_tl(cpu_nip, ctx->nip);
3485 tcg_gen_exit_tb(0);
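/*
 * Illustrative sketch, not part of the translator: gen_bcond takes the
 * branch when both enabled conditions hold: with BO bit 0x4 clear the
 * decremented CTR is compared against zero (for equality when 0x2 is
 * set, inequality otherwise), and with 0x10 clear the selected CR bit
 * must equal BO bit 0x8.  A plain-C model of the decision, with
 * hypothetical names:
 *
 *   static int branch_taken(uint32_t bo, uint64_t ctr_after_dec, int cr_bit)
 *   {
 *       int ctr_ok  = (bo & 0x4) ? 1
 *                   : (bo & 0x2) ? (ctr_after_dec == 0)
 *                                : (ctr_after_dec != 0);
 *       int cond_ok = (bo & 0x10) || (cr_bit == ((bo >> 3) & 1));
 *       return ctr_ok && cond_ok;
 *   }
 */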
3489 static void gen_bc(DisasContext *ctx)
3491 gen_bcond(ctx, BCOND_IM);
3494 static void gen_bcctr(DisasContext *ctx)
3496 gen_bcond(ctx, BCOND_CTR);
3499 static void gen_bclr(DisasContext *ctx)
3501 gen_bcond(ctx, BCOND_LR);
3504 /*** Condition register logical ***/
3505 #define GEN_CRLOGIC(name, tcg_op, opc) \
3506 static void glue(gen_, name)(DisasContext *ctx) \
3508 uint8_t bitmask; \
3509 int sh; \
3510 TCGv_i32 t0, t1; \
3511 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3512 t0 = tcg_temp_new_i32(); \
3513 if (sh > 0) \
3514 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
3515 else if (sh < 0) \
3516 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
3517 else \
3518 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
3519 t1 = tcg_temp_new_i32(); \
3520 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3521 if (sh > 0) \
3522 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
3523 else if (sh < 0) \
3524 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
3525 else \
3526 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
3527 tcg_op(t0, t0, t1); \
3528 bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3529 tcg_gen_andi_i32(t0, t0, bitmask); \
3530 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3531 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
3532 tcg_temp_free_i32(t0); \
3533 tcg_temp_free_i32(t1); \
3536 /* crand */
3537 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3538 /* crandc */
3539 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3540 /* creqv */
3541 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3542 /* crnand */
3543 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3544 /* crnor */
3545 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3546 /* cror */
3547 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3548 /* crorc */
3549 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3550 /* crxor */
3551 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
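/*
 * Illustrative sketch, not part of the translator: each cpu_crf[] element
 * holds one 4-bit CR field with bit 0 of the field in the most
 * significant position, so GEN_CRLOGIC lines the source bit up with the
 * destination bit by shifting by the difference of the in-field indexes
 * and then merges the single result bit back under a mask.  Reading one
 * CR bit in plain C, with hypothetical names:
 *
 *   static int cr_bit(uint32_t crf, int bit_in_field)   /* 0..3 */
 *   {
 *       return (crf >> (3 - bit_in_field)) & 1;
 *   }
 */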
3553 /* mcrf */
3554 static void gen_mcrf(DisasContext *ctx)
3556 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3559 /*** System linkage ***/
3561 /* rfi (mem_idx only) */
3562 static void gen_rfi(DisasContext *ctx)
3564 #if defined(CONFIG_USER_ONLY)
3565 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3566 #else
3567 /* Restore CPU state */
3568 if (unlikely(!ctx->mem_idx)) {
3569 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3570 return;
3572 gen_helper_rfi();
3573 gen_sync_exception(ctx);
3574 #endif
3577 #if defined(TARGET_PPC64)
3578 static void gen_rfid(DisasContext *ctx)
3580 #if defined(CONFIG_USER_ONLY)
3581 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3582 #else
3583 /* Restore CPU state */
3584 if (unlikely(!ctx->mem_idx)) {
3585 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3586 return;
3588 gen_helper_rfid();
3589 gen_sync_exception(ctx);
3590 #endif
3593 static void gen_hrfid(DisasContext *ctx)
3595 #if defined(CONFIG_USER_ONLY)
3596 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3597 #else
3598 /* Restore CPU state */
3599 if (unlikely(ctx->mem_idx <= 1)) {
3600 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3601 return;
3603 gen_helper_hrfid();
3604 gen_sync_exception(ctx);
3605 #endif
3607 #endif
3609 /* sc */
3610 #if defined(CONFIG_USER_ONLY)
3611 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3612 #else
3613 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3614 #endif
3615 static void gen_sc(DisasContext *ctx)
3617 uint32_t lev;
3619 lev = (ctx->opcode >> 5) & 0x7F;
3620 gen_exception_err(ctx, POWERPC_SYSCALL, lev);
3623 /*** Trap ***/
3625 /* tw */
3626 static void gen_tw(DisasContext *ctx)
3628 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3629 /* Update the nip since this might generate a trap exception */
3630 gen_update_nip(ctx, ctx->nip);
3631 gen_helper_tw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
3632 tcg_temp_free_i32(t0);
3635 /* twi */
3636 static void gen_twi(DisasContext *ctx)
3638 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3639 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3640 /* Update the nip since this might generate a trap exception */
3641 gen_update_nip(ctx, ctx->nip);
3642 gen_helper_tw(cpu_gpr[rA(ctx->opcode)], t0, t1);
3643 tcg_temp_free(t0);
3644 tcg_temp_free_i32(t1);
3647 #if defined(TARGET_PPC64)
3648 /* td */
3649 static void gen_td(DisasContext *ctx)
3651 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3652 /* Update the nip since this might generate a trap exception */
3653 gen_update_nip(ctx, ctx->nip);
3654 gen_helper_td(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
3655 tcg_temp_free_i32(t0);
3658 /* tdi */
3659 static void gen_tdi(DisasContext *ctx)
3661 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3662 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3663 /* Update the nip since this might generate a trap exception */
3664 gen_update_nip(ctx, ctx->nip);
3665 gen_helper_td(cpu_gpr[rA(ctx->opcode)], t0, t1);
3666 tcg_temp_free(t0);
3667 tcg_temp_free_i32(t1);
3669 #endif
3671 /*** Processor control ***/
3673 /* mcrxr */
3674 static void gen_mcrxr(DisasContext *ctx)
3676 tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], cpu_xer);
3677 tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], XER_CA);
3678 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_SO | 1 << XER_OV | 1 << XER_CA));
3681 /* mfcr mfocrf */
3682 static void gen_mfcr(DisasContext *ctx)
3684 uint32_t crm, crn;
3686 if (likely(ctx->opcode & 0x00100000)) {
3687 crm = CRM(ctx->opcode);
3688 if (likely(crm && ((crm & (crm - 1)) == 0))) {
3689 crn = ctz32 (crm);
3690 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
3691 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
3692 cpu_gpr[rD(ctx->opcode)], crn * 4);
3694 } else {
3695 TCGv_i32 t0 = tcg_temp_new_i32();
3696 tcg_gen_mov_i32(t0, cpu_crf[0]);
3697 tcg_gen_shli_i32(t0, t0, 4);
3698 tcg_gen_or_i32(t0, t0, cpu_crf[1]);
3699 tcg_gen_shli_i32(t0, t0, 4);
3700 tcg_gen_or_i32(t0, t0, cpu_crf[2]);
3701 tcg_gen_shli_i32(t0, t0, 4);
3702 tcg_gen_or_i32(t0, t0, cpu_crf[3]);
3703 tcg_gen_shli_i32(t0, t0, 4);
3704 tcg_gen_or_i32(t0, t0, cpu_crf[4]);
3705 tcg_gen_shli_i32(t0, t0, 4);
3706 tcg_gen_or_i32(t0, t0, cpu_crf[5]);
3707 tcg_gen_shli_i32(t0, t0, 4);
3708 tcg_gen_or_i32(t0, t0, cpu_crf[6]);
3709 tcg_gen_shli_i32(t0, t0, 4);
3710 tcg_gen_or_i32(t0, t0, cpu_crf[7]);
3711 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
3712 tcg_temp_free_i32(t0);
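/*
 * Illustrative sketch, not part of the translator: when the single-field
 * mfocrf form is not selected, mfcr rebuilds the full 32-bit CR by
 * concatenating the eight 4-bit fields, CR0 ending up in the most
 * significant nibble as in the shift/or chain above.  In plain C, with a
 * hypothetical array standing in for the cpu_crf globals:
 *
 *   static uint32_t pack_cr(const uint32_t crf[8])
 *   {
 *       uint32_t cr = 0;
 *       int i;
 *       for (i = 0; i < 8; i++) {
 *           cr = (cr << 4) | (crf[i] & 0xf);
 *       }
 *       return cr;
 *   }
 */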
3716 /* mfmsr */
3717 static void gen_mfmsr(DisasContext *ctx)
3719 #if defined(CONFIG_USER_ONLY)
3720 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3721 #else
3722 if (unlikely(!ctx->mem_idx)) {
3723 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3724 return;
3726 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
3727 #endif
3730 static void spr_noaccess(void *opaque, int gprn, int sprn)
3732 #if 0
3733 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
3734 printf("ERROR: try to access SPR %d !\n", sprn);
3735 #endif
3737 #define SPR_NOACCESS (&spr_noaccess)
3739 /* mfspr */
3740 static inline void gen_op_mfspr(DisasContext *ctx)
3742 void (*read_cb)(void *opaque, int gprn, int sprn);
3743 uint32_t sprn = SPR(ctx->opcode);
3745 #if !defined(CONFIG_USER_ONLY)
3746 if (ctx->mem_idx == 2)
3747 read_cb = ctx->spr_cb[sprn].hea_read;
3748 else if (ctx->mem_idx)
3749 read_cb = ctx->spr_cb[sprn].oea_read;
3750 else
3751 #endif
3752 read_cb = ctx->spr_cb[sprn].uea_read;
3753 if (likely(read_cb != NULL)) {
3754 if (likely(read_cb != SPR_NOACCESS)) {
3755 (*read_cb)(ctx, rD(ctx->opcode), sprn);
3756 } else {
3757 /* Privilege exception */
3758 /* This is a hack to avoid warnings when running Linux:
3759 * this OS breaks the PowerPC virtualisation model,
3760 * allowing userland applications to read the PVR
3762 if (sprn != SPR_PVR) {
3763 qemu_log("Trying to read privileged spr %d %03x at "
3764 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3765 printf("Trying to read privileged spr %d %03x at "
3766 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3768 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3770 } else {
3771 /* Not defined */
3772 qemu_log("Trying to read invalid spr %d %03x at "
3773 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3774 printf("Trying to read invalid spr %d %03x at " TARGET_FMT_lx "\n",
3775 sprn, sprn, ctx->nip);
3776 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
3780 static void gen_mfspr(DisasContext *ctx)
3782 gen_op_mfspr(ctx);
3785 /* mftb */
3786 static void gen_mftb(DisasContext *ctx)
3788 gen_op_mfspr(ctx);
3791 /* mtcrf mtocrf */
3792 static void gen_mtcrf(DisasContext *ctx)
3794 uint32_t crm, crn;
3796 crm = CRM(ctx->opcode);
3797 if (likely((ctx->opcode & 0x00100000))) {
3798 if (crm && ((crm & (crm - 1)) == 0)) {
3799 TCGv_i32 temp = tcg_temp_new_i32();
3800 crn = ctz32 (crm);
3801 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3802 tcg_gen_shri_i32(temp, temp, crn * 4);
3803 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
3804 tcg_temp_free_i32(temp);
3806 } else {
3807 TCGv_i32 temp = tcg_temp_new_i32();
3808 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3809 for (crn = 0 ; crn < 8 ; crn++) {
3810 if (crm & (1 << crn)) {
3811 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
3812 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
3815 tcg_temp_free_i32(temp);
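/*
 * Illustrative sketch, not part of the translator: the crm & (crm - 1)
 * test above is the usual power-of-two check, so the mtocrf fast path is
 * taken only when exactly one CR field is selected by the CRM mask.  In
 * plain C, with a hypothetical name:
 *
 *   static int exactly_one_field(uint32_t crm)
 *   {
 *       return crm != 0 && (crm & (crm - 1)) == 0;
 *   }
 */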
3819 /* mtmsr */
3820 #if defined(TARGET_PPC64)
3821 static void gen_mtmsrd(DisasContext *ctx)
3823 #if defined(CONFIG_USER_ONLY)
3824 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3825 #else
3826 if (unlikely(!ctx->mem_idx)) {
3827 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3828 return;
3830 if (ctx->opcode & 0x00010000) {
3831 /* Special form that does not need any synchronisation */
3832 TCGv t0 = tcg_temp_new();
3833 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3834 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
3835 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3836 tcg_temp_free(t0);
3837 } else {
3838 /* XXX: we need to update nip before the store:
3839 * if we enter power saving mode, we will exit the loop
3840 * directly from ppc_store_msr
3842 gen_update_nip(ctx, ctx->nip);
3843 gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
3844 /* Must stop the translation as machine state (may have) changed */
3845 /* Note that mtmsr is not always defined as context-synchronizing */
3846 gen_stop_exception(ctx);
3848 #endif
3850 #endif
3852 static void gen_mtmsr(DisasContext *ctx)
3854 #if defined(CONFIG_USER_ONLY)
3855 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3856 #else
3857 if (unlikely(!ctx->mem_idx)) {
3858 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3859 return;
3861 if (ctx->opcode & 0x00010000) {
3862 /* Special form that does not need any synchronisation */
3863 TCGv t0 = tcg_temp_new();
3864 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3865 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
3866 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3867 tcg_temp_free(t0);
3868 } else {
3869 /* XXX: we need to update nip before the store:
3870 * if we enter power saving mode, we will exit the loop
3871 * directly from ppc_store_msr
3873 gen_update_nip(ctx, ctx->nip);
3874 #if defined(TARGET_PPC64)
3875 if (!ctx->sf_mode) {
3876 TCGv t0 = tcg_temp_new();
3877 TCGv t1 = tcg_temp_new();
3878 tcg_gen_andi_tl(t0, cpu_msr, 0xFFFFFFFF00000000ULL);
3879 tcg_gen_ext32u_tl(t1, cpu_gpr[rS(ctx->opcode)]);
3880 tcg_gen_or_tl(t0, t0, t1);
3881 tcg_temp_free(t1);
3882 gen_helper_store_msr(t0);
3883 tcg_temp_free(t0);
3884 } else
3885 #endif
3886 gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
3887 /* Must stop the translation as machine state (may have) changed */
3888 /* Note that mtmsr is not always defined as context-synchronizing */
3889 gen_stop_exception(ctx);
3891 #endif
3894 /* mtspr */
3895 static void gen_mtspr(DisasContext *ctx)
3897 void (*write_cb)(void *opaque, int sprn, int gprn);
3898 uint32_t sprn = SPR(ctx->opcode);
3900 #if !defined(CONFIG_USER_ONLY)
3901 if (ctx->mem_idx == 2)
3902 write_cb = ctx->spr_cb[sprn].hea_write;
3903 else if (ctx->mem_idx)
3904 write_cb = ctx->spr_cb[sprn].oea_write;
3905 else
3906 #endif
3907 write_cb = ctx->spr_cb[sprn].uea_write;
3908 if (likely(write_cb != NULL)) {
3909 if (likely(write_cb != SPR_NOACCESS)) {
3910 (*write_cb)(ctx, sprn, rS(ctx->opcode));
3911 } else {
3912 /* Privilege exception */
3913 qemu_log("Trying to write privileged spr %d %03x at "
3914 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3915 printf("Trying to write privileged spr %d %03x at " TARGET_FMT_lx
3916 "\n", sprn, sprn, ctx->nip);
3917 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3919 } else {
3920 /* Not defined */
3921 qemu_log("Trying to write invalid spr %d %03x at "
3922 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3923 printf("Trying to write invalid spr %d %03x at " TARGET_FMT_lx "\n",
3924 sprn, sprn, ctx->nip);
3925 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
3929 /*** Cache management ***/
3931 /* dcbf */
3932 static void gen_dcbf(DisasContext *ctx)
3934 /* XXX: specification says this is treated as a load by the MMU */
3935 TCGv t0;
3936 gen_set_access_type(ctx, ACCESS_CACHE);
3937 t0 = tcg_temp_new();
3938 gen_addr_reg_index(ctx, t0);
3939 gen_qemu_ld8u(ctx, t0, t0);
3940 tcg_temp_free(t0);
3943 /* dcbi (Supervisor only) */
3944 static void gen_dcbi(DisasContext *ctx)
3946 #if defined(CONFIG_USER_ONLY)
3947 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3948 #else
3949 TCGv EA, val;
3950 if (unlikely(!ctx->mem_idx)) {
3951 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3952 return;
3954 EA = tcg_temp_new();
3955 gen_set_access_type(ctx, ACCESS_CACHE);
3956 gen_addr_reg_index(ctx, EA);
3957 val = tcg_temp_new();
3958 /* XXX: specification says this should be treated as a store by the MMU */
3959 gen_qemu_ld8u(ctx, val, EA);
3960 gen_qemu_st8(ctx, val, EA);
3961 tcg_temp_free(val);
3962 tcg_temp_free(EA);
3963 #endif
3966 /* dcbst */
3967 static void gen_dcbst(DisasContext *ctx)
3969 /* XXX: specification says this is treated as a load by the MMU */
3970 TCGv t0;
3971 gen_set_access_type(ctx, ACCESS_CACHE);
3972 t0 = tcg_temp_new();
3973 gen_addr_reg_index(ctx, t0);
3974 gen_qemu_ld8u(ctx, t0, t0);
3975 tcg_temp_free(t0);
3978 /* dcbt */
3979 static void gen_dcbt(DisasContext *ctx)
3981 /* interpreted as no-op */
3982 /* XXX: specification says this is treated as a load by the MMU
3983 * but does not generate any exception */
3987 /* dcbtst */
3988 static void gen_dcbtst(DisasContext *ctx)
3990 /* interpreted as no-op */
3991 /* XXX: specification says this is treated as a load by the MMU
3992 * but does not generate any exception */
3996 /* dcbz */
3997 static void gen_dcbz(DisasContext *ctx)
3999 TCGv t0;
4000 gen_set_access_type(ctx, ACCESS_CACHE);
4001 /* NIP cannot be restored if the memory exception comes from a helper */
4002 gen_update_nip(ctx, ctx->nip - 4);
4003 t0 = tcg_temp_new();
4004 gen_addr_reg_index(ctx, t0);
4005 gen_helper_dcbz(t0);
4006 tcg_temp_free(t0);
4009 static void gen_dcbz_970(DisasContext *ctx)
4011 TCGv t0;
4012 gen_set_access_type(ctx, ACCESS_CACHE);
4013 /* NIP cannot be restored if the memory exception comes from a helper */
4014 gen_update_nip(ctx, ctx->nip - 4);
4015 t0 = tcg_temp_new();
4016 gen_addr_reg_index(ctx, t0);
4017 if (ctx->opcode & 0x00200000)
4018 gen_helper_dcbz(t0);
4019 else
4020 gen_helper_dcbz_970(t0);
4021 tcg_temp_free(t0);
4024 /* dst / dstt */
4025 static void gen_dst(DisasContext *ctx)
4027 if (rA(ctx->opcode) == 0) {
4028 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4029 } else {
4030 /* interpreted as no-op */
4034 /* dstst / dststt */
4035 static void gen_dstst(DisasContext *ctx)
4037 if (rA(ctx->opcode) == 0) {
4038 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4039 } else {
4040 /* interpreted as no-op */
4045 /* dss / dssall */
4046 static void gen_dss(DisasContext *ctx)
4048 /* interpreted as no-op */
4051 /* icbi */
4052 static void gen_icbi(DisasContext *ctx)
4054 TCGv t0;
4055 gen_set_access_type(ctx, ACCESS_CACHE);
4056 /* NIP cannot be restored if the memory exception comes from a helper */
4057 gen_update_nip(ctx, ctx->nip - 4);
4058 t0 = tcg_temp_new();
4059 gen_addr_reg_index(ctx, t0);
4060 gen_helper_icbi(t0);
4061 tcg_temp_free(t0);
4064 /* Optional: */
4065 /* dcba */
4066 static void gen_dcba(DisasContext *ctx)
4068 /* interpreted as no-op */
4069 /* XXX: specification says this is treated as a store by the MMU
4070 * but does not generate any exception */
4074 /*** Segment register manipulation ***/
4075 /* Supervisor only: */
4077 /* mfsr */
4078 static void gen_mfsr(DisasContext *ctx)
4080 #if defined(CONFIG_USER_ONLY)
4081 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4082 #else
4083 TCGv t0;
4084 if (unlikely(!ctx->mem_idx)) {
4085 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4086 return;
4088 t0 = tcg_const_tl(SR(ctx->opcode));
4089 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4090 tcg_temp_free(t0);
4091 #endif
4094 /* mfsrin */
4095 static void gen_mfsrin(DisasContext *ctx)
4097 #if defined(CONFIG_USER_ONLY)
4098 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4099 #else
4100 TCGv t0;
4101 if (unlikely(!ctx->mem_idx)) {
4102 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4103 return;
4105 t0 = tcg_temp_new();
4106 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4107 tcg_gen_andi_tl(t0, t0, 0xF);
4108 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4109 tcg_temp_free(t0);
4110 #endif
4113 /* mtsr */
4114 static void gen_mtsr(DisasContext *ctx)
4116 #if defined(CONFIG_USER_ONLY)
4117 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4118 #else
4119 TCGv t0;
4120 if (unlikely(!ctx->mem_idx)) {
4121 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4122 return;
4124 t0 = tcg_const_tl(SR(ctx->opcode));
4125 gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
4126 tcg_temp_free(t0);
4127 #endif
4130 /* mtsrin */
4131 static void gen_mtsrin(DisasContext *ctx)
4133 #if defined(CONFIG_USER_ONLY)
4134 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4135 #else
4136 TCGv t0;
4137 if (unlikely(!ctx->mem_idx)) {
4138 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4139 return;
4141 t0 = tcg_temp_new();
4142 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4143 tcg_gen_andi_tl(t0, t0, 0xF);
4144 gen_helper_store_sr(t0, cpu_gpr[rD(ctx->opcode)]);
4145 tcg_temp_free(t0);
4146 #endif
4149 #if defined(TARGET_PPC64)
4150 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
4152 /* mfsr */
4153 static void gen_mfsr_64b(DisasContext *ctx)
4155 #if defined(CONFIG_USER_ONLY)
4156 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4157 #else
4158 TCGv t0;
4159 if (unlikely(!ctx->mem_idx)) {
4160 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4161 return;
4163 t0 = tcg_const_tl(SR(ctx->opcode));
4164 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4165 tcg_temp_free(t0);
4166 #endif
4169 /* mfsrin */
4170 static void gen_mfsrin_64b(DisasContext *ctx)
4172 #if defined(CONFIG_USER_ONLY)
4173 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4174 #else
4175 TCGv t0;
4176 if (unlikely(!ctx->mem_idx)) {
4177 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4178 return;
4180 t0 = tcg_temp_new();
4181 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4182 tcg_gen_andi_tl(t0, t0, 0xF);
4183 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4184 tcg_temp_free(t0);
4185 #endif
4188 /* mtsr */
4189 static void gen_mtsr_64b(DisasContext *ctx)
4191 #if defined(CONFIG_USER_ONLY)
4192 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4193 #else
4194 TCGv t0;
4195 if (unlikely(!ctx->mem_idx)) {
4196 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4197 return;
4199 t0 = tcg_const_tl(SR(ctx->opcode));
4200 gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
4201 tcg_temp_free(t0);
4202 #endif
4205 /* mtsrin */
4206 static void gen_mtsrin_64b(DisasContext *ctx)
4208 #if defined(CONFIG_USER_ONLY)
4209 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4210 #else
4211 TCGv t0;
4212 if (unlikely(!ctx->mem_idx)) {
4213 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4214 return;
4216 t0 = tcg_temp_new();
4217 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4218 tcg_gen_andi_tl(t0, t0, 0xF);
4219 gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
4220 tcg_temp_free(t0);
4221 #endif
4224 /* slbmte */
4225 static void gen_slbmte(DisasContext *ctx)
4227 #if defined(CONFIG_USER_ONLY)
4228 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4229 #else
4230 if (unlikely(!ctx->mem_idx)) {
4231 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4232 return;
4234 gen_helper_store_slb(cpu_gpr[rB(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
4235 #endif
4238 static void gen_slbmfee(DisasContext *ctx)
4240 #if defined(CONFIG_USER_ONLY)
4241 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4242 #else
4243 if (unlikely(!ctx->mem_idx)) {
4244 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4245 return;
4247 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)],
4248 cpu_gpr[rB(ctx->opcode)]);
4249 #endif
4252 static void gen_slbmfev(DisasContext *ctx)
4254 #if defined(CONFIG_USER_ONLY)
4255 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4256 #else
4257 if (unlikely(!ctx->mem_idx)) {
4258 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4259 return;
4261 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)],
4262 cpu_gpr[rB(ctx->opcode)]);
4263 #endif
4265 #endif /* defined(TARGET_PPC64) */
4267 /*** Lookaside buffer management ***/
4268 /* Optional & mem_idx only: */
4270 /* tlbia */
4271 static void gen_tlbia(DisasContext *ctx)
4273 #if defined(CONFIG_USER_ONLY)
4274 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4275 #else
4276 if (unlikely(!ctx->mem_idx)) {
4277 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4278 return;
4280 gen_helper_tlbia();
4281 #endif
4284 /* tlbiel */
4285 static void gen_tlbiel(DisasContext *ctx)
4287 #if defined(CONFIG_USER_ONLY)
4288 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4289 #else
4290 if (unlikely(!ctx->mem_idx)) {
4291 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4292 return;
4294 gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
4295 #endif
4298 /* tlbie */
4299 static void gen_tlbie(DisasContext *ctx)
4301 #if defined(CONFIG_USER_ONLY)
4302 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4303 #else
4304 if (unlikely(!ctx->mem_idx)) {
4305 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4306 return;
4308 #if defined(TARGET_PPC64)
4309 if (!ctx->sf_mode) {
4310 TCGv t0 = tcg_temp_new();
4311 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
4312 gen_helper_tlbie(t0);
4313 tcg_temp_free(t0);
4314 } else
4315 #endif
4316 gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
4317 #endif
4320 /* tlbsync */
4321 static void gen_tlbsync(DisasContext *ctx)
4323 #if defined(CONFIG_USER_ONLY)
4324 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4325 #else
4326 if (unlikely(!ctx->mem_idx)) {
4327 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4328 return;
4330 /* This has no effect: it should ensure that all previous
4331 * tlbie instructions have completed */
4333 gen_stop_exception(ctx);
4334 #endif
4337 #if defined(TARGET_PPC64)
4338 /* slbia */
4339 static void gen_slbia(DisasContext *ctx)
4341 #if defined(CONFIG_USER_ONLY)
4342 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4343 #else
4344 if (unlikely(!ctx->mem_idx)) {
4345 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4346 return;
4348 gen_helper_slbia();
4349 #endif
4352 /* slbie */
4353 static void gen_slbie(DisasContext *ctx)
4355 #if defined(CONFIG_USER_ONLY)
4356 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4357 #else
4358 if (unlikely(!ctx->mem_idx)) {
4359 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4360 return;
4362 gen_helper_slbie(cpu_gpr[rB(ctx->opcode)]);
4363 #endif
4365 #endif
4367 /*** External control ***/
4368 /* Optional: */
4370 /* eciwx */
4371 static void gen_eciwx(DisasContext *ctx)
4373 TCGv t0;
4374 /* Should check EAR[E] ! */
4375 gen_set_access_type(ctx, ACCESS_EXT);
4376 t0 = tcg_temp_new();
4377 gen_addr_reg_index(ctx, t0);
4378 gen_check_align(ctx, t0, 0x03);
4379 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4380 tcg_temp_free(t0);
4383 /* ecowx */
4384 static void gen_ecowx(DisasContext *ctx)
4386 TCGv t0;
4387 /* Should check EAR[E] ! */
4388 gen_set_access_type(ctx, ACCESS_EXT);
4389 t0 = tcg_temp_new();
4390 gen_addr_reg_index(ctx, t0);
4391 gen_check_align(ctx, t0, 0x03);
4392 gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4393 tcg_temp_free(t0);
4396 /* PowerPC 601 specific instructions */
4398 /* abs - abs. */
4399 static void gen_abs(DisasContext *ctx)
4401 int l1 = gen_new_label();
4402 int l2 = gen_new_label();
4403 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
4404 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4405 tcg_gen_br(l2);
4406 gen_set_label(l1);
4407 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4408 gen_set_label(l2);
4409 if (unlikely(Rc(ctx->opcode) != 0))
4410 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4413 /* abso - abso. */
4414 static void gen_abso(DisasContext *ctx)
4416 int l1 = gen_new_label();
4417 int l2 = gen_new_label();
4418 int l3 = gen_new_label();
4419 /* Start with XER OV disabled, the most likely case */
4420 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
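/* Negating 0x80000000 is the only overflowing case: OV and SO are then set
 * and the value is returned unchanged.
 */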
4421 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
4422 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
4423 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4424 tcg_gen_br(l2);
4425 gen_set_label(l1);
4426 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4427 tcg_gen_br(l3);
4428 gen_set_label(l2);
4429 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4430 gen_set_label(l3);
4431 if (unlikely(Rc(ctx->opcode) != 0))
4432 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4435 /* clcs */
4436 static void gen_clcs(DisasContext *ctx)
4438 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
4439 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], t0);
4440 tcg_temp_free_i32(t0);
4441 /* Rc=1 sets CR0 to an undefined state */
4444 /* div - div. */
4445 static void gen_div(DisasContext *ctx)
4447 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4448 if (unlikely(Rc(ctx->opcode) != 0))
4449 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4452 /* divo - divo. */
4453 static void gen_divo(DisasContext *ctx)
4455 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4456 if (unlikely(Rc(ctx->opcode) != 0))
4457 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4460 /* divs - divs. */
4461 static void gen_divs(DisasContext *ctx)
4463 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4464 if (unlikely(Rc(ctx->opcode) != 0))
4465 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4468 /* divso - divso. */
4469 static void gen_divso(DisasContext *ctx)
4471 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4472 if (unlikely(Rc(ctx->opcode) != 0))
4473 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4476 /* doz - doz. */
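/* XXX: the difference is computed on the fall-through path, i.e. when
 * rB < rA; the 601 "difference or zero" definition (result is rB - rA when
 * that is positive, zero otherwise, never negative) suggests the two cases
 * here (and in dozo/dozi below) may be swapped.
 */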
4477 static void gen_doz(DisasContext *ctx)
4479 int l1 = gen_new_label();
4480 int l2 = gen_new_label();
4481 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4482 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4483 tcg_gen_br(l2);
4484 gen_set_label(l1);
4485 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4486 gen_set_label(l2);
4487 if (unlikely(Rc(ctx->opcode) != 0))
4488 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4491 /* dozo - dozo. */
4492 static void gen_dozo(DisasContext *ctx)
4494 int l1 = gen_new_label();
4495 int l2 = gen_new_label();
4496 TCGv t0 = tcg_temp_new();
4497 TCGv t1 = tcg_temp_new();
4498 TCGv t2 = tcg_temp_new();
4499 /* Start with XER OV disabled, the most likely case */
4500 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4501 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4502 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4503 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4504 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
4505 tcg_gen_andc_tl(t1, t1, t2);
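/* t1 now has its sign bit set iff rB and rA had different signs and the
 * result kept the sign of rA, i.e. iff the subtraction overflowed.
 */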
4506 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
4507 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4508 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4509 tcg_gen_br(l2);
4510 gen_set_label(l1);
4511 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4512 gen_set_label(l2);
4513 tcg_temp_free(t0);
4514 tcg_temp_free(t1);
4515 tcg_temp_free(t2);
4516 if (unlikely(Rc(ctx->opcode) != 0))
4517 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4520 /* dozi */
4521 static void gen_dozi(DisasContext *ctx)
4523 target_long simm = SIMM(ctx->opcode);
4524 int l1 = gen_new_label();
4525 int l2 = gen_new_label();
4526 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
4527 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
4528 tcg_gen_br(l2);
4529 gen_set_label(l1);
4530 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4531 gen_set_label(l2);
4532 if (unlikely(Rc(ctx->opcode) != 0))
4533 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4536 /* lscbx - lscbx. */
4537 static void gen_lscbx(DisasContext *ctx)
4539 TCGv t0 = tcg_temp_new();
4540 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
4541 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
4542 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
4544 gen_addr_reg_index(ctx, t0);
4545 /* NIP cannot be restored if the memory exception comes from a helper */
4546 gen_update_nip(ctx, ctx->nip - 4);
4547 gen_helper_lscbx(t0, t0, t1, t2, t3);
4548 tcg_temp_free_i32(t1);
4549 tcg_temp_free_i32(t2);
4550 tcg_temp_free_i32(t3);
4551 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
4552 tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
4553 if (unlikely(Rc(ctx->opcode) != 0))
4554 gen_set_Rc0(ctx, t0);
4555 tcg_temp_free(t0);
4558 /* maskg - maskg. */
4559 static void gen_maskg(DisasContext *ctx)
4561 int l1 = gen_new_label();
4562 TCGv t0 = tcg_temp_new();
4563 TCGv t1 = tcg_temp_new();
4564 TCGv t2 = tcg_temp_new();
4565 TCGv t3 = tcg_temp_new();
4566 tcg_gen_movi_tl(t3, 0xFFFFFFFF);
4567 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4568 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
4569 tcg_gen_addi_tl(t2, t0, 1);
4570 tcg_gen_shr_tl(t2, t3, t2);
4571 tcg_gen_shr_tl(t3, t3, t1);
4572 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
4573 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
4574 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4575 gen_set_label(l1);
4576 tcg_temp_free(t0);
4577 tcg_temp_free(t1);
4578 tcg_temp_free(t2);
4579 tcg_temp_free(t3);
4580 if (unlikely(Rc(ctx->opcode) != 0))
4581 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4584 /* maskir - maskir. */
4585 static void gen_maskir(DisasContext *ctx)
4587 TCGv t0 = tcg_temp_new();
4588 TCGv t1 = tcg_temp_new();
4589 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4590 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4591 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4592 tcg_temp_free(t0);
4593 tcg_temp_free(t1);
4594 if (unlikely(Rc(ctx->opcode) != 0))
4595 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4598 /* mul - mul. */
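/* The low-order 32 bits of the 64-bit product go to MQ, the high-order
 * 32 bits to rD.
 */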
4599 static void gen_mul(DisasContext *ctx)
4601 TCGv_i64 t0 = tcg_temp_new_i64();
4602 TCGv_i64 t1 = tcg_temp_new_i64();
4603 TCGv t2 = tcg_temp_new();
4604 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4605 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4606 tcg_gen_mul_i64(t0, t0, t1);
4607 tcg_gen_trunc_i64_tl(t2, t0);
4608 gen_store_spr(SPR_MQ, t2);
4609 tcg_gen_shri_i64(t1, t0, 32);
4610 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4611 tcg_temp_free_i64(t0);
4612 tcg_temp_free_i64(t1);
4613 tcg_temp_free(t2);
4614 if (unlikely(Rc(ctx->opcode) != 0))
4615 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4618 /* mulo - mulo. */
4619 static void gen_mulo(DisasContext *ctx)
4621 int l1 = gen_new_label();
4622 TCGv_i64 t0 = tcg_temp_new_i64();
4623 TCGv_i64 t1 = tcg_temp_new_i64();
4624 TCGv t2 = tcg_temp_new();
4625 /* Start with XER OV disabled, the most likely case */
4626 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4627 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4628 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4629 tcg_gen_mul_i64(t0, t0, t1);
4630 tcg_gen_trunc_i64_tl(t2, t0);
4631 gen_store_spr(SPR_MQ, t2);
4632 tcg_gen_shri_i64(t1, t0, 32);
4633 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4634 tcg_gen_ext32s_i64(t1, t0);
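/* If sign-extending the low 32 bits reproduces the full 64-bit product,
 * the result fits in 32 bits and OV is left clear; otherwise OV and SO
 * are set.
 */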
4635 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
4636 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4637 gen_set_label(l1);
4638 tcg_temp_free_i64(t0);
4639 tcg_temp_free_i64(t1);
4640 tcg_temp_free(t2);
4641 if (unlikely(Rc(ctx->opcode) != 0))
4642 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4645 /* nabs - nabs. */
4646 static void gen_nabs(DisasContext *ctx)
4648 int l1 = gen_new_label();
4649 int l2 = gen_new_label();
4650 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4651 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4652 tcg_gen_br(l2);
4653 gen_set_label(l1);
4654 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4655 gen_set_label(l2);
4656 if (unlikely(Rc(ctx->opcode) != 0))
4657 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4660 /* nabso - nabso. */
4661 static void gen_nabso(DisasContext *ctx)
4663 int l1 = gen_new_label();
4664 int l2 = gen_new_label();
4665 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4666 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4667 tcg_gen_br(l2);
4668 gen_set_label(l1);
4669 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4670 gen_set_label(l2);
4671 /* nabs never overflows */
4672 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4673 if (unlikely(Rc(ctx->opcode) != 0))
4674 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4677 /* rlmi - rlmi. */
4678 static void gen_rlmi(DisasContext *ctx)
4680 uint32_t mb = MB(ctx->opcode);
4681 uint32_t me = ME(ctx->opcode);
4682 TCGv t0 = tcg_temp_new();
4683 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4684 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4685 tcg_gen_andi_tl(t0, t0, MASK(mb, me));
4686 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
4687 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
4688 tcg_temp_free(t0);
4689 if (unlikely(Rc(ctx->opcode) != 0))
4690 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4693 /* rrib - rrib. */
4694 static void gen_rrib(DisasContext *ctx)
4696 TCGv t0 = tcg_temp_new();
4697 TCGv t1 = tcg_temp_new();
4698 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4699 tcg_gen_movi_tl(t1, 0x80000000);
4700 tcg_gen_shr_tl(t1, t1, t0);
4701 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4702 tcg_gen_and_tl(t0, t0, t1);
4703 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
4704 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4705 tcg_temp_free(t0);
4706 tcg_temp_free(t1);
4707 if (unlikely(Rc(ctx->opcode) != 0))
4708 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4711 /* sle - sle. */
4712 static void gen_sle(DisasContext *ctx)
4714 TCGv t0 = tcg_temp_new();
4715 TCGv t1 = tcg_temp_new();
4716 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4717 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4718 tcg_gen_subfi_tl(t1, 32, t1);
4719 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4720 tcg_gen_or_tl(t1, t0, t1);
4721 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4722 gen_store_spr(SPR_MQ, t1);
4723 tcg_temp_free(t0);
4724 tcg_temp_free(t1);
4725 if (unlikely(Rc(ctx->opcode) != 0))
4726 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4729 /* sleq - sleq. */
4730 static void gen_sleq(DisasContext *ctx)
4732 TCGv t0 = tcg_temp_new();
4733 TCGv t1 = tcg_temp_new();
4734 TCGv t2 = tcg_temp_new();
4735 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4736 tcg_gen_movi_tl(t2, 0xFFFFFFFF);
4737 tcg_gen_shl_tl(t2, t2, t0);
4738 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4739 gen_load_spr(t1, SPR_MQ);
4740 gen_store_spr(SPR_MQ, t0);
4741 tcg_gen_and_tl(t0, t0, t2);
4742 tcg_gen_andc_tl(t1, t1, t2);
4743 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4744 tcg_temp_free(t0);
4745 tcg_temp_free(t1);
4746 tcg_temp_free(t2);
4747 if (unlikely(Rc(ctx->opcode) != 0))
4748 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4751 /* sliq - sliq. */
4752 static void gen_sliq(DisasContext *ctx)
4754 int sh = SH(ctx->opcode);
4755 TCGv t0 = tcg_temp_new();
4756 TCGv t1 = tcg_temp_new();
4757 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4758 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4759 tcg_gen_or_tl(t1, t0, t1);
4760 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4761 gen_store_spr(SPR_MQ, t1);
4762 tcg_temp_free(t0);
4763 tcg_temp_free(t1);
4764 if (unlikely(Rc(ctx->opcode) != 0))
4765 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4768 /* slliq - slliq. */
4769 static void gen_slliq(DisasContext *ctx)
4771 int sh = SH(ctx->opcode);
4772 TCGv t0 = tcg_temp_new();
4773 TCGv t1 = tcg_temp_new();
4774 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4775 gen_load_spr(t1, SPR_MQ);
4776 gen_store_spr(SPR_MQ, t0);
4777 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
4778 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
4779 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4780 tcg_temp_free(t0);
4781 tcg_temp_free(t1);
4782 if (unlikely(Rc(ctx->opcode) != 0))
4783 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4786 /* sllq - sllq. */
4787 static void gen_sllq(DisasContext *ctx)
4789 int l1 = gen_new_label();
4790 int l2 = gen_new_label();
4791 TCGv t0 = tcg_temp_local_new();
4792 TCGv t1 = tcg_temp_local_new();
4793 TCGv t2 = tcg_temp_local_new();
4794 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4795 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4796 tcg_gen_shl_tl(t1, t1, t2);
4797 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4798 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
4799 gen_load_spr(t0, SPR_MQ);
4800 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4801 tcg_gen_br(l2);
4802 gen_set_label(l1);
4803 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4804 gen_load_spr(t2, SPR_MQ);
4805 tcg_gen_andc_tl(t1, t2, t1);
4806 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4807 gen_set_label(l2);
4808 tcg_temp_free(t0);
4809 tcg_temp_free(t1);
4810 tcg_temp_free(t2);
4811 if (unlikely(Rc(ctx->opcode) != 0))
4812 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4815 /* slq - slq. */
4816 static void gen_slq(DisasContext *ctx)
4818 int l1 = gen_new_label();
4819 TCGv t0 = tcg_temp_new();
4820 TCGv t1 = tcg_temp_new();
4821 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4822 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4823 tcg_gen_subfi_tl(t1, 32, t1);
4824 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4825 tcg_gen_or_tl(t1, t0, t1);
4826 gen_store_spr(SPR_MQ, t1);
4827 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
4828 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4829 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4830 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
4831 gen_set_label(l1);
4832 tcg_temp_free(t0);
4833 tcg_temp_free(t1);
4834 if (unlikely(Rc(ctx->opcode) != 0))
4835 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4838 /* sraiq - sraiq. */
4839 static void gen_sraiq(DisasContext *ctx)
4841 int sh = SH(ctx->opcode);
4842 int l1 = gen_new_label();
4843 TCGv t0 = tcg_temp_new();
4844 TCGv t1 = tcg_temp_new();
4845 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4846 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4847 tcg_gen_or_tl(t0, t0, t1);
4848 gen_store_spr(SPR_MQ, t0);
4849 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
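/* CA is set only when rS is negative and at least one 1-bit was shifted
 * out; it is cleared otherwise.
 */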
4850 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4851 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
4852 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4853 gen_set_label(l1);
4854 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
4855 tcg_temp_free(t0);
4856 tcg_temp_free(t1);
4857 if (unlikely(Rc(ctx->opcode) != 0))
4858 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4861 /* sraq - sraq. */
4862 static void gen_sraq(DisasContext *ctx)
4864 int l1 = gen_new_label();
4865 int l2 = gen_new_label();
4866 TCGv t0 = tcg_temp_new();
4867 TCGv t1 = tcg_temp_local_new();
4868 TCGv t2 = tcg_temp_local_new();
4869 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4870 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4871 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
4872 tcg_gen_subfi_tl(t2, 32, t2);
4873 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
4874 tcg_gen_or_tl(t0, t0, t2);
4875 gen_store_spr(SPR_MQ, t0);
4876 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4877 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1);
4878 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
4879 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
4880 gen_set_label(l1);
4881 tcg_temp_free(t0);
4882 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
4883 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
4884 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4885 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
4886 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4887 gen_set_label(l2);
4888 tcg_temp_free(t1);
4889 tcg_temp_free(t2);
4890 if (unlikely(Rc(ctx->opcode) != 0))
4891 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4894 /* sre - sre. */
4895 static void gen_sre(DisasContext *ctx)
4897 TCGv t0 = tcg_temp_new();
4898 TCGv t1 = tcg_temp_new();
4899 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4900 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4901 tcg_gen_subfi_tl(t1, 32, t1);
4902 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4903 tcg_gen_or_tl(t1, t0, t1);
4904 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4905 gen_store_spr(SPR_MQ, t1);
4906 tcg_temp_free(t0);
4907 tcg_temp_free(t1);
4908 if (unlikely(Rc(ctx->opcode) != 0))
4909 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4912 /* srea - srea. */
4913 static void gen_srea(DisasContext *ctx)
4915 TCGv t0 = tcg_temp_new();
4916 TCGv t1 = tcg_temp_new();
4917 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4918 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4919 gen_store_spr(SPR_MQ, t0);
4920 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
4921 tcg_temp_free(t0);
4922 tcg_temp_free(t1);
4923 if (unlikely(Rc(ctx->opcode) != 0))
4924 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4927 /* sreq */
4928 static void gen_sreq(DisasContext *ctx)
4930 TCGv t0 = tcg_temp_new();
4931 TCGv t1 = tcg_temp_new();
4932 TCGv t2 = tcg_temp_new();
4933 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4934 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4935 tcg_gen_shr_tl(t1, t1, t0);
4936 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4937 gen_load_spr(t2, SPR_MQ);
4938 gen_store_spr(SPR_MQ, t0);
4939 tcg_gen_and_tl(t0, t0, t1);
4940 tcg_gen_andc_tl(t2, t2, t1);
4941 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
4942 tcg_temp_free(t0);
4943 tcg_temp_free(t1);
4944 tcg_temp_free(t2);
4945 if (unlikely(Rc(ctx->opcode) != 0))
4946 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4949 /* sriq */
4950 static void gen_sriq(DisasContext *ctx)
4952 int sh = SH(ctx->opcode);
4953 TCGv t0 = tcg_temp_new();
4954 TCGv t1 = tcg_temp_new();
4955 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4956 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4957 tcg_gen_or_tl(t1, t0, t1);
4958 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4959 gen_store_spr(SPR_MQ, t1);
4960 tcg_temp_free(t0);
4961 tcg_temp_free(t1);
4962 if (unlikely(Rc(ctx->opcode) != 0))
4963 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4966 /* srliq */
4967 static void gen_srliq(DisasContext *ctx)
4969 int sh = SH(ctx->opcode);
4970 TCGv t0 = tcg_temp_new();
4971 TCGv t1 = tcg_temp_new();
4972 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4973 gen_load_spr(t1, SPR_MQ);
4974 gen_store_spr(SPR_MQ, t0);
4975 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
4976 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
4977 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4978 tcg_temp_free(t0);
4979 tcg_temp_free(t1);
4980 if (unlikely(Rc(ctx->opcode) != 0))
4981 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4984 /* srlq */
4985 static void gen_srlq(DisasContext *ctx)
4987 int l1 = gen_new_label();
4988 int l2 = gen_new_label();
4989 TCGv t0 = tcg_temp_local_new();
4990 TCGv t1 = tcg_temp_local_new();
4991 TCGv t2 = tcg_temp_local_new();
4992 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4993 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4994 tcg_gen_shr_tl(t2, t1, t2);
4995 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4996 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
4997 gen_load_spr(t0, SPR_MQ);
4998 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
4999 tcg_gen_br(l2);
5000 gen_set_label(l1);
5001 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5002 tcg_gen_and_tl(t0, t0, t2);
5003 gen_load_spr(t1, SPR_MQ);
5004 tcg_gen_andc_tl(t1, t1, t2);
5005 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5006 gen_set_label(l2);
5007 tcg_temp_free(t0);
5008 tcg_temp_free(t1);
5009 tcg_temp_free(t2);
5010 if (unlikely(Rc(ctx->opcode) != 0))
5011 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5014 /* srq */
5015 static void gen_srq(DisasContext *ctx)
5017 int l1 = gen_new_label();
5018 TCGv t0 = tcg_temp_new();
5019 TCGv t1 = tcg_temp_new();
5020 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5021 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5022 tcg_gen_subfi_tl(t1, 32, t1);
5023 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5024 tcg_gen_or_tl(t1, t0, t1);
5025 gen_store_spr(SPR_MQ, t1);
5026 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
5027 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5028 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5029 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
5030 gen_set_label(l1);
5031 tcg_temp_free(t0);
5032 tcg_temp_free(t1);
5033 if (unlikely(Rc(ctx->opcode) != 0))
5034 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5037 /* PowerPC 602 specific instructions */
5039 /* dsa */
5040 static void gen_dsa(DisasContext *ctx)
5042 /* XXX: TODO */
5043 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5046 /* esa */
5047 static void gen_esa(DisasContext *ctx)
5049 /* XXX: TODO */
5050 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5053 /* mfrom */
5054 static void gen_mfrom(DisasContext *ctx)
5056 #if defined(CONFIG_USER_ONLY)
5057 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5058 #else
5059 if (unlikely(!ctx->mem_idx)) {
5060 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5061 return;
5063 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5064 #endif
5067 /* 602 - 603 - G2 TLB management */
5069 /* tlbld */
5070 static void gen_tlbld_6xx(DisasContext *ctx)
5072 #if defined(CONFIG_USER_ONLY)
5073 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5074 #else
5075 if (unlikely(!ctx->mem_idx)) {
5076 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5077 return;
5079 gen_helper_6xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
5080 #endif
5083 /* tlbli */
5084 static void gen_tlbli_6xx(DisasContext *ctx)
5086 #if defined(CONFIG_USER_ONLY)
5087 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5088 #else
5089 if (unlikely(!ctx->mem_idx)) {
5090 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5091 return;
5093 gen_helper_6xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5094 #endif
5097 /* 74xx TLB management */
5099 /* tlbld */
5100 static void gen_tlbld_74xx(DisasContext *ctx)
5102 #if defined(CONFIG_USER_ONLY)
5103 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5104 #else
5105 if (unlikely(!ctx->mem_idx)) {
5106 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5107 return;
5109 gen_helper_74xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
5110 #endif
5113 /* tlbli */
5114 static void gen_tlbli_74xx(DisasContext *ctx)
5116 #if defined(CONFIG_USER_ONLY)
5117 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5118 #else
5119 if (unlikely(!ctx->mem_idx)) {
5120 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5121 return;
5123 gen_helper_74xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5124 #endif
5127 /* POWER instructions not in PowerPC 601 */
5129 /* clf */
5130 static void gen_clf(DisasContext *ctx)
5132 /* Cache line flush: implemented as no-op */
5135 /* cli */
5136 static void gen_cli(DisasContext *ctx)
5138 /* Cache line invalidate: privileged and treated as no-op */
5139 #if defined(CONFIG_USER_ONLY)
5140 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5141 #else
5142 if (unlikely(!ctx->mem_idx)) {
5143 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5144 return;
5146 #endif
5149 /* dclst */
5150 static void gen_dclst(DisasContext *ctx)
5152 /* Data cache line store: treated as no-op */
5155 static void gen_mfsri(DisasContext *ctx)
5157 #if defined(CONFIG_USER_ONLY)
5158 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5159 #else
5160 int ra = rA(ctx->opcode);
5161 int rd = rD(ctx->opcode);
5162 TCGv t0;
5163 if (unlikely(!ctx->mem_idx)) {
5164 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5165 return;
5167 t0 = tcg_temp_new();
5168 gen_addr_reg_index(ctx, t0);
5169 tcg_gen_shri_tl(t0, t0, 28);
5170 tcg_gen_andi_tl(t0, t0, 0xF);
5171 gen_helper_load_sr(cpu_gpr[rd], t0);
5172 tcg_temp_free(t0);
5173 if (ra != 0 && ra != rd)
5174 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
5175 #endif
5178 static void gen_rac(DisasContext *ctx)
5180 #if defined(CONFIG_USER_ONLY)
5181 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5182 #else
5183 TCGv t0;
5184 if (unlikely(!ctx->mem_idx)) {
5185 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5186 return;
5188 t0 = tcg_temp_new();
5189 gen_addr_reg_index(ctx, t0);
5190 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], t0);
5191 tcg_temp_free(t0);
5192 #endif
5195 static void gen_rfsvc(DisasContext *ctx)
5197 #if defined(CONFIG_USER_ONLY)
5198 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5199 #else
5200 if (unlikely(!ctx->mem_idx)) {
5201 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5202 return;
5204 gen_helper_rfsvc();
5205 gen_sync_exception(ctx);
5206 #endif
5209 /* svc is not implemented for now */
5211 /* POWER2 specific instructions */
5212 /* Quad manipulation (load/store two floats at a time) */
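/* The second FPR of each pair is (rd + 1) modulo 32, so a quad access to
 * fp31 wraps around to fp0.
 */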
5214 /* lfq */
5215 static void gen_lfq(DisasContext *ctx)
5217 int rd = rD(ctx->opcode);
5218 TCGv t0;
5219 gen_set_access_type(ctx, ACCESS_FLOAT);
5220 t0 = tcg_temp_new();
5221 gen_addr_imm_index(ctx, t0, 0);
5222 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5223 gen_addr_add(ctx, t0, t0, 8);
5224 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5225 tcg_temp_free(t0);
5228 /* lfqu */
5229 static void gen_lfqu(DisasContext *ctx)
5231 int ra = rA(ctx->opcode);
5232 int rd = rD(ctx->opcode);
5233 TCGv t0, t1;
5234 gen_set_access_type(ctx, ACCESS_FLOAT);
5235 t0 = tcg_temp_new();
5236 t1 = tcg_temp_new();
5237 gen_addr_imm_index(ctx, t0, 0);
5238 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5239 gen_addr_add(ctx, t1, t0, 8);
5240 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5241 if (ra != 0)
5242 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5243 tcg_temp_free(t0);
5244 tcg_temp_free(t1);
5247 /* lfqux */
5248 static void gen_lfqux(DisasContext *ctx)
5250 int ra = rA(ctx->opcode);
5251 int rd = rD(ctx->opcode);
5252 TCGv t0, t1;
5253 gen_set_access_type(ctx, ACCESS_FLOAT);
5254 t0 = tcg_temp_new();
5255 gen_addr_reg_index(ctx, t0);
5256 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5257 t1 = tcg_temp_new();
5258 gen_addr_add(ctx, t1, t0, 8);
5259 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5260 tcg_temp_free(t1);
5261 if (ra != 0)
5262 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5263 tcg_temp_free(t0);
5266 /* lfqx */
5267 static void gen_lfqx(DisasContext *ctx)
5269 int rd = rD(ctx->opcode);
5270 TCGv t0;
5271 gen_set_access_type(ctx, ACCESS_FLOAT);
5272 t0 = tcg_temp_new();
5273 gen_addr_reg_index(ctx, t0);
5274 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5275 gen_addr_add(ctx, t0, t0, 8);
5276 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5277 tcg_temp_free(t0);
5280 /* stfq */
5281 static void gen_stfq(DisasContext *ctx)
5283 int rd = rD(ctx->opcode);
5284 TCGv t0;
5285 gen_set_access_type(ctx, ACCESS_FLOAT);
5286 t0 = tcg_temp_new();
5287 gen_addr_imm_index(ctx, t0, 0);
5288 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5289 gen_addr_add(ctx, t0, t0, 8);
5290 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5291 tcg_temp_free(t0);
5294 /* stfqu */
5295 static void gen_stfqu(DisasContext *ctx)
5297 int ra = rA(ctx->opcode);
5298 int rd = rD(ctx->opcode);
5299 TCGv t0, t1;
5300 gen_set_access_type(ctx, ACCESS_FLOAT);
5301 t0 = tcg_temp_new();
5302 gen_addr_imm_index(ctx, t0, 0);
5303 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5304 t1 = tcg_temp_new();
5305 gen_addr_add(ctx, t1, t0, 8);
5306 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5307 tcg_temp_free(t1);
5308 if (ra != 0)
5309 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5310 tcg_temp_free(t0);
5313 /* stfqux */
5314 static void gen_stfqux(DisasContext *ctx)
5316 int ra = rA(ctx->opcode);
5317 int rd = rD(ctx->opcode);
5318 TCGv t0, t1;
5319 gen_set_access_type(ctx, ACCESS_FLOAT);
5320 t0 = tcg_temp_new();
5321 gen_addr_reg_index(ctx, t0);
5322 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5323 t1 = tcg_temp_new();
5324 gen_addr_add(ctx, t1, t0, 8);
5325 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5326 tcg_temp_free(t1);
5327 if (ra != 0)
5328 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5329 tcg_temp_free(t0);
5332 /* stfqx */
5333 static void gen_stfqx(DisasContext *ctx)
5335 int rd = rD(ctx->opcode);
5336 TCGv t0;
5337 gen_set_access_type(ctx, ACCESS_FLOAT);
5338 t0 = tcg_temp_new();
5339 gen_addr_reg_index(ctx, t0);
5340 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5341 gen_addr_add(ctx, t0, t0, 8);
5342 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5343 tcg_temp_free(t0);
5346 /* BookE specific instructions */
5348 /* XXX: not implemented on 440 ? */
5349 static void gen_mfapidi(DisasContext *ctx)
5351 /* XXX: TODO */
5352 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5355 /* XXX: not implemented on 440 ? */
5356 static void gen_tlbiva(DisasContext *ctx)
5358 #if defined(CONFIG_USER_ONLY)
5359 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5360 #else
5361 TCGv t0;
5362 if (unlikely(!ctx->mem_idx)) {
5363 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5364 return;
5366 t0 = tcg_temp_new();
5367 gen_addr_reg_index(ctx, t0);
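/* XXX: the effective address computed into t0 above appears to be unused;
 * the helper is invoked with rB directly.
 */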
5368 gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
5369 tcg_temp_free(t0);
5370 #endif
5373 /* All 405 MAC instructions are translated here */
5374 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5375 int ra, int rb, int rt, int Rc)
5377 TCGv t0, t1;
5379 t0 = tcg_temp_local_new();
5380 t1 = tcg_temp_local_new();
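/* opc3 bit 0 (0x01) selects signed operands and bits 2-3 (0x0C) select the
 * halfwords: 0x04 takes the low half of rA and the high half of rB, 0x00
 * takes both high halves, 0x0C both low halves. opc2 bit 2 (0x04) selects
 * multiply-accumulate and bit 1 (0x02) its negative form; opc3 bit 4 (0x10)
 * enables the XER[OV] update and bit 1 (0x02) saturation. For example,
 * macchw (opc2 0x0C, opc3 0x05) multiplies the signed low halfword of rA by
 * the signed high halfword of rB and accumulates the product into rD.
 */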
5382 switch (opc3 & 0x0D) {
5383 case 0x05:
5384 /* macchw - macchw. - macchwo - macchwo. */
5385 /* macchws - macchws. - macchwso - macchwso. */
5386 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
5387 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
5388 /* mulchw - mulchw. */
5389 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5390 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5391 tcg_gen_ext16s_tl(t1, t1);
5392 break;
5393 case 0x04:
5394 /* macchwu - macchwu. - macchwuo - macchwuo. */
5395 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
5396 /* mulchwu - mulchwu. */
5397 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5398 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5399 tcg_gen_ext16u_tl(t1, t1);
5400 break;
5401 case 0x01:
5402 /* machhw - machhw. - machhwo - machhwo. */
5403 /* machhws - machhws. - machhwso - machhwso. */
5404 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
5405 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
5406 /* mulhhw - mulhhw. */
5407 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5408 tcg_gen_ext16s_tl(t0, t0);
5409 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5410 tcg_gen_ext16s_tl(t1, t1);
5411 break;
5412 case 0x00:
5413 /* machhwu - machhwu. - machhwuo - machhwuo. */
5414 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
5415 /* mulhhwu - mulhhwu. */
5416 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5417 tcg_gen_ext16u_tl(t0, t0);
5418 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5419 tcg_gen_ext16u_tl(t1, t1);
5420 break;
5421 case 0x0D:
5422 /* maclhw - maclhw. - maclhwo - maclhwo. */
5423 /* maclhws - maclhws. - maclhwso - maclhwso. */
5424 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
5425 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
5426 /* mullhw - mullhw. */
5427 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5428 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5429 break;
5430 case 0x0C:
5431 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
5432 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
5433 /* mullhwu - mullhwu. */
5434 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5435 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5436 break;
5438 if (opc2 & 0x04) {
5439 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5440 tcg_gen_mul_tl(t1, t0, t1);
5441 if (opc2 & 0x02) {
5442 /* nmultiply-and-accumulate (0x0E) */
5443 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5444 } else {
5445 /* multiply-and-accumulate (0x0C) */
5446 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5449 if (opc3 & 0x12) {
5450 /* Check overflow and/or saturate */
5451 int l1 = gen_new_label();
5453 if (opc3 & 0x10) {
5454 /* Start with XER OV disabled, the most likely case */
5455 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
5457 if (opc3 & 0x01) {
5458 /* Signed */
5459 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5460 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5461 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5462 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5463 if (opc3 & 0x02) {
5464 /* Saturate */
5465 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5466 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5468 } else {
5469 /* Unsigned */
5470 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5471 if (opc3 & 0x02) {
5472 /* Saturate */
5473 tcg_gen_movi_tl(t0, UINT32_MAX);
5476 if (opc3 & 0x10) {
5477 /* Check overflow */
5478 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
5480 gen_set_label(l1);
5481 tcg_gen_mov_tl(cpu_gpr[rt], t0);
5483 } else {
5484 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5486 tcg_temp_free(t0);
5487 tcg_temp_free(t1);
5488 if (unlikely(Rc != 0)) {
5489 /* Update Rc0 */
5490 gen_set_Rc0(ctx, cpu_gpr[rt]);
5494 #define GEN_MAC_HANDLER(name, opc2, opc3) \
5495 static void glue(gen_, name)(DisasContext *ctx) \
5497 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
5498 rD(ctx->opcode), Rc(ctx->opcode)); \
5501 /* macchw - macchw. */
5502 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5503 /* macchwo - macchwo. */
5504 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5505 /* macchws - macchws. */
5506 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5507 /* macchwso - macchwso. */
5508 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5509 /* macchwsu - macchwsu. */
5510 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5511 /* macchwsuo - macchwsuo. */
5512 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5513 /* macchwu - macchwu. */
5514 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5515 /* macchwuo - macchwuo. */
5516 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5517 /* machhw - machhw. */
5518 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5519 /* machhwo - machhwo. */
5520 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5521 /* machhws - machhws. */
5522 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5523 /* machhwso - machhwso. */
5524 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5525 /* machhwsu - machhwsu. */
5526 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5527 /* machhwsuo - machhwsuo. */
5528 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5529 /* machhwu - machhwu. */
5530 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5531 /* machhwuo - machhwuo. */
5532 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5533 /* maclhw - maclhw. */
5534 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5535 /* maclhwo - maclhwo. */
5536 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5537 /* maclhws - maclhws. */
5538 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5539 /* maclhwso - maclhwso. */
5540 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5541 /* maclhwu - maclhwu. */
5542 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5543 /* maclhwuo - maclhwuo. */
5544 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5545 /* maclhwsu - maclhwsu. */
5546 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5547 /* maclhwsuo - maclhwsuo. */
5548 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5549 /* nmacchw - nmacchw. */
5550 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5551 /* nmacchwo - nmacchwo. */
5552 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5553 /* nmacchws - nmacchws. */
5554 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5555 /* nmacchwso - nmacchwso. */
5556 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5557 /* nmachhw - nmachhw. */
5558 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5559 /* nmachhwo - nmachhwo. */
5560 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5561 /* nmachhws - nmachhws. */
5562 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5563 /* nmachhwso - nmachhwso. */
5564 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5565 /* nmaclhw - nmaclhw. */
5566 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5567 /* nmaclhwo - nmaclhwo. */
5568 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5569 /* nmaclhws - nmaclhws. */
5570 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5571 /* nmaclhwso - nmaclhwso. */
5572 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5574 /* mulchw - mulchw. */
5575 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5576 /* mulchwu - mulchwu. */
5577 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5578 /* mulhhw - mulhhw. */
5579 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5580 /* mulhhwu - mulhhwu. */
5581 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5582 /* mullhw - mullhw. */
5583 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5584 /* mullhwu - mullhwu. */
5585 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5587 /* mfdcr */
5588 static void gen_mfdcr(DisasContext *ctx)
5590 #if defined(CONFIG_USER_ONLY)
5591 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5592 #else
5593 TCGv dcrn;
5594 if (unlikely(!ctx->mem_idx)) {
5595 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5596 return;
5598 /* NIP cannot be restored if the memory exception comes from a helper */
5599 gen_update_nip(ctx, ctx->nip - 4);
5600 dcrn = tcg_const_tl(SPR(ctx->opcode));
5601 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], dcrn);
5602 tcg_temp_free(dcrn);
5603 #endif
5606 /* mtdcr */
5607 static void gen_mtdcr(DisasContext *ctx)
5609 #if defined(CONFIG_USER_ONLY)
5610 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5611 #else
5612 TCGv dcrn;
5613 if (unlikely(!ctx->mem_idx)) {
5614 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5615 return;
5617 /* NIP cannot be restored if the memory exception comes from a helper */
5618 gen_update_nip(ctx, ctx->nip - 4);
5619 dcrn = tcg_const_tl(SPR(ctx->opcode));
5620 gen_helper_store_dcr(dcrn, cpu_gpr[rS(ctx->opcode)]);
5621 tcg_temp_free(dcrn);
5622 #endif
5625 /* mfdcrx */
5626 /* XXX: not implemented on 440 ? */
5627 static void gen_mfdcrx(DisasContext *ctx)
5629 #if defined(CONFIG_USER_ONLY)
5630 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5631 #else
5632 if (unlikely(!ctx->mem_idx)) {
5633 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5634 return;
5636 /* NIP cannot be restored if the memory exception comes from a helper */
5637 gen_update_nip(ctx, ctx->nip - 4);
5638 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5639 /* Note: if the Rc update flag is set, Rc0 is left in an undefined state */
5640 #endif
5643 /* mtdcrx */
5644 /* XXX: not implemented on 440 ? */
5645 static void gen_mtdcrx(DisasContext *ctx)
5647 #if defined(CONFIG_USER_ONLY)
5648 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5649 #else
5650 if (unlikely(!ctx->mem_idx)) {
5651 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5652 return;
5654 /* NIP cannot be restored if the memory exception comes from a helper */
5655 gen_update_nip(ctx, ctx->nip - 4);
5656 gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5657 /* Note: if the Rc update flag is set, Rc0 is left in an undefined state */
5658 #endif
5661 /* mfdcrux (PPC 460) : user-mode access to DCR */
5662 static void gen_mfdcrux(DisasContext *ctx)
5664 /* NIP cannot be restored if the memory exception comes from a helper */
5665 gen_update_nip(ctx, ctx->nip - 4);
5666 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5667 /* Note: if the Rc update flag is set, Rc0 is left in an undefined state */
5670 /* mtdcrux (PPC 460) : user-mode access to DCR */
5671 static void gen_mtdcrux(DisasContext *ctx)
5673 /* NIP cannot be restored if the memory exception comes from a helper */
5674 gen_update_nip(ctx, ctx->nip - 4);
5675 gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5676 /* Note: if the Rc update flag is set, Rc0 is left in an undefined state */
5679 /* dccci */
5680 static void gen_dccci(DisasContext *ctx)
5682 #if defined(CONFIG_USER_ONLY)
5683 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5684 #else
5685 if (unlikely(!ctx->mem_idx)) {
5686 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5687 return;
5689 /* interpreted as no-op */
5690 #endif
5693 /* dcread */
5694 static void gen_dcread(DisasContext *ctx)
5696 #if defined(CONFIG_USER_ONLY)
5697 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5698 #else
5699 TCGv EA, val;
5700 if (unlikely(!ctx->mem_idx)) {
5701 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5702 return;
5704 gen_set_access_type(ctx, ACCESS_CACHE);
5705 EA = tcg_temp_new();
5706 gen_addr_reg_index(ctx, EA);
5707 val = tcg_temp_new();
5708 gen_qemu_ld32u(ctx, val, EA);
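/* The value loaded into val is discarded; rD receives the effective
 * address of the access.
 */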
5709 tcg_temp_free(val);
5710 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5711 tcg_temp_free(EA);
5712 #endif
5715 /* icbt */
5716 static void gen_icbt_40x(DisasContext *ctx)
5718 /* interpreted as no-op */
5719 /* XXX: specification says this is treated as a load by the MMU
5720 * but does not generate any exception */
5724 /* iccci */
5725 static void gen_iccci(DisasContext *ctx)
5727 #if defined(CONFIG_USER_ONLY)
5728 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5729 #else
5730 if (unlikely(!ctx->mem_idx)) {
5731 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5732 return;
5734 /* interpreted as no-op */
5735 #endif
5738 /* icread */
5739 static void gen_icread(DisasContext *ctx)
5741 #if defined(CONFIG_USER_ONLY)
5742 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5743 #else
5744 if (unlikely(!ctx->mem_idx)) {
5745 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5746 return;
5748 /* interpreted as no-op */
5749 #endif
5752 /* rfci (mem_idx only) */
5753 static void gen_rfci_40x(DisasContext *ctx)
5755 #if defined(CONFIG_USER_ONLY)
5756 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5757 #else
5758 if (unlikely(!ctx->mem_idx)) {
5759 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5760 return;
5762 /* Restore CPU state */
5763 gen_helper_40x_rfci();
5764 gen_sync_exception(ctx);
5765 #endif
5768 static void gen_rfci(DisasContext *ctx)
5770 #if defined(CONFIG_USER_ONLY)
5771 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5772 #else
5773 if (unlikely(!ctx->mem_idx)) {
5774 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5775 return;
5777 /* Restore CPU state */
5778 gen_helper_rfci();
5779 gen_sync_exception(ctx);
5780 #endif
5783 /* BookE specific */
5785 /* XXX: not implemented on 440 ? */
5786 static void gen_rfdi(DisasContext *ctx)
5788 #if defined(CONFIG_USER_ONLY)
5789 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5790 #else
5791 if (unlikely(!ctx->mem_idx)) {
5792 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5793 return;
5795 /* Restore CPU state */
5796 gen_helper_rfdi();
5797 gen_sync_exception(ctx);
5798 #endif
5801 /* XXX: not implemented on 440 ? */
5802 static void gen_rfmci(DisasContext *ctx)
5804 #if defined(CONFIG_USER_ONLY)
5805 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5806 #else
5807 if (unlikely(!ctx->mem_idx)) {
5808 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5809 return;
5811 /* Restore CPU state */
5812 gen_helper_rfmci();
5813 gen_sync_exception(ctx);
5814 #endif
5817 /* TLB management - PowerPC 405 implementation */
5819 /* tlbre */
5820 static void gen_tlbre_40x(DisasContext *ctx)
5822 #if defined(CONFIG_USER_ONLY)
5823 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5824 #else
5825 if (unlikely(!ctx->mem_idx)) {
5826 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5827 return;
5829 switch (rB(ctx->opcode)) {
5830 case 0:
5831 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5832 break;
5833 case 1:
5834 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5835 break;
5836 default:
5837 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5838 break;
5840 #endif
5843 /* tlbsx - tlbsx. */
5844 static void gen_tlbsx_40x(DisasContext *ctx)
5846 #if defined(CONFIG_USER_ONLY)
5847 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5848 #else
5849 TCGv t0;
5850 if (unlikely(!ctx->mem_idx)) {
5851 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5852 return;
5854 t0 = tcg_temp_new();
5855 gen_addr_reg_index(ctx, t0);
5856 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], t0);
5857 tcg_temp_free(t0);
5858 if (Rc(ctx->opcode)) {
5859 int l1 = gen_new_label();
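/* CR0[SO] is copied from XER[SO]; CR0[EQ] is set unless rD is -1 (which the
 * helper presumably returns when no matching TLB entry is found).
 */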
5860 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
5861 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
5862 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
5863 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5864 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5865 gen_set_label(l1);
5867 #endif
5870 /* tlbwe */
5871 static void gen_tlbwe_40x(DisasContext *ctx)
5873 #if defined(CONFIG_USER_ONLY)
5874 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5875 #else
5876 if (unlikely(!ctx->mem_idx)) {
5877 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5878 return;
5880 switch (rB(ctx->opcode)) {
5881 case 0:
5882 gen_helper_4xx_tlbwe_hi(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5883 break;
5884 case 1:
5885 gen_helper_4xx_tlbwe_lo(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5886 break;
5887 default:
5888 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5889 break;
5891 #endif
5894 /* TLB management - PowerPC 440 implementation */
5896 /* tlbre */
5897 static void gen_tlbre_440(DisasContext *ctx)
5899 #if defined(CONFIG_USER_ONLY)
5900 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5901 #else
5902 if (unlikely(!ctx->mem_idx)) {
5903 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5904 return;
5906 switch (rB(ctx->opcode)) {
5907 case 0:
5908 case 1:
5909 case 2:
5911 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5912 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], t0, cpu_gpr[rA(ctx->opcode)]);
5913 tcg_temp_free_i32(t0);
5915 break;
5916 default:
5917 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5918 break;
5920 #endif
5923 /* tlbsx - tlbsx. */
5924 static void gen_tlbsx_440(DisasContext *ctx)
5926 #if defined(CONFIG_USER_ONLY)
5927 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5928 #else
5929 TCGv t0;
5930 if (unlikely(!ctx->mem_idx)) {
5931 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5932 return;
5934 t0 = tcg_temp_new();
5935 gen_addr_reg_index(ctx, t0);
5936 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], t0);
5937 tcg_temp_free(t0);
5938 if (Rc(ctx->opcode)) {
5939 int l1 = gen_new_label();
5940 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
5941 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
5942 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
5943 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5944 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5945 gen_set_label(l1);
5947 #endif
5950 /* tlbwe */
5951 static void gen_tlbwe_440(DisasContext *ctx)
5953 #if defined(CONFIG_USER_ONLY)
5954 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5955 #else
5956 if (unlikely(!ctx->mem_idx)) {
5957 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5958 return;
5960 switch (rB(ctx->opcode)) {
5961 case 0:
5962 case 1:
5963 case 2:
5965 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5966 gen_helper_440_tlbwe(t0, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5967 tcg_temp_free_i32(t0);
5969 break;
5970 default:
5971 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5972 break;
5974 #endif
5977 /* wrtee */
5978 static void gen_wrtee(DisasContext *ctx)
5980 #if defined(CONFIG_USER_ONLY)
5981 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5982 #else
5983 TCGv t0;
5984 if (unlikely(!ctx->mem_idx)) {
5985 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5986 return;
5988 t0 = tcg_temp_new();
5989 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
5990 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
5991 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
5992 tcg_temp_free(t0);
5993 /* Stop translation to have a chance to raise an exception
5994 * if we just set msr_ee to 1 */
5996 gen_stop_exception(ctx);
5997 #endif
6000 /* wrteei */
6001 static void gen_wrteei(DisasContext *ctx)
6003 #if defined(CONFIG_USER_ONLY)
6004 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6005 #else
6006 if (unlikely(!ctx->mem_idx)) {
6007 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6008 return;
6010 if (ctx->opcode & 0x00008000) {
6011 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6012 /* Stop translation to have a chance to raise an exception */
6013 gen_stop_exception(ctx);
6014 } else {
6015 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6017 #endif
6020 /* PowerPC 440 specific instructions */
6022 /* dlmzb */
6023 static void gen_dlmzb(DisasContext *ctx)
6025 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6026 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
6027 cpu_gpr[rB(ctx->opcode)], t0);
6028 tcg_temp_free_i32(t0);
6031 /* mbar replaces eieio on 440 */
6032 static void gen_mbar(DisasContext *ctx)
6034 /* interpreted as no-op */
6037 /* msync replaces sync on 440 */
6038 static void gen_msync(DisasContext *ctx)
6040 /* interpreted as no-op */
6043 /* icbt */
6044 static void gen_icbt_440(DisasContext *ctx)
6046 /* interpreted as no-op */
6047 /* XXX: the specification says this is treated as a load by the MMU,
6048 * but it does not generate any exception */
6052 /*** Altivec vector extension ***/
6053 /* Altivec registers moves */
6055 static inline TCGv_ptr gen_avr_ptr(int reg)
6057 TCGv_ptr r = tcg_temp_new_ptr();
6058 tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, avr[reg]));
6059 return r;
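/*
 * Each 128-bit AltiVec register is modelled as two 64-bit TCG globals,
 * cpu_avrh (upper half) and cpu_avrl (lower half); gen_avr_ptr() instead
 * hands helpers a pointer to the whole avr[reg] entry in CPUState.  The
 * load/store macros below therefore split every vector access into two
 * 64-bit memory operations, swapping the halves in little-endian mode.
 */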
6062 #define GEN_VR_LDX(name, opc2, opc3) \
6063 static void glue(gen_, name)(DisasContext *ctx) \
6065 TCGv EA; \
6066 if (unlikely(!ctx->altivec_enabled)) { \
6067 gen_exception(ctx, POWERPC_EXCP_VPU); \
6068 return; \
6070 gen_set_access_type(ctx, ACCESS_INT); \
6071 EA = tcg_temp_new(); \
6072 gen_addr_reg_index(ctx, EA); \
6073 tcg_gen_andi_tl(EA, EA, ~0xf); \
6074 if (ctx->le_mode) { \
6075 gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6076 tcg_gen_addi_tl(EA, EA, 8); \
6077 gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6078 } else { \
6079 gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6080 tcg_gen_addi_tl(EA, EA, 8); \
6081 gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6083 tcg_temp_free(EA); \
6086 #define GEN_VR_STX(name, opc2, opc3) \
6087 static void gen_st##name(DisasContext *ctx) \
6089 TCGv EA; \
6090 if (unlikely(!ctx->altivec_enabled)) { \
6091 gen_exception(ctx, POWERPC_EXCP_VPU); \
6092 return; \
6094 gen_set_access_type(ctx, ACCESS_INT); \
6095 EA = tcg_temp_new(); \
6096 gen_addr_reg_index(ctx, EA); \
6097 tcg_gen_andi_tl(EA, EA, ~0xf); \
6098 if (ctx->le_mode) { \
6099 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6100 tcg_gen_addi_tl(EA, EA, 8); \
6101 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6102 } else { \
6103 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6104 tcg_gen_addi_tl(EA, EA, 8); \
6105 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6107 tcg_temp_free(EA); \
6110 #define GEN_VR_LVE(name, opc2, opc3) \
6111 static void gen_lve##name(DisasContext *ctx) \
6113 TCGv EA; \
6114 TCGv_ptr rs; \
6115 if (unlikely(!ctx->altivec_enabled)) { \
6116 gen_exception(ctx, POWERPC_EXCP_VPU); \
6117 return; \
6119 gen_set_access_type(ctx, ACCESS_INT); \
6120 EA = tcg_temp_new(); \
6121 gen_addr_reg_index(ctx, EA); \
6122 rs = gen_avr_ptr(rS(ctx->opcode)); \
6123 gen_helper_lve##name (rs, EA); \
6124 tcg_temp_free(EA); \
6125 tcg_temp_free_ptr(rs); \
6128 #define GEN_VR_STVE(name, opc2, opc3) \
6129 static void gen_stve##name(DisasContext *ctx) \
6131 TCGv EA; \
6132 TCGv_ptr rs; \
6133 if (unlikely(!ctx->altivec_enabled)) { \
6134 gen_exception(ctx, POWERPC_EXCP_VPU); \
6135 return; \
6137 gen_set_access_type(ctx, ACCESS_INT); \
6138 EA = tcg_temp_new(); \
6139 gen_addr_reg_index(ctx, EA); \
6140 rs = gen_avr_ptr(rS(ctx->opcode)); \
6141 gen_helper_stve##name (rs, EA); \
6142 tcg_temp_free(EA); \
6143 tcg_temp_free_ptr(rs); \
6146 GEN_VR_LDX(lvx, 0x07, 0x03);
6147 /* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
6148 GEN_VR_LDX(lvxl, 0x07, 0x0B);
6150 GEN_VR_LVE(bx, 0x07, 0x00);
6151 GEN_VR_LVE(hx, 0x07, 0x01);
6152 GEN_VR_LVE(wx, 0x07, 0x02);
6154 GEN_VR_STX(svx, 0x07, 0x07);
6155 /* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
6156 GEN_VR_STX(svxl, 0x07, 0x0F);
6158 GEN_VR_STVE(bx, 0x07, 0x04);
6159 GEN_VR_STVE(hx, 0x07, 0x05);
6160 GEN_VR_STVE(wx, 0x07, 0x06);
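/*
 * Note on naming: GEN_VR_STX() prepends "st" to its argument, so the
 * instantiations above define gen_stsvx/gen_stsvxl for stvx/stvxl, while
 * GEN_VR_LDX() uses the plain name (gen_lvx/gen_lvxl).  The lve*/stve*
 * element accesses are performed entirely in helpers, which receive a
 * pointer to the AVR register and the (possibly unaligned) effective
 * address.
 */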
6162 static void gen_lvsl(DisasContext *ctx)
6164 TCGv_ptr rd;
6165 TCGv EA;
6166 if (unlikely(!ctx->altivec_enabled)) {
6167 gen_exception(ctx, POWERPC_EXCP_VPU);
6168 return;
6170 EA = tcg_temp_new();
6171 gen_addr_reg_index(ctx, EA);
6172 rd = gen_avr_ptr(rD(ctx->opcode));
6173 gen_helper_lvsl(rd, EA);
6174 tcg_temp_free(EA);
6175 tcg_temp_free_ptr(rd);
6178 static void gen_lvsr(DisasContext *ctx)
6180 TCGv_ptr rd;
6181 TCGv EA;
6182 if (unlikely(!ctx->altivec_enabled)) {
6183 gen_exception(ctx, POWERPC_EXCP_VPU);
6184 return;
6186 EA = tcg_temp_new();
6187 gen_addr_reg_index(ctx, EA);
6188 rd = gen_avr_ptr(rD(ctx->opcode));
6189 gen_helper_lvsr(rd, EA);
6190 tcg_temp_free(EA);
6191 tcg_temp_free_ptr(rd);
6194 static void gen_mfvscr(DisasContext *ctx)
6196 TCGv_i32 t;
6197 if (unlikely(!ctx->altivec_enabled)) {
6198 gen_exception(ctx, POWERPC_EXCP_VPU);
6199 return;
6201 tcg_gen_movi_i64(cpu_avrh[rD(ctx->opcode)], 0);
6202 t = tcg_temp_new_i32();
6203 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, vscr));
6204 tcg_gen_extu_i32_i64(cpu_avrl[rD(ctx->opcode)], t);
6205 tcg_temp_free_i32(t);
6208 static void gen_mtvscr(DisasContext *ctx)
6210 TCGv_ptr p;
6211 if (unlikely(!ctx->altivec_enabled)) {
6212 gen_exception(ctx, POWERPC_EXCP_VPU);
6213 return;
6215 p = gen_avr_ptr(rD(ctx->opcode));
6216 gen_helper_mtvscr(p);
6217 tcg_temp_free_ptr(p);
6220 /* Logical operations */
6221 #define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
6222 static void glue(gen_, name)(DisasContext *ctx) \
6224 if (unlikely(!ctx->altivec_enabled)) { \
6225 gen_exception(ctx, POWERPC_EXCP_VPU); \
6226 return; \
6228 tcg_op(cpu_avrh[rD(ctx->opcode)], cpu_avrh[rA(ctx->opcode)], cpu_avrh[rB(ctx->opcode)]); \
6229 tcg_op(cpu_avrl[rD(ctx->opcode)], cpu_avrl[rA(ctx->opcode)], cpu_avrl[rB(ctx->opcode)]); \
6232 GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16);
6233 GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17);
6234 GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18);
6235 GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19);
6236 GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20);
6238 #define GEN_VXFORM(name, opc2, opc3) \
6239 static void glue(gen_, name)(DisasContext *ctx) \
6241 TCGv_ptr ra, rb, rd; \
6242 if (unlikely(!ctx->altivec_enabled)) { \
6243 gen_exception(ctx, POWERPC_EXCP_VPU); \
6244 return; \
6246 ra = gen_avr_ptr(rA(ctx->opcode)); \
6247 rb = gen_avr_ptr(rB(ctx->opcode)); \
6248 rd = gen_avr_ptr(rD(ctx->opcode)); \
6249 gen_helper_##name (rd, ra, rb); \
6250 tcg_temp_free_ptr(ra); \
6251 tcg_temp_free_ptr(rb); \
6252 tcg_temp_free_ptr(rd); \
6255 GEN_VXFORM(vaddubm, 0, 0);
6256 GEN_VXFORM(vadduhm, 0, 1);
6257 GEN_VXFORM(vadduwm, 0, 2);
6258 GEN_VXFORM(vsububm, 0, 16);
6259 GEN_VXFORM(vsubuhm, 0, 17);
6260 GEN_VXFORM(vsubuwm, 0, 18);
6261 GEN_VXFORM(vmaxub, 1, 0);
6262 GEN_VXFORM(vmaxuh, 1, 1);
6263 GEN_VXFORM(vmaxuw, 1, 2);
6264 GEN_VXFORM(vmaxsb, 1, 4);
6265 GEN_VXFORM(vmaxsh, 1, 5);
6266 GEN_VXFORM(vmaxsw, 1, 6);
6267 GEN_VXFORM(vminub, 1, 8);
6268 GEN_VXFORM(vminuh, 1, 9);
6269 GEN_VXFORM(vminuw, 1, 10);
6270 GEN_VXFORM(vminsb, 1, 12);
6271 GEN_VXFORM(vminsh, 1, 13);
6272 GEN_VXFORM(vminsw, 1, 14);
6273 GEN_VXFORM(vavgub, 1, 16);
6274 GEN_VXFORM(vavguh, 1, 17);
6275 GEN_VXFORM(vavguw, 1, 18);
6276 GEN_VXFORM(vavgsb, 1, 20);
6277 GEN_VXFORM(vavgsh, 1, 21);
6278 GEN_VXFORM(vavgsw, 1, 22);
6279 GEN_VXFORM(vmrghb, 6, 0);
6280 GEN_VXFORM(vmrghh, 6, 1);
6281 GEN_VXFORM(vmrghw, 6, 2);
6282 GEN_VXFORM(vmrglb, 6, 4);
6283 GEN_VXFORM(vmrglh, 6, 5);
6284 GEN_VXFORM(vmrglw, 6, 6);
6285 GEN_VXFORM(vmuloub, 4, 0);
6286 GEN_VXFORM(vmulouh, 4, 1);
6287 GEN_VXFORM(vmulosb, 4, 4);
6288 GEN_VXFORM(vmulosh, 4, 5);
6289 GEN_VXFORM(vmuleub, 4, 8);
6290 GEN_VXFORM(vmuleuh, 4, 9);
6291 GEN_VXFORM(vmulesb, 4, 12);
6292 GEN_VXFORM(vmulesh, 4, 13);
6293 GEN_VXFORM(vslb, 2, 4);
6294 GEN_VXFORM(vslh, 2, 5);
6295 GEN_VXFORM(vslw, 2, 6);
6296 GEN_VXFORM(vsrb, 2, 8);
6297 GEN_VXFORM(vsrh, 2, 9);
6298 GEN_VXFORM(vsrw, 2, 10);
6299 GEN_VXFORM(vsrab, 2, 12);
6300 GEN_VXFORM(vsrah, 2, 13);
6301 GEN_VXFORM(vsraw, 2, 14);
6302 GEN_VXFORM(vslo, 6, 16);
6303 GEN_VXFORM(vsro, 6, 17);
6304 GEN_VXFORM(vaddcuw, 0, 6);
6305 GEN_VXFORM(vsubcuw, 0, 22);
6306 GEN_VXFORM(vaddubs, 0, 8);
6307 GEN_VXFORM(vadduhs, 0, 9);
6308 GEN_VXFORM(vadduws, 0, 10);
6309 GEN_VXFORM(vaddsbs, 0, 12);
6310 GEN_VXFORM(vaddshs, 0, 13);
6311 GEN_VXFORM(vaddsws, 0, 14);
6312 GEN_VXFORM(vsububs, 0, 24);
6313 GEN_VXFORM(vsubuhs, 0, 25);
6314 GEN_VXFORM(vsubuws, 0, 26);
6315 GEN_VXFORM(vsubsbs, 0, 28);
6316 GEN_VXFORM(vsubshs, 0, 29);
6317 GEN_VXFORM(vsubsws, 0, 30);
6318 GEN_VXFORM(vrlb, 2, 0);
6319 GEN_VXFORM(vrlh, 2, 1);
6320 GEN_VXFORM(vrlw, 2, 2);
6321 GEN_VXFORM(vsl, 2, 7);
6322 GEN_VXFORM(vsr, 2, 11);
6323 GEN_VXFORM(vpkuhum, 7, 0);
6324 GEN_VXFORM(vpkuwum, 7, 1);
6325 GEN_VXFORM(vpkuhus, 7, 2);
6326 GEN_VXFORM(vpkuwus, 7, 3);
6327 GEN_VXFORM(vpkshus, 7, 4);
6328 GEN_VXFORM(vpkswus, 7, 5);
6329 GEN_VXFORM(vpkshss, 7, 6);
6330 GEN_VXFORM(vpkswss, 7, 7);
6331 GEN_VXFORM(vpkpx, 7, 12);
6332 GEN_VXFORM(vsum4ubs, 4, 24);
6333 GEN_VXFORM(vsum4sbs, 4, 28);
6334 GEN_VXFORM(vsum4shs, 4, 25);
6335 GEN_VXFORM(vsum2sws, 4, 26);
6336 GEN_VXFORM(vsumsws, 4, 30);
6337 GEN_VXFORM(vaddfp, 5, 0);
6338 GEN_VXFORM(vsubfp, 5, 1);
6339 GEN_VXFORM(vmaxfp, 5, 16);
6340 GEN_VXFORM(vminfp, 5, 17);
6342 #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
6343 static void glue(gen_, name)(DisasContext *ctx) \
6345 TCGv_ptr ra, rb, rd; \
6346 if (unlikely(!ctx->altivec_enabled)) { \
6347 gen_exception(ctx, POWERPC_EXCP_VPU); \
6348 return; \
6350 ra = gen_avr_ptr(rA(ctx->opcode)); \
6351 rb = gen_avr_ptr(rB(ctx->opcode)); \
6352 rd = gen_avr_ptr(rD(ctx->opcode)); \
6353 gen_helper_##opname (rd, ra, rb); \
6354 tcg_temp_free_ptr(ra); \
6355 tcg_temp_free_ptr(rb); \
6356 tcg_temp_free_ptr(rd); \
6359 #define GEN_VXRFORM(name, opc2, opc3) \
6360 GEN_VXRFORM1(name, name, #name, opc2, opc3) \
6361 GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
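/*
 * GEN_VXRFORM() instantiates both variants of a VXR-form compare: the plain
 * form and the record form (mnemonic suffixed with '.'), the latter selected
 * by bit 4 of opc3.  For example, GEN_VXRFORM(vcmpequb, 3, 0) defines
 * gen_vcmpequb (calling gen_helper_vcmpequb) and gen_vcmpequb_ (calling
 * gen_helper_vcmpequb_dot, which is also expected to update CR6).
 */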
6363 GEN_VXRFORM(vcmpequb, 3, 0)
6364 GEN_VXRFORM(vcmpequh, 3, 1)
6365 GEN_VXRFORM(vcmpequw, 3, 2)
6366 GEN_VXRFORM(vcmpgtsb, 3, 12)
6367 GEN_VXRFORM(vcmpgtsh, 3, 13)
6368 GEN_VXRFORM(vcmpgtsw, 3, 14)
6369 GEN_VXRFORM(vcmpgtub, 3, 8)
6370 GEN_VXRFORM(vcmpgtuh, 3, 9)
6371 GEN_VXRFORM(vcmpgtuw, 3, 10)
6372 GEN_VXRFORM(vcmpeqfp, 3, 3)
6373 GEN_VXRFORM(vcmpgefp, 3, 7)
6374 GEN_VXRFORM(vcmpgtfp, 3, 11)
6375 GEN_VXRFORM(vcmpbfp, 3, 15)
6377 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
6378 static void glue(gen_, name)(DisasContext *ctx) \
6380 TCGv_ptr rd; \
6381 TCGv_i32 simm; \
6382 if (unlikely(!ctx->altivec_enabled)) { \
6383 gen_exception(ctx, POWERPC_EXCP_VPU); \
6384 return; \
6386 simm = tcg_const_i32(SIMM5(ctx->opcode)); \
6387 rd = gen_avr_ptr(rD(ctx->opcode)); \
6388 gen_helper_##name (rd, simm); \
6389 tcg_temp_free_i32(simm); \
6390 tcg_temp_free_ptr(rd); \
6393 GEN_VXFORM_SIMM(vspltisb, 6, 12);
6394 GEN_VXFORM_SIMM(vspltish, 6, 13);
6395 GEN_VXFORM_SIMM(vspltisw, 6, 14);
6397 #define GEN_VXFORM_NOA(name, opc2, opc3) \
6398 static void glue(gen_, name)(DisasContext *ctx) \
6400 TCGv_ptr rb, rd; \
6401 if (unlikely(!ctx->altivec_enabled)) { \
6402 gen_exception(ctx, POWERPC_EXCP_VPU); \
6403 return; \
6405 rb = gen_avr_ptr(rB(ctx->opcode)); \
6406 rd = gen_avr_ptr(rD(ctx->opcode)); \
6407 gen_helper_##name (rd, rb); \
6408 tcg_temp_free_ptr(rb); \
6409 tcg_temp_free_ptr(rd); \
6412 GEN_VXFORM_NOA(vupkhsb, 7, 8);
6413 GEN_VXFORM_NOA(vupkhsh, 7, 9);
6414 GEN_VXFORM_NOA(vupklsb, 7, 10);
6415 GEN_VXFORM_NOA(vupklsh, 7, 11);
6416 GEN_VXFORM_NOA(vupkhpx, 7, 13);
6417 GEN_VXFORM_NOA(vupklpx, 7, 15);
6418 GEN_VXFORM_NOA(vrefp, 5, 4);
6419 GEN_VXFORM_NOA(vrsqrtefp, 5, 5);
6420 GEN_VXFORM_NOA(vexptefp, 5, 6);
6421 GEN_VXFORM_NOA(vlogefp, 5, 7);
6422 GEN_VXFORM_NOA(vrfim, 5, 8);
6423 GEN_VXFORM_NOA(vrfin, 5, 9);
6424 GEN_VXFORM_NOA(vrfip, 5, 10);
6425 GEN_VXFORM_NOA(vrfiz, 5, 11);
6427 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
6428 static void glue(gen_, name)(DisasContext *ctx) \
6430 TCGv_ptr rd; \
6431 TCGv_i32 simm; \
6432 if (unlikely(!ctx->altivec_enabled)) { \
6433 gen_exception(ctx, POWERPC_EXCP_VPU); \
6434 return; \
6436 simm = tcg_const_i32(SIMM5(ctx->opcode)); \
6437 rd = gen_avr_ptr(rD(ctx->opcode)); \
6438 gen_helper_##name (rd, simm); \
6439 tcg_temp_free_i32(simm); \
6440 tcg_temp_free_ptr(rd); \
6443 #define GEN_VXFORM_UIMM(name, opc2, opc3) \
6444 static void glue(gen_, name)(DisasContext *ctx) \
6446 TCGv_ptr rb, rd; \
6447 TCGv_i32 uimm; \
6448 if (unlikely(!ctx->altivec_enabled)) { \
6449 gen_exception(ctx, POWERPC_EXCP_VPU); \
6450 return; \
6452 uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
6453 rb = gen_avr_ptr(rB(ctx->opcode)); \
6454 rd = gen_avr_ptr(rD(ctx->opcode)); \
6455 gen_helper_##name (rd, rb, uimm); \
6456 tcg_temp_free_i32(uimm); \
6457 tcg_temp_free_ptr(rb); \
6458 tcg_temp_free_ptr(rd); \
6461 GEN_VXFORM_UIMM(vspltb, 6, 8);
6462 GEN_VXFORM_UIMM(vsplth, 6, 9);
6463 GEN_VXFORM_UIMM(vspltw, 6, 10);
6464 GEN_VXFORM_UIMM(vcfux, 5, 12);
6465 GEN_VXFORM_UIMM(vcfsx, 5, 13);
6466 GEN_VXFORM_UIMM(vctuxs, 5, 14);
6467 GEN_VXFORM_UIMM(vctsxs, 5, 15);
6469 static void gen_vsldoi(DisasContext *ctx)
6471 TCGv_ptr ra, rb, rd;
6472 TCGv_i32 sh;
6473 if (unlikely(!ctx->altivec_enabled)) {
6474 gen_exception(ctx, POWERPC_EXCP_VPU);
6475 return;
6477 ra = gen_avr_ptr(rA(ctx->opcode));
6478 rb = gen_avr_ptr(rB(ctx->opcode));
6479 rd = gen_avr_ptr(rD(ctx->opcode));
6480 sh = tcg_const_i32(VSH(ctx->opcode));
6481 gen_helper_vsldoi (rd, ra, rb, sh);
6482 tcg_temp_free_ptr(ra);
6483 tcg_temp_free_ptr(rb);
6484 tcg_temp_free_ptr(rd);
6485 tcg_temp_free_i32(sh);
6488 #define GEN_VAFORM_PAIRED(name0, name1, opc2) \
6489 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
6491 TCGv_ptr ra, rb, rc, rd; \
6492 if (unlikely(!ctx->altivec_enabled)) { \
6493 gen_exception(ctx, POWERPC_EXCP_VPU); \
6494 return; \
6496 ra = gen_avr_ptr(rA(ctx->opcode)); \
6497 rb = gen_avr_ptr(rB(ctx->opcode)); \
6498 rc = gen_avr_ptr(rC(ctx->opcode)); \
6499 rd = gen_avr_ptr(rD(ctx->opcode)); \
6500 if (Rc(ctx->opcode)) { \
6501 gen_helper_##name1 (rd, ra, rb, rc); \
6502 } else { \
6503 gen_helper_##name0 (rd, ra, rb, rc); \
6505 tcg_temp_free_ptr(ra); \
6506 tcg_temp_free_ptr(rb); \
6507 tcg_temp_free_ptr(rc); \
6508 tcg_temp_free_ptr(rd); \
6511 GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16)
6513 static void gen_vmladduhm(DisasContext *ctx)
6515 TCGv_ptr ra, rb, rc, rd;
6516 if (unlikely(!ctx->altivec_enabled)) {
6517 gen_exception(ctx, POWERPC_EXCP_VPU);
6518 return;
6520 ra = gen_avr_ptr(rA(ctx->opcode));
6521 rb = gen_avr_ptr(rB(ctx->opcode));
6522 rc = gen_avr_ptr(rC(ctx->opcode));
6523 rd = gen_avr_ptr(rD(ctx->opcode));
6524 gen_helper_vmladduhm(rd, ra, rb, rc);
6525 tcg_temp_free_ptr(ra);
6526 tcg_temp_free_ptr(rb);
6527 tcg_temp_free_ptr(rc);
6528 tcg_temp_free_ptr(rd);
6531 GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18)
6532 GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19)
6533 GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20)
6534 GEN_VAFORM_PAIRED(vsel, vperm, 21)
6535 GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23)
6537 /*** SPE extension ***/
6538 /* Register moves */
6541 static inline void gen_evmra(DisasContext *ctx)
6544 if (unlikely(!ctx->spe_enabled)) {
6545 gen_exception(ctx, POWERPC_EXCP_APU);
6546 return;
6549 #if defined(TARGET_PPC64)
6550 /* rD := rA */
6551 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6553 /* spe_acc := rA */
6554 tcg_gen_st_i64(cpu_gpr[rA(ctx->opcode)],
6555 cpu_env,
6556 offsetof(CPUState, spe_acc));
6557 #else
6558 TCGv_i64 tmp = tcg_temp_new_i64();
6560 /* tmp := rA_lo + rA_hi << 32 */
6561 tcg_gen_concat_i32_i64(tmp, cpu_gpr[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6563 /* spe_acc := tmp */
6564 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));
6565 tcg_temp_free_i64(tmp);
6567 /* rD := rA */
6568 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6569 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6570 #endif
6573 static inline void gen_load_gpr64(TCGv_i64 t, int reg)
6575 #if defined(TARGET_PPC64)
6576 tcg_gen_mov_i64(t, cpu_gpr[reg]);
6577 #else
6578 tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
6579 #endif
6582 static inline void gen_store_gpr64(int reg, TCGv_i64 t)
6584 #if defined(TARGET_PPC64)
6585 tcg_gen_mov_i64(cpu_gpr[reg], t);
6586 #else
6587 TCGv_i64 tmp = tcg_temp_new_i64();
6588 tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
6589 tcg_gen_shri_i64(tmp, t, 32);
6590 tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
6591 tcg_temp_free_i64(tmp);
6592 #endif
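/*
 * SPE treats each GPR as 64 bits wide.  On 32-bit targets the upper half
 * lives in the separate cpu_gprh array, so gen_load_gpr64()/gen_store_gpr64()
 * pack and unpack the two 32-bit halves with concat/shift; on 64-bit targets
 * they reduce to plain moves.  The GEN_SPE() macro below pairs two handlers
 * that share an opcode slot and dispatches between them on the Rc bit.
 */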
6595 #define GEN_SPE(name0, name1, opc2, opc3, inval, type) \
6596 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
6598 if (Rc(ctx->opcode)) \
6599 gen_##name1(ctx); \
6600 else \
6601 gen_##name0(ctx); \
6604 /* Handler for undefined SPE opcodes */
6605 static inline void gen_speundef(DisasContext *ctx)
6607 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6610 /* SPE logic */
6611 #if defined(TARGET_PPC64)
6612 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
6613 static inline void gen_##name(DisasContext *ctx) \
6615 if (unlikely(!ctx->spe_enabled)) { \
6616 gen_exception(ctx, POWERPC_EXCP_APU); \
6617 return; \
6619 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6620 cpu_gpr[rB(ctx->opcode)]); \
6622 #else
6623 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
6624 static inline void gen_##name(DisasContext *ctx) \
6626 if (unlikely(!ctx->spe_enabled)) { \
6627 gen_exception(ctx, POWERPC_EXCP_APU); \
6628 return; \
6630 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6631 cpu_gpr[rB(ctx->opcode)]); \
6632 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6633 cpu_gprh[rB(ctx->opcode)]); \
6635 #endif
6637 GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
6638 GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
6639 GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
6640 GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
6641 GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
6642 GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
6643 GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
6644 GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);
6646 /* SPE logic immediate */
6647 #if defined(TARGET_PPC64)
6648 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6649 static inline void gen_##name(DisasContext *ctx) \
6651 if (unlikely(!ctx->spe_enabled)) { \
6652 gen_exception(ctx, POWERPC_EXCP_APU); \
6653 return; \
6655 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6656 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6657 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6658 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6659 tcg_opi(t0, t0, rB(ctx->opcode)); \
6660 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6661 tcg_gen_trunc_i64_i32(t1, t2); \
6662 tcg_temp_free_i64(t2); \
6663 tcg_opi(t1, t1, rB(ctx->opcode)); \
6664 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6665 tcg_temp_free_i32(t0); \
6666 tcg_temp_free_i32(t1); \
6668 #else
6669 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6670 static inline void gen_##name(DisasContext *ctx) \
6672 if (unlikely(!ctx->spe_enabled)) { \
6673 gen_exception(ctx, POWERPC_EXCP_APU); \
6674 return; \
6676 tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6677 rB(ctx->opcode)); \
6678 tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6679 rB(ctx->opcode)); \
6681 #endif
6682 GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
6683 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
6684 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
6685 GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
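/*
 * As an illustration, on a 32-bit target the instantiation
 * GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32) above expands roughly to:
 *
 *     static inline void gen_evslwi(DisasContext *ctx)
 *     {
 *         if (unlikely(!ctx->spe_enabled)) {
 *             gen_exception(ctx, POWERPC_EXCP_APU);
 *             return;
 *         }
 *         tcg_gen_shli_i32(cpu_gpr[rD(ctx->opcode)],
 *                          cpu_gpr[rA(ctx->opcode)], rB(ctx->opcode));
 *         tcg_gen_shli_i32(cpu_gprh[rD(ctx->opcode)],
 *                          cpu_gprh[rA(ctx->opcode)], rB(ctx->opcode));
 *     }
 *
 * i.e. the rB field is used as an immediate count applied independently to
 * both 32-bit halves of the 64-bit SPE register.
 */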
6687 /* SPE arithmetic */
6688 #if defined(TARGET_PPC64)
6689 #define GEN_SPEOP_ARITH1(name, tcg_op) \
6690 static inline void gen_##name(DisasContext *ctx) \
6692 if (unlikely(!ctx->spe_enabled)) { \
6693 gen_exception(ctx, POWERPC_EXCP_APU); \
6694 return; \
6696 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6697 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6698 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6699 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6700 tcg_op(t0, t0); \
6701 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6702 tcg_gen_trunc_i64_i32(t1, t2); \
6703 tcg_temp_free_i64(t2); \
6704 tcg_op(t1, t1); \
6705 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6706 tcg_temp_free_i32(t0); \
6707 tcg_temp_free_i32(t1); \
6709 #else
6710 #define GEN_SPEOP_ARITH1(name, tcg_op) \
6711 static inline void gen_##name(DisasContext *ctx) \
6713 if (unlikely(!ctx->spe_enabled)) { \
6714 gen_exception(ctx, POWERPC_EXCP_APU); \
6715 return; \
6717 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); \
6718 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); \
6720 #endif
6722 static inline void gen_op_evabs(TCGv_i32 ret, TCGv_i32 arg1)
6724 int l1 = gen_new_label();
6725 int l2 = gen_new_label();
6727 tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
6728 tcg_gen_neg_i32(ret, arg1);
6729 tcg_gen_br(l2);
6730 gen_set_label(l1);
6731 tcg_gen_mov_i32(ret, arg1);
6732 gen_set_label(l2);
6734 GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
6735 GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
6736 GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
6737 GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
6738 static inline void gen_op_evrndw(TCGv_i32 ret, TCGv_i32 arg1)
6740 tcg_gen_addi_i32(ret, arg1, 0x8000);
6741 tcg_gen_ext16u_i32(ret, ret);
6743 GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
6744 GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
6745 GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
6747 #if defined(TARGET_PPC64)
6748 #define GEN_SPEOP_ARITH2(name, tcg_op) \
6749 static inline void gen_##name(DisasContext *ctx) \
6751 if (unlikely(!ctx->spe_enabled)) { \
6752 gen_exception(ctx, POWERPC_EXCP_APU); \
6753 return; \
6755 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6756 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6757 TCGv_i32 t2 = tcg_temp_local_new_i32(); \
6758 TCGv_i64 t3 = tcg_temp_local_new_i64(); \
6759 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6760 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]); \
6761 tcg_op(t0, t0, t2); \
6762 tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32); \
6763 tcg_gen_trunc_i64_i32(t1, t3); \
6764 tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32); \
6765 tcg_gen_trunc_i64_i32(t2, t3); \
6766 tcg_temp_free_i64(t3); \
6767 tcg_op(t1, t1, t2); \
6768 tcg_temp_free_i32(t2); \
6769 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6770 tcg_temp_free_i32(t0); \
6771 tcg_temp_free_i32(t1); \
6773 #else
6774 #define GEN_SPEOP_ARITH2(name, tcg_op) \
6775 static inline void gen_##name(DisasContext *ctx) \
6777 if (unlikely(!ctx->spe_enabled)) { \
6778 gen_exception(ctx, POWERPC_EXCP_APU); \
6779 return; \
6781 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6782 cpu_gpr[rB(ctx->opcode)]); \
6783 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6784 cpu_gprh[rB(ctx->opcode)]); \
6786 #endif
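/*
 * The evsrwu/evsrws/evslw helpers below mask the shift count to 6 bits and
 * then branch: TCG shift operations are undefined for counts >= 32, so the
 * 32..63 range is handled explicitly and produces a zero result.
 */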
6788 static inline void gen_op_evsrwu(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6790 TCGv_i32 t0;
6791 int l1, l2;
6793 l1 = gen_new_label();
6794 l2 = gen_new_label();
6795 t0 = tcg_temp_local_new_i32();
6796 /* Masking to 6 bits (not 5) is intentional: counts of 32..63 are handled by the branch below */
6797 tcg_gen_andi_i32(t0, arg2, 0x3F);
6798 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6799 tcg_gen_shr_i32(ret, arg1, t0);
6800 tcg_gen_br(l2);
6801 gen_set_label(l1);
6802 tcg_gen_movi_i32(ret, 0);
6803 gen_set_label(l2);
6804 tcg_temp_free_i32(t0);
6806 GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
6807 static inline void gen_op_evsrws(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6809 TCGv_i32 t0;
6810 int l1, l2;
6812 l1 = gen_new_label();
6813 l2 = gen_new_label();
6814 t0 = tcg_temp_local_new_i32();
6815 /* Masking to 6 bits (not 5) is intentional: counts of 32..63 are handled by the branch below */
6816 tcg_gen_andi_i32(t0, arg2, 0x3F);
6817 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6818 tcg_gen_sar_i32(ret, arg1, t0);
6819 tcg_gen_br(l2);
6820 gen_set_label(l1);
6821 tcg_gen_movi_i32(ret, 0);
6822 gen_set_label(l2);
6823 tcg_temp_free_i32(t0);
6825 GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
6826 static inline void gen_op_evslw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6828 TCGv_i32 t0;
6829 int l1, l2;
6831 l1 = gen_new_label();
6832 l2 = gen_new_label();
6833 t0 = tcg_temp_local_new_i32();
6834 /* Masking to 6 bits (not 5) is intentional: counts of 32..63 are handled by the branch below */
6835 tcg_gen_andi_i32(t0, arg2, 0x3F);
6836 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6837 tcg_gen_shl_i32(ret, arg1, t0);
6838 tcg_gen_br(l2);
6839 gen_set_label(l1);
6840 tcg_gen_movi_i32(ret, 0);
6841 gen_set_label(l2);
6842 tcg_temp_free_i32(t0);
6844 GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
6845 static inline void gen_op_evrlw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6847 TCGv_i32 t0 = tcg_temp_new_i32();
6848 tcg_gen_andi_i32(t0, arg2, 0x1F);
6849 tcg_gen_rotl_i32(ret, arg1, t0);
6850 tcg_temp_free_i32(t0);
6852 GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
6853 static inline void gen_evmergehi(DisasContext *ctx)
6855 if (unlikely(!ctx->spe_enabled)) {
6856 gen_exception(ctx, POWERPC_EXCP_APU);
6857 return;
6859 #if defined(TARGET_PPC64)
6860 TCGv t0 = tcg_temp_new();
6861 TCGv t1 = tcg_temp_new();
6862 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
6863 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
6864 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6865 tcg_temp_free(t0);
6866 tcg_temp_free(t1);
6867 #else
6868 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
6869 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6870 #endif
6872 GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
6873 static inline void gen_op_evsubf(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6875 tcg_gen_sub_i32(ret, arg2, arg1);
6877 GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);
6879 /* SPE arithmetic immediate */
6880 #if defined(TARGET_PPC64)
6881 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
6882 static inline void gen_##name(DisasContext *ctx) \
6884 if (unlikely(!ctx->spe_enabled)) { \
6885 gen_exception(ctx, POWERPC_EXCP_APU); \
6886 return; \
6888 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6889 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6890 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6891 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
6892 tcg_op(t0, t0, rA(ctx->opcode)); \
6893 tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
6894 tcg_gen_trunc_i64_i32(t1, t2); \
6895 tcg_temp_free_i64(t2); \
6896 tcg_op(t1, t1, rA(ctx->opcode)); \
6897 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6898 tcg_temp_free_i32(t0); \
6899 tcg_temp_free_i32(t1); \
6901 #else
6902 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
6903 static inline void gen_##name(DisasContext *ctx) \
6905 if (unlikely(!ctx->spe_enabled)) { \
6906 gen_exception(ctx, POWERPC_EXCP_APU); \
6907 return; \
6909 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
6910 rA(ctx->opcode)); \
6911 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)], \
6912 rA(ctx->opcode)); \
6914 #endif
6915 GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
6916 GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);
6918 /* SPE comparison */
6919 #if defined(TARGET_PPC64)
6920 #define GEN_SPEOP_COMP(name, tcg_cond) \
6921 static inline void gen_##name(DisasContext *ctx) \
6923 if (unlikely(!ctx->spe_enabled)) { \
6924 gen_exception(ctx, POWERPC_EXCP_APU); \
6925 return; \
6927 int l1 = gen_new_label(); \
6928 int l2 = gen_new_label(); \
6929 int l3 = gen_new_label(); \
6930 int l4 = gen_new_label(); \
6931 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6932 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6933 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6934 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6935 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
6936 tcg_gen_brcond_i32(tcg_cond, t0, t1, l1); \
6937 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \
6938 tcg_gen_br(l2); \
6939 gen_set_label(l1); \
6940 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
6941 CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
6942 gen_set_label(l2); \
6943 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6944 tcg_gen_trunc_i64_i32(t0, t2); \
6945 tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
6946 tcg_gen_trunc_i64_i32(t1, t2); \
6947 tcg_temp_free_i64(t2); \
6948 tcg_gen_brcond_i32(tcg_cond, t0, t1, l3); \
6949 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
6950 ~(CRF_CH | CRF_CH_AND_CL)); \
6951 tcg_gen_br(l4); \
6952 gen_set_label(l3); \
6953 tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
6954 CRF_CH | CRF_CH_OR_CL); \
6955 gen_set_label(l4); \
6956 tcg_temp_free_i32(t0); \
6957 tcg_temp_free_i32(t1); \
6959 #else
6960 #define GEN_SPEOP_COMP(name, tcg_cond) \
6961 static inline void gen_##name(DisasContext *ctx) \
6963 if (unlikely(!ctx->spe_enabled)) { \
6964 gen_exception(ctx, POWERPC_EXCP_APU); \
6965 return; \
6967 int l1 = gen_new_label(); \
6968 int l2 = gen_new_label(); \
6969 int l3 = gen_new_label(); \
6970 int l4 = gen_new_label(); \
6972 tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)], \
6973 cpu_gpr[rB(ctx->opcode)], l1); \
6974 tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0); \
6975 tcg_gen_br(l2); \
6976 gen_set_label(l1); \
6977 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
6978 CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
6979 gen_set_label(l2); \
6980 tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)], \
6981 cpu_gprh[rB(ctx->opcode)], l3); \
6982 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
6983 ~(CRF_CH | CRF_CH_AND_CL)); \
6984 tcg_gen_br(l4); \
6985 gen_set_label(l3); \
6986 tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
6987 CRF_CH | CRF_CH_OR_CL); \
6988 gen_set_label(l4); \
6990 #endif
6991 GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
6992 GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
6993 GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
6994 GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
6995 GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
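/*
 * SPE vector compares set a whole CR field rather than a single bit: CH
 * reflects the comparison of the upper 32-bit elements, CL the lower ones,
 * and the remaining bits hold their OR and AND (CRF_CH_OR_CL,
 * CRF_CH_AND_CL), which is what the and/or fixups above maintain.
 */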
6997 /* SPE misc */
6998 static inline void gen_brinc(DisasContext *ctx)
7000 /* Note: brinc is usable even if SPE is disabled */
7001 gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
7002 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7004 static inline void gen_evmergelo(DisasContext *ctx)
7006 if (unlikely(!ctx->spe_enabled)) {
7007 gen_exception(ctx, POWERPC_EXCP_APU);
7008 return;
7010 #if defined(TARGET_PPC64)
7011 TCGv t0 = tcg_temp_new();
7012 TCGv t1 = tcg_temp_new();
7013 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
7014 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
7015 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7016 tcg_temp_free(t0);
7017 tcg_temp_free(t1);
7018 #else
7019 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7020 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7021 #endif
7023 static inline void gen_evmergehilo(DisasContext *ctx)
7025 if (unlikely(!ctx->spe_enabled)) {
7026 gen_exception(ctx, POWERPC_EXCP_APU);
7027 return;
7029 #if defined(TARGET_PPC64)
7030 TCGv t0 = tcg_temp_new();
7031 TCGv t1 = tcg_temp_new();
7032 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
7033 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7034 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7035 tcg_temp_free(t0);
7036 tcg_temp_free(t1);
7037 #else
7038 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7039 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7040 #endif
7042 static inline void gen_evmergelohi(DisasContext *ctx)
7044 if (unlikely(!ctx->spe_enabled)) {
7045 gen_exception(ctx, POWERPC_EXCP_APU);
7046 return;
7048 #if defined(TARGET_PPC64)
7049 TCGv t0 = tcg_temp_new();
7050 TCGv t1 = tcg_temp_new();
7051 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
7052 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
7053 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7054 tcg_temp_free(t0);
7055 tcg_temp_free(t1);
7056 #else
7057 if (rD(ctx->opcode) == rA(ctx->opcode)) {
7058 TCGv_i32 tmp = tcg_temp_new_i32();
7059 tcg_gen_mov_i32(tmp, cpu_gpr[rA(ctx->opcode)]);
7060 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7061 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], tmp);
7062 tcg_temp_free_i32(tmp);
7063 } else {
7064 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7065 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7067 #endif
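/*
 * evsplati (below) sign-extends its 5-bit immediate (the shift-left /
 * arithmetic-shift-right pair performs the sign extension) and replicates it
 * into both 32-bit halves of rD; evsplatfi instead places the raw 5-bit
 * field in the top bits of each half (imm << 27), leaving the rest zero.
 */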
7069 static inline void gen_evsplati(DisasContext *ctx)
7071 uint64_t imm = ((int32_t)(rA(ctx->opcode) << 27)) >> 27;
7073 #if defined(TARGET_PPC64)
7074 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
7075 #else
7076 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
7077 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
7078 #endif
7080 static inline void gen_evsplatfi(DisasContext *ctx)
7082 uint64_t imm = rA(ctx->opcode) << 27;
7084 #if defined(TARGET_PPC64)
7085 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
7086 #else
7087 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
7088 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
7089 #endif
7092 static inline void gen_evsel(DisasContext *ctx)
7094 int l1 = gen_new_label();
7095 int l2 = gen_new_label();
7096 int l3 = gen_new_label();
7097 int l4 = gen_new_label();
7098 TCGv_i32 t0 = tcg_temp_local_new_i32();
7099 #if defined(TARGET_PPC64)
7100 TCGv t1 = tcg_temp_local_new();
7101 TCGv t2 = tcg_temp_local_new();
7102 #endif
7103 tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
7104 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
7105 #if defined(TARGET_PPC64)
7106 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7107 #else
7108 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7109 #endif
7110 tcg_gen_br(l2);
7111 gen_set_label(l1);
7112 #if defined(TARGET_PPC64)
7113 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7114 #else
7115 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7116 #endif
7117 gen_set_label(l2);
7118 tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
7119 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
7120 #if defined(TARGET_PPC64)
7121 tcg_gen_ext32u_tl(t2, cpu_gpr[rA(ctx->opcode)]);
7122 #else
7123 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7124 #endif
7125 tcg_gen_br(l4);
7126 gen_set_label(l3);
7127 #if defined(TARGET_PPC64)
7128 tcg_gen_ext32u_tl(t2, cpu_gpr[rB(ctx->opcode)]);
7129 #else
7130 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7131 #endif
7132 gen_set_label(l4);
7133 tcg_temp_free_i32(t0);
7134 #if defined(TARGET_PPC64)
7135 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
7136 tcg_temp_free(t1);
7137 tcg_temp_free(t2);
7138 #endif
7141 static void gen_evsel0(DisasContext *ctx)
7143 gen_evsel(ctx);
7146 static void gen_evsel1(DisasContext *ctx)
7148 gen_evsel(ctx);
7151 static void gen_evsel2(DisasContext *ctx)
7153 gen_evsel(ctx);
7156 static void gen_evsel3(DisasContext *ctx)
7158 gen_evsel(ctx);
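/*
 * evsel reads the condition register field to test from the low bits of the
 * opcode ((ctx->opcode & 0x07) in gen_evsel() above).  The gen_evsel0..3
 * wrappers appear to exist only to give the opcode table one entry per
 * minor-opcode value that the crfS field spills into; they all defer to the
 * same implementation.
 */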
7161 /* Multiply */
7163 static inline void gen_evmwumi(DisasContext *ctx)
7165 TCGv_i64 t0, t1;
7167 if (unlikely(!ctx->spe_enabled)) {
7168 gen_exception(ctx, POWERPC_EXCP_APU);
7169 return;
7172 t0 = tcg_temp_new_i64();
7173 t1 = tcg_temp_new_i64();
7175 /* t0 := rA; t1 := rB */
7176 #if defined(TARGET_PPC64)
7177 tcg_gen_ext32u_tl(t0, cpu_gpr[rA(ctx->opcode)]);
7178 tcg_gen_ext32u_tl(t1, cpu_gpr[rB(ctx->opcode)]);
7179 #else
7180 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
7181 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
7182 #endif
7184 tcg_gen_mul_i64(t0, t0, t1); /* t0 := rA * rB */
7186 gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */
7188 tcg_temp_free_i64(t0);
7189 tcg_temp_free_i64(t1);
7192 static inline void gen_evmwumia(DisasContext *ctx)
7194 TCGv_i64 tmp;
7196 if (unlikely(!ctx->spe_enabled)) {
7197 gen_exception(ctx, POWERPC_EXCP_APU);
7198 return;
7201 gen_evmwumi(ctx); /* rD := rA * rB */
7203 tmp = tcg_temp_new_i64();
7205 /* acc := rD */
7206 gen_load_gpr64(tmp, rD(ctx->opcode));
7207 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));
7208 tcg_temp_free_i64(tmp);
7211 static inline void gen_evmwumiaa(DisasContext *ctx)
7213 TCGv_i64 acc;
7214 TCGv_i64 tmp;
7216 if (unlikely(!ctx->spe_enabled)) {
7217 gen_exception(ctx, POWERPC_EXCP_APU);
7218 return;
7221 gen_evmwumi(ctx); /* rD := rA * rB */
7223 acc = tcg_temp_new_i64();
7224 tmp = tcg_temp_new_i64();
7226 /* tmp := rD */
7227 gen_load_gpr64(tmp, rD(ctx->opcode));
7229 /* Load acc */
7230 tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUState, spe_acc));
7232 /* acc := tmp + acc */
7233 tcg_gen_add_i64(acc, acc, tmp);
7235 /* Store acc */
7236 tcg_gen_st_i64(acc, cpu_env, offsetof(CPUState, spe_acc));
7238 /* rD := acc */
7239 gen_store_gpr64(rD(ctx->opcode), acc);
7241 tcg_temp_free_i64(acc);
7242 tcg_temp_free_i64(tmp);
7245 static inline void gen_evmwsmi(DisasContext *ctx)
7247 TCGv_i64 t0, t1;
7249 if (unlikely(!ctx->spe_enabled)) {
7250 gen_exception(ctx, POWERPC_EXCP_APU);
7251 return;
7254 t0 = tcg_temp_new_i64();
7255 t1 = tcg_temp_new_i64();
7257 /* t0 := rA; t1 := rB */
7258 #if defined(TARGET_PPC64)
7259 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
7260 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
7261 #else
7262 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
7263 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
7264 #endif
7266 tcg_gen_mul_i64(t0, t0, t1); /* t0 := rA * rB */
7268 gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */
7270 tcg_temp_free_i64(t0);
7271 tcg_temp_free_i64(t1);
7274 static inline void gen_evmwsmia(DisasContext *ctx)
7276 TCGv_i64 tmp;
7278 gen_evmwsmi(ctx); /* rD := rA * rB */
7280 tmp = tcg_temp_new_i64();
7282 /* acc := rD */
7283 gen_load_gpr64(tmp, rD(ctx->opcode));
7284 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));
7286 tcg_temp_free_i64(tmp);
7289 static inline void gen_evmwsmiaa(DisasContext *ctx)
7291 TCGv_i64 acc;
7292 TCGv_i64 tmp;
7294 gen_evmwsmi(ctx); /* rD := rA * rB */
7296 acc = tcg_temp_new_i64();
7297 tmp = tcg_temp_new_i64();
7299 /* tmp := rD */
7300 gen_load_gpr64(tmp, rD(ctx->opcode));
7302 /* Load acc */
7303 tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUState, spe_acc));
7305 /* acc := tmp + acc */
7306 tcg_gen_add_i64(acc, acc, tmp);
7308 /* Store acc */
7309 tcg_gen_st_i64(acc, cpu_env, offsetof(CPUState, spe_acc));
7311 /* rD := acc */
7312 gen_store_gpr64(rD(ctx->opcode), acc);
7314 tcg_temp_free_i64(acc);
7315 tcg_temp_free_i64(tmp);
7318 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE); ////
7319 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE);
7320 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE); ////
7321 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE);
7322 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE); ////
7323 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE); ////
7324 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE); ////
7325 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE); //
7326 GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, PPC_SPE);
7327 GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE); ////
7328 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE); ////
7329 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE); ////
7330 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE); ////
7331 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE);
7332 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE);
7333 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE);
7334 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE); ////
7335 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE); ////
7336 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE); ////
7337 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE);
7338 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE); ////
7339 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE);
7340 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE); //
7341 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE);
7342 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE); ////
7343 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE); ////
7344 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); ////
7345 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); ////
7346 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); ////
7348 /* SPE load and stores */
7349 static inline void gen_addr_spe_imm_index(DisasContext *ctx, TCGv EA, int sh)
7351 target_ulong uimm = rB(ctx->opcode);
7353 if (rA(ctx->opcode) == 0) {
7354 tcg_gen_movi_tl(EA, uimm << sh);
7355 } else {
7356 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh);
7357 #if defined(TARGET_PPC64)
7358 if (!ctx->sf_mode) {
7359 tcg_gen_ext32u_tl(EA, EA);
7361 #endif
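/*
 * For SPE loads and stores the rB field is reused as an unsigned immediate
 * offset, scaled left by 'sh' (log2 of the access size) before being added
 * to rA (or used directly when rA is 0).  GEN_SPEOP_LDST() further down
 * selects this immediate-indexed form when Rc is set and the usual
 * register-indexed address generation otherwise.
 */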
7365 static inline void gen_op_evldd(DisasContext *ctx, TCGv addr)
7367 #if defined(TARGET_PPC64)
7368 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7369 #else
7370 TCGv_i64 t0 = tcg_temp_new_i64();
7371 gen_qemu_ld64(ctx, t0, addr);
7372 tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0);
7373 tcg_gen_shri_i64(t0, t0, 32);
7374 tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0);
7375 tcg_temp_free_i64(t0);
7376 #endif
7379 static inline void gen_op_evldw(DisasContext *ctx, TCGv addr)
7381 #if defined(TARGET_PPC64)
7382 TCGv t0 = tcg_temp_new();
7383 gen_qemu_ld32u(ctx, t0, addr);
7384 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7385 gen_addr_add(ctx, addr, addr, 4);
7386 gen_qemu_ld32u(ctx, t0, addr);
7387 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7388 tcg_temp_free(t0);
7389 #else
7390 gen_qemu_ld32u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7391 gen_addr_add(ctx, addr, addr, 4);
7392 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7393 #endif
7396 static inline void gen_op_evldh(DisasContext *ctx, TCGv addr)
7398 TCGv t0 = tcg_temp_new();
7399 #if defined(TARGET_PPC64)
7400 gen_qemu_ld16u(ctx, t0, addr);
7401 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7402 gen_addr_add(ctx, addr, addr, 2);
7403 gen_qemu_ld16u(ctx, t0, addr);
7404 tcg_gen_shli_tl(t0, t0, 32);
7405 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7406 gen_addr_add(ctx, addr, addr, 2);
7407 gen_qemu_ld16u(ctx, t0, addr);
7408 tcg_gen_shli_tl(t0, t0, 16);
7409 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7410 gen_addr_add(ctx, addr, addr, 2);
7411 gen_qemu_ld16u(ctx, t0, addr);
7412 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7413 #else
7414 gen_qemu_ld16u(ctx, t0, addr);
7415 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7416 gen_addr_add(ctx, addr, addr, 2);
7417 gen_qemu_ld16u(ctx, t0, addr);
7418 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
7419 gen_addr_add(ctx, addr, addr, 2);
7420 gen_qemu_ld16u(ctx, t0, addr);
7421 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7422 gen_addr_add(ctx, addr, addr, 2);
7423 gen_qemu_ld16u(ctx, t0, addr);
7424 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7425 #endif
7426 tcg_temp_free(t0);
7429 static inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr)
7431 TCGv t0 = tcg_temp_new();
7432 gen_qemu_ld16u(ctx, t0, addr);
7433 #if defined(TARGET_PPC64)
7434 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7435 tcg_gen_shli_tl(t0, t0, 16);
7436 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7437 #else
7438 tcg_gen_shli_tl(t0, t0, 16);
7439 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7440 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7441 #endif
7442 tcg_temp_free(t0);
7445 static inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr)
7447 TCGv t0 = tcg_temp_new();
7448 gen_qemu_ld16u(ctx, t0, addr);
7449 #if defined(TARGET_PPC64)
7450 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7451 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7452 #else
7453 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7454 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7455 #endif
7456 tcg_temp_free(t0);
7459 static inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr)
7461 TCGv t0 = tcg_temp_new();
7462 gen_qemu_ld16s(ctx, t0, addr);
7463 #if defined(TARGET_PPC64)
7464 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7465 tcg_gen_ext32u_tl(t0, t0);
7466 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7467 #else
7468 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7469 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7470 #endif
7471 tcg_temp_free(t0);
7474 static inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr)
7476 TCGv t0 = tcg_temp_new();
7477 #if defined(TARGET_PPC64)
7478 gen_qemu_ld16u(ctx, t0, addr);
7479 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7480 gen_addr_add(ctx, addr, addr, 2);
7481 gen_qemu_ld16u(ctx, t0, addr);
7482 tcg_gen_shli_tl(t0, t0, 16);
7483 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7484 #else
7485 gen_qemu_ld16u(ctx, t0, addr);
7486 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7487 gen_addr_add(ctx, addr, addr, 2);
7488 gen_qemu_ld16u(ctx, t0, addr);
7489 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7490 #endif
7491 tcg_temp_free(t0);
7494 static inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr)
7496 #if defined(TARGET_PPC64)
7497 TCGv t0 = tcg_temp_new();
7498 gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7499 gen_addr_add(ctx, addr, addr, 2);
7500 gen_qemu_ld16u(ctx, t0, addr);
7501 tcg_gen_shli_tl(t0, t0, 32);
7502 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7503 tcg_temp_free(t0);
7504 #else
7505 gen_qemu_ld16u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7506 gen_addr_add(ctx, addr, addr, 2);
7507 gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7508 #endif
7511 static inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr)
7513 #if defined(TARGET_PPC64)
7514 TCGv t0 = tcg_temp_new();
7515 gen_qemu_ld16s(ctx, t0, addr);
7516 tcg_gen_ext32u_tl(cpu_gpr[rD(ctx->opcode)], t0);
7517 gen_addr_add(ctx, addr, addr, 2);
7518 gen_qemu_ld16s(ctx, t0, addr);
7519 tcg_gen_shli_tl(t0, t0, 32);
7520 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7521 tcg_temp_free(t0);
7522 #else
7523 gen_qemu_ld16s(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7524 gen_addr_add(ctx, addr, addr, 2);
7525 gen_qemu_ld16s(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7526 #endif
7529 static inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr)
7531 TCGv t0 = tcg_temp_new();
7532 gen_qemu_ld32u(ctx, t0, addr);
7533 #if defined(TARGET_PPC64)
7534 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7535 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7536 #else
7537 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7538 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7539 #endif
7540 tcg_temp_free(t0);
7543 static inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr)
7545 TCGv t0 = tcg_temp_new();
7546 #if defined(TARGET_PPC64)
7547 gen_qemu_ld16u(ctx, t0, addr);
7548 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7549 tcg_gen_shli_tl(t0, t0, 32);
7550 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7551 gen_addr_add(ctx, addr, addr, 2);
7552 gen_qemu_ld16u(ctx, t0, addr);
7553 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7554 tcg_gen_shli_tl(t0, t0, 16);
7555 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7556 #else
7557 gen_qemu_ld16u(ctx, t0, addr);
7558 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7559 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
7560 gen_addr_add(ctx, addr, addr, 2);
7561 gen_qemu_ld16u(ctx, t0, addr);
7562 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7563 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7564 #endif
7565 tcg_temp_free(t0);
7568 static inline void gen_op_evstdd(DisasContext *ctx, TCGv addr)
7570 #if defined(TARGET_PPC64)
7571 gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7572 #else
7573 TCGv_i64 t0 = tcg_temp_new_i64();
7574 tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)], cpu_gprh[rS(ctx->opcode)]);
7575 gen_qemu_st64(ctx, t0, addr);
7576 tcg_temp_free_i64(t0);
7577 #endif
7580 static inline void gen_op_evstdw(DisasContext *ctx, TCGv addr)
7582 #if defined(TARGET_PPC64)
7583 TCGv t0 = tcg_temp_new();
7584 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7585 gen_qemu_st32(ctx, t0, addr);
7586 tcg_temp_free(t0);
7587 #else
7588 gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7589 #endif
7590 gen_addr_add(ctx, addr, addr, 4);
7591 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7594 static inline void gen_op_evstdh(DisasContext *ctx, TCGv addr)
7596 TCGv t0 = tcg_temp_new();
7597 #if defined(TARGET_PPC64)
7598 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
7599 #else
7600 tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
7601 #endif
7602 gen_qemu_st16(ctx, t0, addr);
7603 gen_addr_add(ctx, addr, addr, 2);
7604 #if defined(TARGET_PPC64)
7605 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7606 gen_qemu_st16(ctx, t0, addr);
7607 #else
7608 gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7609 #endif
7610 gen_addr_add(ctx, addr, addr, 2);
7611 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
7612 gen_qemu_st16(ctx, t0, addr);
7613 tcg_temp_free(t0);
7614 gen_addr_add(ctx, addr, addr, 2);
7615 gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7618 static inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr)
7620 TCGv t0 = tcg_temp_new();
7621 #if defined(TARGET_PPC64)
7622 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
7623 #else
7624 tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
7625 #endif
7626 gen_qemu_st16(ctx, t0, addr);
7627 gen_addr_add(ctx, addr, addr, 2);
7628 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
7629 gen_qemu_st16(ctx, t0, addr);
7630 tcg_temp_free(t0);
7633 static inline void gen_op_evstwho(DisasContext *ctx, TCGv addr)
7635 #if defined(TARGET_PPC64)
7636 TCGv t0 = tcg_temp_new();
7637 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7638 gen_qemu_st16(ctx, t0, addr);
7639 tcg_temp_free(t0);
7640 #else
7641 gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7642 #endif
7643 gen_addr_add(ctx, addr, addr, 2);
7644 gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7647 static inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr)
7649 #if defined(TARGET_PPC64)
7650 TCGv t0 = tcg_temp_new();
7651 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7652 gen_qemu_st32(ctx, t0, addr);
7653 tcg_temp_free(t0);
7654 #else
7655 gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7656 #endif
7659 static inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr)
7661 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7664 #define GEN_SPEOP_LDST(name, opc2, sh) \
7665 static void glue(gen_, name)(DisasContext *ctx) \
7667 TCGv t0; \
7668 if (unlikely(!ctx->spe_enabled)) { \
7669 gen_exception(ctx, POWERPC_EXCP_APU); \
7670 return; \
7672 gen_set_access_type(ctx, ACCESS_INT); \
7673 t0 = tcg_temp_new(); \
7674 if (Rc(ctx->opcode)) { \
7675 gen_addr_spe_imm_index(ctx, t0, sh); \
7676 } else { \
7677 gen_addr_reg_index(ctx, t0); \
7679 gen_op_##name(ctx, t0); \
7680 tcg_temp_free(t0); \
7683 GEN_SPEOP_LDST(evldd, 0x00, 3);
7684 GEN_SPEOP_LDST(evldw, 0x01, 3);
7685 GEN_SPEOP_LDST(evldh, 0x02, 3);
7686 GEN_SPEOP_LDST(evlhhesplat, 0x04, 1);
7687 GEN_SPEOP_LDST(evlhhousplat, 0x06, 1);
7688 GEN_SPEOP_LDST(evlhhossplat, 0x07, 1);
7689 GEN_SPEOP_LDST(evlwhe, 0x08, 2);
7690 GEN_SPEOP_LDST(evlwhou, 0x0A, 2);
7691 GEN_SPEOP_LDST(evlwhos, 0x0B, 2);
7692 GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2);
7693 GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2);
7695 GEN_SPEOP_LDST(evstdd, 0x10, 3);
7696 GEN_SPEOP_LDST(evstdw, 0x11, 3);
7697 GEN_SPEOP_LDST(evstdh, 0x12, 3);
7698 GEN_SPEOP_LDST(evstwhe, 0x18, 2);
7699 GEN_SPEOP_LDST(evstwho, 0x1A, 2);
7700 GEN_SPEOP_LDST(evstwwe, 0x1C, 2);
7701 GEN_SPEOP_LDST(evstwwo, 0x1E, 2);
7703 /* Multiply and add - TODO */
7704 #if 0
7705 GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0x00000000, PPC_SPE);
7706 GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0x00000000, PPC_SPE);
7707 GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, PPC_SPE);
7708 GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0x00000000, PPC_SPE);
7709 GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, PPC_SPE);
7710 GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0x00000000, PPC_SPE);
7711 GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0x00000000, PPC_SPE);
7712 GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0x00000000, PPC_SPE);
7713 GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, PPC_SPE);
7714 GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0x00000000, PPC_SPE);
7715 GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, PPC_SPE);
7716 GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0x00000000, PPC_SPE);
7718 GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0x00000000, PPC_SPE);
7719 GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, PPC_SPE);
7720 GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, PPC_SPE);
7721 GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0x00000000, PPC_SPE);
7722 GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0x00000000, PPC_SPE);
7723 GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0x00000000, PPC_SPE);
7724 GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0x00000000, PPC_SPE);
7725 GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, PPC_SPE);
7726 GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, PPC_SPE);
7727 GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0x00000000, PPC_SPE);
7728 GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0x00000000, PPC_SPE);
7729 GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0x00000000, PPC_SPE);
7731 GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, PPC_SPE);
7732 GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, PPC_SPE);
7733 GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, PPC_SPE);
7734 GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, PPC_SPE);
7735 GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, PPC_SPE);
7737 GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, PPC_SPE);
7738 GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0x00000000, PPC_SPE);
7739 GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, PPC_SPE);
7740 GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0x00000000, PPC_SPE);
7741 GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, PPC_SPE);
7742 GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0x00000000, PPC_SPE);
7743 GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, PPC_SPE);
7744 GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0x00000000, PPC_SPE);
7745 GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, PPC_SPE);
7746 GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0x00000000, PPC_SPE);
7747 GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, PPC_SPE);
7748 GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0x00000000, PPC_SPE);
7750 GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, PPC_SPE);
7751 GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, PPC_SPE);
7752 GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0x00000000, PPC_SPE);
7753 GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0x00000000, PPC_SPE);
7755 GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, PPC_SPE);
7756 GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0x00000000, PPC_SPE);
7757 GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, PPC_SPE);
7758 GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0x00000000, PPC_SPE);
7759 GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, PPC_SPE);
7760 GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0x00000000, PPC_SPE);
7761 GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, PPC_SPE);
7762 GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0x00000000, PPC_SPE);
7763 GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, PPC_SPE);
7764 GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0x00000000, PPC_SPE);
7765 GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, PPC_SPE);
7766 GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0x00000000, PPC_SPE);
7768 GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, PPC_SPE);
7769 GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, PPC_SPE);
7770 GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0x00000000, PPC_SPE);
7771 GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, PPC_SPE);
7772 GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
7773 #endif
7775 /*** SPE floating-point extension ***/
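/* The SPE floating-point glue below is built in two flavours.  With
 * TARGET_PPC64 the 64-bit SPE vector fits in a single GPR, so the macros
 * pass cpu_gpr[] values straight to the helpers and only use i32
 * temporaries to narrow or widen the 32-bit halves.  Without it, the
 * vector is split across the low and high 32-bit register halves and
 * gen_load_gpr64()/gen_store_gpr64() assemble and split an i64 temporary
 * instead.  The macro suffixes encode operand widths:
 * GEN_SPEFPUOP_CONV_<result>_<source> for conversions, ARITH2 for
 * two-operand arithmetic, COMP for comparisons that write a CR field. */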
7776 #if defined(TARGET_PPC64)
7777 #define GEN_SPEFPUOP_CONV_32_32(name) \
7778 static inline void gen_##name(DisasContext *ctx) \
7779 { \
7780 TCGv_i32 t0; \
7781 TCGv t1; \
7782 t0 = tcg_temp_new_i32(); \
7783 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7784 gen_helper_##name(t0, t0); \
7785 t1 = tcg_temp_new(); \
7786 tcg_gen_extu_i32_tl(t1, t0); \
7787 tcg_temp_free_i32(t0); \
7788 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7789 0xFFFFFFFF00000000ULL); \
7790 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
7791 tcg_temp_free(t1); \
7792 }
7793 #define GEN_SPEFPUOP_CONV_32_64(name) \
7794 static inline void gen_##name(DisasContext *ctx) \
7795 { \
7796 TCGv_i32 t0; \
7797 TCGv t1; \
7798 t0 = tcg_temp_new_i32(); \
7799 gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]); \
7800 t1 = tcg_temp_new(); \
7801 tcg_gen_extu_i32_tl(t1, t0); \
7802 tcg_temp_free_i32(t0); \
7803 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7804 0xFFFFFFFF00000000ULL); \
7805 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
7806 tcg_temp_free(t1); \
7807 }
7808 #define GEN_SPEFPUOP_CONV_64_32(name) \
7809 static inline void gen_##name(DisasContext *ctx) \
7810 { \
7811 TCGv_i32 t0 = tcg_temp_new_i32(); \
7812 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7813 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0); \
7814 tcg_temp_free_i32(t0); \
7815 }
7816 #define GEN_SPEFPUOP_CONV_64_64(name) \
7817 static inline void gen_##name(DisasContext *ctx) \
7818 { \
7819 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7820 }
7821 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
7822 static inline void gen_##name(DisasContext *ctx) \
7823 { \
7824 TCGv_i32 t0, t1; \
7825 TCGv_i64 t2; \
7826 if (unlikely(!ctx->spe_enabled)) { \
7827 gen_exception(ctx, POWERPC_EXCP_APU); \
7828 return; \
7829 } \
7830 t0 = tcg_temp_new_i32(); \
7831 t1 = tcg_temp_new_i32(); \
7832 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7833 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7834 gen_helper_##name(t0, t0, t1); \
7835 tcg_temp_free_i32(t1); \
7836 t2 = tcg_temp_new(); \
7837 tcg_gen_extu_i32_tl(t2, t0); \
7838 tcg_temp_free_i32(t0); \
7839 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7840 0xFFFFFFFF00000000ULL); \
7841 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t2); \
7842 tcg_temp_free(t2); \
7843 }
7844 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
7845 static inline void gen_##name(DisasContext *ctx) \
7846 { \
7847 if (unlikely(!ctx->spe_enabled)) { \
7848 gen_exception(ctx, POWERPC_EXCP_APU); \
7849 return; \
7850 } \
7851 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7852 cpu_gpr[rB(ctx->opcode)]); \
7853 }
7854 #define GEN_SPEFPUOP_COMP_32(name) \
7855 static inline void gen_##name(DisasContext *ctx) \
7856 { \
7857 TCGv_i32 t0, t1; \
7858 if (unlikely(!ctx->spe_enabled)) { \
7859 gen_exception(ctx, POWERPC_EXCP_APU); \
7860 return; \
7861 } \
7862 t0 = tcg_temp_new_i32(); \
7863 t1 = tcg_temp_new_i32(); \
7864 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7865 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7866 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1); \
7867 tcg_temp_free_i32(t0); \
7868 tcg_temp_free_i32(t1); \
7869 }
7870 #define GEN_SPEFPUOP_COMP_64(name) \
7871 static inline void gen_##name(DisasContext *ctx) \
7872 { \
7873 if (unlikely(!ctx->spe_enabled)) { \
7874 gen_exception(ctx, POWERPC_EXCP_APU); \
7875 return; \
7876 } \
7877 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \
7878 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7879 }
7880 #else
7881 #define GEN_SPEFPUOP_CONV_32_32(name) \
7882 static inline void gen_##name(DisasContext *ctx) \
7883 { \
7884 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7885 }
7886 #define GEN_SPEFPUOP_CONV_32_64(name) \
7887 static inline void gen_##name(DisasContext *ctx) \
7888 { \
7889 TCGv_i64 t0 = tcg_temp_new_i64(); \
7890 gen_load_gpr64(t0, rB(ctx->opcode)); \
7891 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0); \
7892 tcg_temp_free_i64(t0); \
7893 }
7894 #define GEN_SPEFPUOP_CONV_64_32(name) \
7895 static inline void gen_##name(DisasContext *ctx) \
7896 { \
7897 TCGv_i64 t0 = tcg_temp_new_i64(); \
7898 gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]); \
7899 gen_store_gpr64(rD(ctx->opcode), t0); \
7900 tcg_temp_free_i64(t0); \
7901 }
7902 #define GEN_SPEFPUOP_CONV_64_64(name) \
7903 static inline void gen_##name(DisasContext *ctx) \
7904 { \
7905 TCGv_i64 t0 = tcg_temp_new_i64(); \
7906 gen_load_gpr64(t0, rB(ctx->opcode)); \
7907 gen_helper_##name(t0, t0); \
7908 gen_store_gpr64(rD(ctx->opcode), t0); \
7909 tcg_temp_free_i64(t0); \
7910 }
7911 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
7912 static inline void gen_##name(DisasContext *ctx) \
7913 { \
7914 if (unlikely(!ctx->spe_enabled)) { \
7915 gen_exception(ctx, POWERPC_EXCP_APU); \
7916 return; \
7917 } \
7918 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], \
7919 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7920 }
7921 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
7922 static inline void gen_##name(DisasContext *ctx) \
7923 { \
7924 TCGv_i64 t0, t1; \
7925 if (unlikely(!ctx->spe_enabled)) { \
7926 gen_exception(ctx, POWERPC_EXCP_APU); \
7927 return; \
7928 } \
7929 t0 = tcg_temp_new_i64(); \
7930 t1 = tcg_temp_new_i64(); \
7931 gen_load_gpr64(t0, rA(ctx->opcode)); \
7932 gen_load_gpr64(t1, rB(ctx->opcode)); \
7933 gen_helper_##name(t0, t0, t1); \
7934 gen_store_gpr64(rD(ctx->opcode), t0); \
7935 tcg_temp_free_i64(t0); \
7936 tcg_temp_free_i64(t1); \
7937 }
7938 #define GEN_SPEFPUOP_COMP_32(name) \
7939 static inline void gen_##name(DisasContext *ctx) \
7940 { \
7941 if (unlikely(!ctx->spe_enabled)) { \
7942 gen_exception(ctx, POWERPC_EXCP_APU); \
7943 return; \
7944 } \
7945 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \
7946 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7947 }
7948 #define GEN_SPEFPUOP_COMP_64(name) \
7949 static inline void gen_##name(DisasContext *ctx) \
7950 { \
7951 TCGv_i64 t0, t1; \
7952 if (unlikely(!ctx->spe_enabled)) { \
7953 gen_exception(ctx, POWERPC_EXCP_APU); \
7954 return; \
7955 } \
7956 t0 = tcg_temp_new_i64(); \
7957 t1 = tcg_temp_new_i64(); \
7958 gen_load_gpr64(t0, rA(ctx->opcode)); \
7959 gen_load_gpr64(t1, rB(ctx->opcode)); \
7960 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1); \
7961 tcg_temp_free_i64(t0); \
7962 tcg_temp_free_i64(t1); \
7963 }
7964 #endif
7966 /* Single precision floating-point vector operations */
7967 /* Arithmetic */
7968 GEN_SPEFPUOP_ARITH2_64_64(evfsadd);
7969 GEN_SPEFPUOP_ARITH2_64_64(evfssub);
7970 GEN_SPEFPUOP_ARITH2_64_64(evfsmul);
7971 GEN_SPEFPUOP_ARITH2_64_64(evfsdiv);
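/* evfsabs/evfsnabs/evfsneg need no helper: they simply clear, set or flip
 * the sign bit of each 32-bit element with and/or/xor on the guest
 * register (both register halves on 32-bit targets). */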
7972 static inline void gen_evfsabs(DisasContext *ctx)
7973 {
7974 if (unlikely(!ctx->spe_enabled)) {
7975 gen_exception(ctx, POWERPC_EXCP_APU);
7976 return;
7977 }
7978 #if defined(TARGET_PPC64)
7979 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000080000000LL);
7980 #else
7981 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x80000000);
7982 tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
7983 #endif
7984 }
7985 static inline void gen_evfsnabs(DisasContext *ctx)
7986 {
7987 if (unlikely(!ctx->spe_enabled)) {
7988 gen_exception(ctx, POWERPC_EXCP_APU);
7989 return;
7990 }
7991 #if defined(TARGET_PPC64)
7992 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
7993 #else
7994 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
7995 tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
7996 #endif
7997 }
7998 static inline void gen_evfsneg(DisasContext *ctx)
7999 {
8000 if (unlikely(!ctx->spe_enabled)) {
8001 gen_exception(ctx, POWERPC_EXCP_APU);
8002 return;
8003 }
8004 #if defined(TARGET_PPC64)
8005 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
8006 #else
8007 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8008 tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8009 #endif
8010 }
8012 /* Conversion */
8013 GEN_SPEFPUOP_CONV_64_64(evfscfui);
8014 GEN_SPEFPUOP_CONV_64_64(evfscfsi);
8015 GEN_SPEFPUOP_CONV_64_64(evfscfuf);
8016 GEN_SPEFPUOP_CONV_64_64(evfscfsf);
8017 GEN_SPEFPUOP_CONV_64_64(evfsctui);
8018 GEN_SPEFPUOP_CONV_64_64(evfsctsi);
8019 GEN_SPEFPUOP_CONV_64_64(evfsctuf);
8020 GEN_SPEFPUOP_CONV_64_64(evfsctsf);
8021 GEN_SPEFPUOP_CONV_64_64(evfsctuiz);
8022 GEN_SPEFPUOP_CONV_64_64(evfsctsiz);
8024 /* Comparison */
8025 GEN_SPEFPUOP_COMP_64(evfscmpgt);
8026 GEN_SPEFPUOP_COMP_64(evfscmplt);
8027 GEN_SPEFPUOP_COMP_64(evfscmpeq);
8028 GEN_SPEFPUOP_COMP_64(evfststgt);
8029 GEN_SPEFPUOP_COMP_64(evfststlt);
8030 GEN_SPEFPUOP_COMP_64(evfststeq);
8032 /* Opcodes definitions */
8033 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPE_SINGLE); //
8034 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPE_SINGLE); //
8035 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPE_SINGLE); //
8036 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPE_SINGLE); //
8037 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8038 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8039 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8040 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8041 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8042 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8043 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8044 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8045 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8046 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8048 /* Single precision floating-point operations */
8049 /* Arithmetic */
8050 GEN_SPEFPUOP_ARITH2_32_32(efsadd);
8051 GEN_SPEFPUOP_ARITH2_32_32(efssub);
8052 GEN_SPEFPUOP_ARITH2_32_32(efsmul);
8053 GEN_SPEFPUOP_ARITH2_32_32(efsdiv);
8054 static inline void gen_efsabs(DisasContext *ctx)
8055 {
8056 if (unlikely(!ctx->spe_enabled)) {
8057 gen_exception(ctx, POWERPC_EXCP_APU);
8058 return;
8059 }
8060 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], (target_long)~0x80000000LL);
8061 }
8062 static inline void gen_efsnabs(DisasContext *ctx)
8063 {
8064 if (unlikely(!ctx->spe_enabled)) {
8065 gen_exception(ctx, POWERPC_EXCP_APU);
8066 return;
8067 }
8068 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8069 }
8070 static inline void gen_efsneg(DisasContext *ctx)
8071 {
8072 if (unlikely(!ctx->spe_enabled)) {
8073 gen_exception(ctx, POWERPC_EXCP_APU);
8074 return;
8075 }
8076 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8077 }
8079 /* Conversion */
8080 GEN_SPEFPUOP_CONV_32_32(efscfui);
8081 GEN_SPEFPUOP_CONV_32_32(efscfsi);
8082 GEN_SPEFPUOP_CONV_32_32(efscfuf);
8083 GEN_SPEFPUOP_CONV_32_32(efscfsf);
8084 GEN_SPEFPUOP_CONV_32_32(efsctui);
8085 GEN_SPEFPUOP_CONV_32_32(efsctsi);
8086 GEN_SPEFPUOP_CONV_32_32(efsctuf);
8087 GEN_SPEFPUOP_CONV_32_32(efsctsf);
8088 GEN_SPEFPUOP_CONV_32_32(efsctuiz);
8089 GEN_SPEFPUOP_CONV_32_32(efsctsiz);
8090 GEN_SPEFPUOP_CONV_32_64(efscfd);
8092 /* Comparison */
8093 GEN_SPEFPUOP_COMP_32(efscmpgt);
8094 GEN_SPEFPUOP_COMP_32(efscmplt);
8095 GEN_SPEFPUOP_COMP_32(efscmpeq);
8096 GEN_SPEFPUOP_COMP_32(efststgt);
8097 GEN_SPEFPUOP_COMP_32(efststlt);
8098 GEN_SPEFPUOP_COMP_32(efststeq);
8100 /* Opcodes definitions */
8101 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPE_SINGLE); //
8102 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPE_SINGLE); //
8103 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPE_SINGLE); //
8104 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPE_SINGLE); //
8105 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8106 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8107 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8108 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8109 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8110 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8111 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8112 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8113 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8114 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8116 /* Double precision floating-point operations */
8117 /* Arithmetic */
8118 GEN_SPEFPUOP_ARITH2_64_64(efdadd);
8119 GEN_SPEFPUOP_ARITH2_64_64(efdsub);
8120 GEN_SPEFPUOP_ARITH2_64_64(efdmul);
8121 GEN_SPEFPUOP_ARITH2_64_64(efddiv);
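/* The double-precision sign operations only touch bit 63.  On 32-bit
 * targets that bit lives in the high register half, so the low half is
 * copied unchanged and the and/or/xor is applied to cpu_gprh[]. */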
8122 static inline void gen_efdabs(DisasContext *ctx)
8123 {
8124 if (unlikely(!ctx->spe_enabled)) {
8125 gen_exception(ctx, POWERPC_EXCP_APU);
8126 return;
8127 }
8128 #if defined(TARGET_PPC64)
8129 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000000000000LL);
8130 #else
8131 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8132 tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
8133 #endif
8134 }
8135 static inline void gen_efdnabs(DisasContext *ctx)
8136 {
8137 if (unlikely(!ctx->spe_enabled)) {
8138 gen_exception(ctx, POWERPC_EXCP_APU);
8139 return;
8140 }
8141 #if defined(TARGET_PPC64)
8142 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
8143 #else
8144 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8145 tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8146 #endif
8147 }
8148 static inline void gen_efdneg(DisasContext *ctx)
8149 {
8150 if (unlikely(!ctx->spe_enabled)) {
8151 gen_exception(ctx, POWERPC_EXCP_APU);
8152 return;
8153 }
8154 #if defined(TARGET_PPC64)
8155 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
8156 #else
8157 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8158 tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8159 #endif
8160 }
8162 /* Conversion */
8163 GEN_SPEFPUOP_CONV_64_32(efdcfui);
8164 GEN_SPEFPUOP_CONV_64_32(efdcfsi);
8165 GEN_SPEFPUOP_CONV_64_32(efdcfuf);
8166 GEN_SPEFPUOP_CONV_64_32(efdcfsf);
8167 GEN_SPEFPUOP_CONV_32_64(efdctui);
8168 GEN_SPEFPUOP_CONV_32_64(efdctsi);
8169 GEN_SPEFPUOP_CONV_32_64(efdctuf);
8170 GEN_SPEFPUOP_CONV_32_64(efdctsf);
8171 GEN_SPEFPUOP_CONV_32_64(efdctuiz);
8172 GEN_SPEFPUOP_CONV_32_64(efdctsiz);
8173 GEN_SPEFPUOP_CONV_64_32(efdcfs);
8174 GEN_SPEFPUOP_CONV_64_64(efdcfuid);
8175 GEN_SPEFPUOP_CONV_64_64(efdcfsid);
8176 GEN_SPEFPUOP_CONV_64_64(efdctuidz);
8177 GEN_SPEFPUOP_CONV_64_64(efdctsidz);
8179 /* Comparison */
8180 GEN_SPEFPUOP_COMP_64(efdcmpgt);
8181 GEN_SPEFPUOP_COMP_64(efdcmplt);
8182 GEN_SPEFPUOP_COMP_64(efdcmpeq);
8183 GEN_SPEFPUOP_COMP_64(efdtstgt);
8184 GEN_SPEFPUOP_COMP_64(efdtstlt);
8185 GEN_SPEFPUOP_COMP_64(efdtsteq);
8187 /* Opcodes definitions */
8188 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPE_DOUBLE); //
8189 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8190 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPE_DOUBLE); //
8191 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPE_DOUBLE); //
8192 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPE_DOUBLE); //
8193 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8194 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
8195 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
8196 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8197 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8198 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8199 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8200 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8201 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8202 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
8203 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
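/* The rest of the file gathers every translator defined above into one
 * opcode table.  Roughly, each GEN_HANDLER entry supplies the primary
 * opcode (opc1), two extended-opcode fields (opc2/opc3, 0xFF meaning the
 * field is not used for decoding), a mask of instruction bits that must
 * be zero for the encoding to be valid, and the PPC_* feature flag that
 * decides which CPU models register the instruction. */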
8205 static opcode_t opcodes[] = {
8206 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
8207 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
8208 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8209 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER),
8210 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8211 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
8212 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8213 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8214 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8215 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8216 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
8217 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
8218 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
8219 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
8220 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8221 #if defined(TARGET_PPC64)
8222 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
8223 #endif
8224 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
8225 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
8226 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8227 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8228 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8229 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
8230 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
8231 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
8232 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8233 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8234 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8235 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8236 GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB),
8237 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
8238 #if defined(TARGET_PPC64)
8239 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
8240 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
8241 #endif
8242 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8243 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8244 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8245 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
8246 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
8247 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
8248 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
8249 #if defined(TARGET_PPC64)
8250 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
8251 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
8252 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
8253 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
8254 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
8255 #endif
8256 GEN_HANDLER(frsqrtes, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES),
8257 GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8258 GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8259 GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT),
8260 GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT),
8261 GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT),
8262 GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT),
8263 GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT),
8264 GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT),
8265 GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT),
8266 GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x00010000, PPC_FLOAT),
8267 GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT),
8268 #if defined(TARGET_PPC64)
8269 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
8270 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
8271 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
8272 #endif
8273 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8274 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8275 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
8276 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
8277 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
8278 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
8279 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO),
8280 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
8281 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
8282 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
8283 #if defined(TARGET_PPC64)
8284 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
8285 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
8286 #endif
8287 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
8288 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
8289 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8290 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8291 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
8292 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
8293 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
8294 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
8295 #if defined(TARGET_PPC64)
8296 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
8297 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
8298 #endif
8299 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW),
8300 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
8301 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8302 #if defined(TARGET_PPC64)
8303 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
8304 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
8305 #endif
8306 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
8307 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
8308 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
8309 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
8310 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
8311 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
8312 #if defined(TARGET_PPC64)
8313 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
8314 #endif
8315 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC),
8316 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC),
8317 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
8318 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
8319 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
8320 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE),
8321 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE),
8322 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03E00001, PPC_CACHE_DCBZ),
8323 GEN_HANDLER2(dcbz_970, "dcbz", 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZT),
8324 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
8325 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC),
8326 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
8327 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
8328 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
8329 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
8330 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
8331 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
8332 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
8333 #if defined(TARGET_PPC64)
8334 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
8335 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
8336 PPC_SEGMENT_64B),
8337 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
8338 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
8339 PPC_SEGMENT_64B),
8340 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
8341 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
8342 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
8343 #endif
8344 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
8345 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x03FF0001, PPC_MEM_TLBIE),
8346 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE),
8347 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
8348 #if defined(TARGET_PPC64)
8349 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI),
8350 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
8351 #endif
8352 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
8353 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
8354 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
8355 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
8356 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
8357 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
8358 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
8359 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
8360 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
8361 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
8362 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
8363 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8364 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
8365 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
8366 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
8367 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
8368 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
8369 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
8370 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
8371 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8372 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
8373 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
8374 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
8375 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
8376 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
8377 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
8378 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
8379 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
8380 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
8381 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
8382 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
8383 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
8384 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
8385 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
8386 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
8387 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
8388 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
8389 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
8390 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
8391 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
8392 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
8393 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
8394 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
8395 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
8396 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
8397 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
8398 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
8399 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
8400 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
8401 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8402 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8403 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
8404 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
8405 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8406 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8407 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
8408 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
8409 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
8410 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
8411 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
8412 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
8413 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
8414 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
8415 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
8416 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
8417 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
8418 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
8419 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
8420 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
8421 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
8422 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
8423 GEN_HANDLER(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE),
8424 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
8425 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
8426 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
8427 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
8428 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
8429 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
8430 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
8431 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
8432 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
8433 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
8434 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
8435 GEN_HANDLER(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, PPC_BOOKE),
8436 GEN_HANDLER(msync, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE),
8437 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, PPC_BOOKE),
8438 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
8439 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
8440 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
8441 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
8442 GEN_HANDLER(vsldoi, 0x04, 0x16, 0xFF, 0x00000400, PPC_ALTIVEC),
8443 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
8444 GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE),
8445 GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE),
8446 GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE),
8447 GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE),
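/* From this point the macros that earlier expanded to translator functions
 * are redefined so that re-invoking them with the same arguments emits
 * opcode-table entries instead; arguments that only mattered for code
 * generation (carry, overflow, constants, ...) are still accepted but
 * ignored. */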
8449 #undef GEN_INT_ARITH_ADD
8450 #undef GEN_INT_ARITH_ADD_CONST
8451 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
8452 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
8453 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
8454 add_ca, compute_ca, compute_ov) \
8455 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
8456 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
8457 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
8458 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
8459 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
8460 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
8461 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
8462 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
8463 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
8464 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
8465 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
8467 #undef GEN_INT_ARITH_DIVW
8468 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
8469 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
8470 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
8471 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
8472 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
8473 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
8475 #if defined(TARGET_PPC64)
8476 #undef GEN_INT_ARITH_DIVD
8477 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
8478 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8479 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
8480 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
8481 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
8482 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
8484 #undef GEN_INT_ARITH_MUL_HELPER
8485 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
8486 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8487 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
8488 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
8489 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
8490 #endif
8492 #undef GEN_INT_ARITH_SUBF
8493 #undef GEN_INT_ARITH_SUBF_CONST
8494 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
8495 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
8496 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
8497 add_ca, compute_ca, compute_ov) \
8498 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
8499 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
8500 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
8501 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
8502 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
8503 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
8504 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
8505 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
8506 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
8507 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
8508 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
8510 #undef GEN_LOGICAL1
8511 #undef GEN_LOGICAL2
8512 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
8513 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
8514 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
8515 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
8516 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
8517 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
8518 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
8519 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
8520 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
8521 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
8522 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
8523 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
8524 #if defined(TARGET_PPC64)
8525 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
8526 #endif
8528 #if defined(TARGET_PPC64)
8529 #undef GEN_PPC64_R2
8530 #undef GEN_PPC64_R4
8531 #define GEN_PPC64_R2(name, opc1, opc2) \
8532 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8533 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8534 PPC_64B)
8535 #define GEN_PPC64_R4(name, opc1, opc2) \
8536 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8537 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
8538 PPC_64B), \
8539 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8540 PPC_64B), \
8541 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
8542 PPC_64B)
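/* The 64-bit rotate instructions need two or four table rows per mnemonic,
 * presumably because the high bits of their shift/mask fields spill into
 * the bits this table uses as extended opcode; each of the opc2 | 0x01 /
 * | 0x10 combinations therefore maps back to the same handler. */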
8543 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
8544 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
8545 GEN_PPC64_R4(rldic, 0x1E, 0x04),
8546 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
8547 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
8548 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
8549 #endif
8551 #undef _GEN_FLOAT_ACB
8552 #undef GEN_FLOAT_ACB
8553 #undef _GEN_FLOAT_AB
8554 #undef GEN_FLOAT_AB
8555 #undef _GEN_FLOAT_AC
8556 #undef GEN_FLOAT_AC
8557 #undef GEN_FLOAT_B
8558 #undef GEN_FLOAT_BS
8559 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
8560 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type)
8561 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
8562 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type), \
8563 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type)
8564 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
8565 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
8566 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
8567 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
8568 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
8569 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
8570 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
8571 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
8572 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
8573 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
8574 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
8575 GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type)
8576 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
8577 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type)
8579 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT),
8580 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT),
8581 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT),
8582 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT),
8583 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES),
8584 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE),
8585 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL),
8586 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT),
8587 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT),
8588 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT),
8589 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT),
8590 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT),
8591 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT),
8592 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT),
8593 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT),
8594 #if defined(TARGET_PPC64)
8595 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B),
8596 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B),
8597 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B),
8598 #endif
8599 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT),
8600 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT),
8601 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT),
8602 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT),
8603 GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT),
8604 GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT),
8605 GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT),
8607 #undef GEN_LD
8608 #undef GEN_LDU
8609 #undef GEN_LDUX
8610 #undef GEN_LDX
8611 #undef GEN_LDS
8612 #define GEN_LD(name, ldop, opc, type) \
8613 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8614 #define GEN_LDU(name, ldop, opc, type) \
8615 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8616 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
8617 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8618 #define GEN_LDX(name, ldop, opc2, opc3, type) \
8619 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8620 #define GEN_LDS(name, ldop, op, type) \
8621 GEN_LD(name, ldop, op | 0x20, type) \
8622 GEN_LDU(name, ldop, op | 0x21, type) \
8623 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
8624 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
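/* GEN_LDS registers four rows per access size: the D-form load (op | 0x20),
 * its update form (op | 0x21), and the indexed and update-indexed X-forms
 * under primary opcode 0x1F with opc2 0x17. */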
8626 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
8627 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
8628 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
8629 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
8630 #if defined(TARGET_PPC64)
8631 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
8632 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
8633 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B)
8634 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B)
8635 #endif
8636 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
8637 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
8639 #undef GEN_ST
8640 #undef GEN_STU
8641 #undef GEN_STUX
8642 #undef GEN_STX
8643 #undef GEN_STS
8644 #define GEN_ST(name, stop, opc, type) \
8645 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8646 #define GEN_STU(name, stop, opc, type) \
8647 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
8648 #define GEN_STUX(name, stop, opc2, opc3, type) \
8649 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8650 #define GEN_STX(name, stop, opc2, opc3, type) \
8651 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8652 #define GEN_STS(name, stop, op, type) \
8653 GEN_ST(name, stop, op | 0x20, type) \
8654 GEN_STU(name, stop, op | 0x21, type) \
8655 GEN_STUX(name, stop, 0x17, op | 0x01, type) \
8656 GEN_STX(name, stop, 0x17, op | 0x00, type)
8658 GEN_STS(stb, st8, 0x06, PPC_INTEGER)
8659 GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
8660 GEN_STS(stw, st32, 0x04, PPC_INTEGER)
8661 #if defined(TARGET_PPC64)
8662 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B)
8663 GEN_STX(std, st64, 0x15, 0x04, PPC_64B)
8664 #endif
8665 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
8666 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
8668 #undef GEN_LDF
8669 #undef GEN_LDUF
8670 #undef GEN_LDUXF
8671 #undef GEN_LDXF
8672 #undef GEN_LDFS
8673 #define GEN_LDF(name, ldop, opc, type) \
8674 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8675 #define GEN_LDUF(name, ldop, opc, type) \
8676 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8677 #define GEN_LDUXF(name, ldop, opc, type) \
8678 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
8679 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
8680 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8681 #define GEN_LDFS(name, ldop, op, type) \
8682 GEN_LDF(name, ldop, op | 0x20, type) \
8683 GEN_LDUF(name, ldop, op | 0x21, type) \
8684 GEN_LDUXF(name, ldop, op | 0x01, type) \
8685 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
8687 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT)
8688 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT)
8690 #undef GEN_STF
8691 #undef GEN_STUF
8692 #undef GEN_STUXF
8693 #undef GEN_STXF
8694 #undef GEN_STFS
8695 #define GEN_STF(name, stop, opc, type) \
8696 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8697 #define GEN_STUF(name, stop, opc, type) \
8698 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8699 #define GEN_STUXF(name, stop, opc, type) \
8700 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
8701 #define GEN_STXF(name, stop, opc2, opc3, type) \
8702 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8703 #define GEN_STFS(name, stop, op, type) \
8704 GEN_STF(name, stop, op | 0x20, type) \
8705 GEN_STUF(name, stop, op | 0x21, type) \
8706 GEN_STUXF(name, stop, op | 0x01, type) \
8707 GEN_STXF(name, stop, 0x17, op | 0x00, type)
8709 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT)
8710 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT)
8711 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX)
8713 #undef GEN_CRLOGIC
8714 #define GEN_CRLOGIC(name, tcg_op, opc) \
8715 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
8716 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
8717 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
8718 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
8719 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
8720 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
8721 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
8722 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
8723 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
8725 #undef GEN_MAC_HANDLER
8726 #define GEN_MAC_HANDLER(name, opc2, opc3) \
8727 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
8728 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
8729 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
8730 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
8731 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
8732 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
8733 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
8734 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
8735 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
8736 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
8737 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
8738 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
8739 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
8740 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
8741 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
8742 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
8743 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
8744 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
8745 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
8746 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
8747 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
8748 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
8749 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
8750 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
8751 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
8752 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
8753 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
8754 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
8755 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
8756 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
8757 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
8758 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
8759 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
8760 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
8761 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
8762 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
8763 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
8764 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
8765 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
8766 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
8767 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
8768 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
8769 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
8771 #undef GEN_VR_LDX
8772 #undef GEN_VR_STX
8773 #undef GEN_VR_LVE
8774 #undef GEN_VR_STVE
8775 #define GEN_VR_LDX(name, opc2, opc3) \
8776 GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8777 #define GEN_VR_STX(name, opc2, opc3) \
8778 GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8779 #define GEN_VR_LVE(name, opc2, opc3) \
8780 GEN_HANDLER(lve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8781 #define GEN_VR_STVE(name, opc2, opc3) \
8782 GEN_HANDLER(stve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8783 GEN_VR_LDX(lvx, 0x07, 0x03),
8784 GEN_VR_LDX(lvxl, 0x07, 0x0B),
8785 GEN_VR_LVE(bx, 0x07, 0x00),
8786 GEN_VR_LVE(hx, 0x07, 0x01),
8787 GEN_VR_LVE(wx, 0x07, 0x02),
8788 GEN_VR_STX(svx, 0x07, 0x07),
8789 GEN_VR_STX(svxl, 0x07, 0x0F),
8790 GEN_VR_STVE(bx, 0x07, 0x04),
8791 GEN_VR_STVE(hx, 0x07, 0x05),
8792 GEN_VR_STVE(wx, 0x07, 0x06),
8794 #undef GEN_VX_LOGICAL
8795 #define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
8796 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
8797 GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16),
8798 GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17),
8799 GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18),
8800 GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19),
8801 GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20),
8803 #undef GEN_VXFORM
8804 #define GEN_VXFORM(name, opc2, opc3) \
8805 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
8806 GEN_VXFORM(vaddubm, 0, 0),
8807 GEN_VXFORM(vadduhm, 0, 1),
8808 GEN_VXFORM(vadduwm, 0, 2),
8809 GEN_VXFORM(vsububm, 0, 16),
8810 GEN_VXFORM(vsubuhm, 0, 17),
8811 GEN_VXFORM(vsubuwm, 0, 18),
8812 GEN_VXFORM(vmaxub, 1, 0),
8813 GEN_VXFORM(vmaxuh, 1, 1),
8814 GEN_VXFORM(vmaxuw, 1, 2),
8815 GEN_VXFORM(vmaxsb, 1, 4),
8816 GEN_VXFORM(vmaxsh, 1, 5),
8817 GEN_VXFORM(vmaxsw, 1, 6),
8818 GEN_VXFORM(vminub, 1, 8),
8819 GEN_VXFORM(vminuh, 1, 9),
8820 GEN_VXFORM(vminuw, 1, 10),
8821 GEN_VXFORM(vminsb, 1, 12),
8822 GEN_VXFORM(vminsh, 1, 13),
8823 GEN_VXFORM(vminsw, 1, 14),
8824 GEN_VXFORM(vavgub, 1, 16),
8825 GEN_VXFORM(vavguh, 1, 17),
8826 GEN_VXFORM(vavguw, 1, 18),
8827 GEN_VXFORM(vavgsb, 1, 20),
8828 GEN_VXFORM(vavgsh, 1, 21),
8829 GEN_VXFORM(vavgsw, 1, 22),
8830 GEN_VXFORM(vmrghb, 6, 0),
8831 GEN_VXFORM(vmrghh, 6, 1),
8832 GEN_VXFORM(vmrghw, 6, 2),
8833 GEN_VXFORM(vmrglb, 6, 4),
8834 GEN_VXFORM(vmrglh, 6, 5),
8835 GEN_VXFORM(vmrglw, 6, 6),
8836 GEN_VXFORM(vmuloub, 4, 0),
8837 GEN_VXFORM(vmulouh, 4, 1),
8838 GEN_VXFORM(vmulosb, 4, 4),
8839 GEN_VXFORM(vmulosh, 4, 5),
8840 GEN_VXFORM(vmuleub, 4, 8),
8841 GEN_VXFORM(vmuleuh, 4, 9),
8842 GEN_VXFORM(vmulesb, 4, 12),
8843 GEN_VXFORM(vmulesh, 4, 13),
8844 GEN_VXFORM(vslb, 2, 4),
8845 GEN_VXFORM(vslh, 2, 5),
8846 GEN_VXFORM(vslw, 2, 6),
8847 GEN_VXFORM(vsrb, 2, 8),
8848 GEN_VXFORM(vsrh, 2, 9),
8849 GEN_VXFORM(vsrw, 2, 10),
8850 GEN_VXFORM(vsrab, 2, 12),
8851 GEN_VXFORM(vsrah, 2, 13),
8852 GEN_VXFORM(vsraw, 2, 14),
8853 GEN_VXFORM(vslo, 6, 16),
8854 GEN_VXFORM(vsro, 6, 17),
8855 GEN_VXFORM(vaddcuw, 0, 6),
8856 GEN_VXFORM(vsubcuw, 0, 22),
8857 GEN_VXFORM(vaddubs, 0, 8),
8858 GEN_VXFORM(vadduhs, 0, 9),
8859 GEN_VXFORM(vadduws, 0, 10),
8860 GEN_VXFORM(vaddsbs, 0, 12),
8861 GEN_VXFORM(vaddshs, 0, 13),
8862 GEN_VXFORM(vaddsws, 0, 14),
8863 GEN_VXFORM(vsububs, 0, 24),
8864 GEN_VXFORM(vsubuhs, 0, 25),
8865 GEN_VXFORM(vsubuws, 0, 26),
8866 GEN_VXFORM(vsubsbs, 0, 28),
8867 GEN_VXFORM(vsubshs, 0, 29),
8868 GEN_VXFORM(vsubsws, 0, 30),
8869 GEN_VXFORM(vrlb, 2, 0),
8870 GEN_VXFORM(vrlh, 2, 1),
8871 GEN_VXFORM(vrlw, 2, 2),
8872 GEN_VXFORM(vsl, 2, 7),
8873 GEN_VXFORM(vsr, 2, 11),
8874 GEN_VXFORM(vpkuhum, 7, 0),
8875 GEN_VXFORM(vpkuwum, 7, 1),
8876 GEN_VXFORM(vpkuhus, 7, 2),
8877 GEN_VXFORM(vpkuwus, 7, 3),
8878 GEN_VXFORM(vpkshus, 7, 4),
8879 GEN_VXFORM(vpkswus, 7, 5),
8880 GEN_VXFORM(vpkshss, 7, 6),
8881 GEN_VXFORM(vpkswss, 7, 7),
8882 GEN_VXFORM(vpkpx, 7, 12),
8883 GEN_VXFORM(vsum4ubs, 4, 24),
8884 GEN_VXFORM(vsum4sbs, 4, 28),
8885 GEN_VXFORM(vsum4shs, 4, 25),
8886 GEN_VXFORM(vsum2sws, 4, 26),
8887 GEN_VXFORM(vsumsws, 4, 30),
8888 GEN_VXFORM(vaddfp, 5, 0),
8889 GEN_VXFORM(vsubfp, 5, 1),
8890 GEN_VXFORM(vmaxfp, 5, 16),
8891 GEN_VXFORM(vminfp, 5, 17),
8893 #undef GEN_VXRFORM1
8894 #undef GEN_VXRFORM
8895 #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
8896 GEN_HANDLER2(name, str, 0x4, opc2, opc3, 0x00000000, PPC_ALTIVEC),
8897 #define GEN_VXRFORM(name, opc2, opc3) \
8898 GEN_VXRFORM1(name, name, #name, opc2, opc3) \
8899 GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
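/* Each VXR-form compare is registered twice: once for the plain form and
 * once for the record ('.') form, distinguished here by setting bit 4 of
 * opc3. */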
8900 GEN_VXRFORM(vcmpequb, 3, 0)
8901 GEN_VXRFORM(vcmpequh, 3, 1)
8902 GEN_VXRFORM(vcmpequw, 3, 2)
8903 GEN_VXRFORM(vcmpgtsb, 3, 12)
8904 GEN_VXRFORM(vcmpgtsh, 3, 13)
8905 GEN_VXRFORM(vcmpgtsw, 3, 14)
8906 GEN_VXRFORM(vcmpgtub, 3, 8)
8907 GEN_VXRFORM(vcmpgtuh, 3, 9)
8908 GEN_VXRFORM(vcmpgtuw, 3, 10)
8909 GEN_VXRFORM(vcmpeqfp, 3, 3)
8910 GEN_VXRFORM(vcmpgefp, 3, 7)
8911 GEN_VXRFORM(vcmpgtfp, 3, 11)
8912 GEN_VXRFORM(vcmpbfp, 3, 15)
8914 #undef GEN_VXFORM_SIMM
8915 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
8916 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
8917 GEN_VXFORM_SIMM(vspltisb, 6, 12),
8918 GEN_VXFORM_SIMM(vspltish, 6, 13),
8919 GEN_VXFORM_SIMM(vspltisw, 6, 14),
8921 #undef GEN_VXFORM_NOA
8922 #define GEN_VXFORM_NOA(name, opc2, opc3) \
8923 GEN_HANDLER(name, 0x04, opc2, opc3, 0x001f0000, PPC_ALTIVEC)
8924 GEN_VXFORM_NOA(vupkhsb, 7, 8),
8925 GEN_VXFORM_NOA(vupkhsh, 7, 9),
8926 GEN_VXFORM_NOA(vupklsb, 7, 10),
8927 GEN_VXFORM_NOA(vupklsh, 7, 11),
8928 GEN_VXFORM_NOA(vupkhpx, 7, 13),
8929 GEN_VXFORM_NOA(vupklpx, 7, 15),
8930 GEN_VXFORM_NOA(vrefp, 5, 4),
8931 GEN_VXFORM_NOA(vrsqrtefp, 5, 5),
8932 GEN_VXFORM_NOA(vexptefp, 5, 6),
8933 GEN_VXFORM_NOA(vlogefp, 5, 7),
8934 GEN_VXFORM_NOA(vrfim, 5, 8),
8935 GEN_VXFORM_NOA(vrfin, 5, 9),
8936 GEN_VXFORM_NOA(vrfip, 5, 10),
8937 GEN_VXFORM_NOA(vrfiz, 5, 11),
8939 #undef GEN_VXFORM_UIMM
8940 #define GEN_VXFORM_UIMM(name, opc2, opc3) \
8941 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
8942 GEN_VXFORM_UIMM(vspltb, 6, 8),
8943 GEN_VXFORM_UIMM(vsplth, 6, 9),
8944 GEN_VXFORM_UIMM(vspltw, 6, 10),
8945 GEN_VXFORM_UIMM(vcfux, 5, 12),
8946 GEN_VXFORM_UIMM(vcfsx, 5, 13),
8947 GEN_VXFORM_UIMM(vctuxs, 5, 14),
8948 GEN_VXFORM_UIMM(vctsxs, 5, 15),
8950 #undef GEN_VAFORM_PAIRED
8951 #define GEN_VAFORM_PAIRED(name0, name1, opc2) \
8952 GEN_HANDLER(name0##_##name1, 0x04, opc2, 0xFF, 0x00000000, PPC_ALTIVEC)
8953 GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16),
8954 GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18),
8955 GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19),
8956 GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20),
8957 GEN_VAFORM_PAIRED(vsel, vperm, 21),
8958 GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23),
8960 #undef GEN_SPE
8961 #define GEN_SPE(name0, name1, opc2, opc3, inval, type) \
8962 GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type)
8963 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE),
8964 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE),
8965 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE),
8966 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE),
8967 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE),
8968 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE),
8969 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE),
8970 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE),
8971 GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, PPC_SPE),
8972 GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE),
8973 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE),
8974 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE),
8975 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE),
8976 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE),
8977 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE),
8978 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE),
8979 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE),
8980 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE),
8981 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE),
8982 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE),
8983 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE),
8984 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE),
8985 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE),
8986 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE),
8987 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE),
8988 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE),
8989 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE),
8990 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE),
8991 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE),
GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPE_SINGLE),
GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPE_SINGLE),
GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPE_SINGLE),

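/* SPE embedded floating point: scalar single precision (efs*) */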
GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPE_SINGLE),
GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPE_SINGLE),
GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPE_SINGLE),

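/* SPE embedded floating point: scalar double precision (efd*) */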
GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPE_DOUBLE),
GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPE_DOUBLE),
GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPE_DOUBLE),
GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPE_DOUBLE),
GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPE_DOUBLE),

#undef GEN_SPEOP_LDST
#define GEN_SPEOP_LDST(name, opc2, sh)                                        \
GEN_HANDLER(name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE)
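/* Note: this table-building variant of GEN_SPEOP_LDST ignores its third
 * argument; it is kept so these entries match the signature of the earlier,
 * code-generating definition of the macro. */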
GEN_SPEOP_LDST(evldd, 0x00, 3),
GEN_SPEOP_LDST(evldw, 0x01, 3),
GEN_SPEOP_LDST(evldh, 0x02, 3),
GEN_SPEOP_LDST(evlhhesplat, 0x04, 1),
GEN_SPEOP_LDST(evlhhousplat, 0x06, 1),
GEN_SPEOP_LDST(evlhhossplat, 0x07, 1),
GEN_SPEOP_LDST(evlwhe, 0x08, 2),
GEN_SPEOP_LDST(evlwhou, 0x0A, 2),
GEN_SPEOP_LDST(evlwhos, 0x0B, 2),
GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2),
GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2),

GEN_SPEOP_LDST(evstdd, 0x10, 3),
GEN_SPEOP_LDST(evstdw, 0x11, 3),
GEN_SPEOP_LDST(evstdh, 0x12, 3),
GEN_SPEOP_LDST(evstwhe, 0x18, 2),
GEN_SPEOP_LDST(evstwho, 0x1A, 2),
GEN_SPEOP_LDST(evstwwe, 0x1C, 2),
GEN_SPEOP_LDST(evstwwo, 0x1E, 2),
};

#include "translate_init.c"
#include "helper_regs.h"
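/* translate_init.c is included textually (rather than compiled on its own),
 * presumably so that the CPU model initialization code it contains can refer
 * to the static handlers and tables defined in this file. */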

/*****************************************************************************/
/* Misc PowerPC helpers */
void cpu_dump_state (CPUState *env, FILE *f, fprintf_function cpu_fprintf,
                     int flags)
{
#define RGPL  4
#define RFPL  4

    int i;

    cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
                TARGET_FMT_lx " XER " TARGET_FMT_lx "\n",
                env->nip, env->lr, env->ctr, env->xer);
    cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
                TARGET_FMT_lx " idx %d\n", env->msr, env->spr[SPR_HID0],
                env->hflags, env->mmu_idx);
#if !defined(NO_TIMER_DUMP)
    cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
#if !defined(CONFIG_USER_ONLY)
                " DECR %08" PRIu32
#endif
                "\n",
                cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
#if !defined(CONFIG_USER_ONLY)
                , cpu_ppc_load_decr(env)
#endif
                );
#endif
    for (i = 0; i < 32; i++) {
        if ((i & (RGPL - 1)) == 0)
            cpu_fprintf(f, "GPR%02d", i);
        cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
        if ((i & (RGPL - 1)) == (RGPL - 1))
            cpu_fprintf(f, "\n");
    }
    cpu_fprintf(f, "CR ");
    for (i = 0; i < 8; i++)
        cpu_fprintf(f, "%01x", env->crf[i]);
    cpu_fprintf(f, " [");
    for (i = 0; i < 8; i++) {
        char a = '-';
        if (env->crf[i] & 0x08)
            a = 'L';
        else if (env->crf[i] & 0x04)
            a = 'G';
        else if (env->crf[i] & 0x02)
            a = 'E';
        cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
    }
    cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
                env->reserve_addr);
    for (i = 0; i < 32; i++) {
        if ((i & (RFPL - 1)) == 0)
            cpu_fprintf(f, "FPR%02d", i);
        cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
        if ((i & (RFPL - 1)) == (RFPL - 1))
            cpu_fprintf(f, "\n");
    }
    cpu_fprintf(f, "FPSCR %08x\n", env->fpscr);
#if !defined(CONFIG_USER_ONLY)
    cpu_fprintf(f, "SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx " SDR1 "
                TARGET_FMT_lx "\n", env->spr[SPR_SRR0], env->spr[SPR_SRR1],
                env->spr[SPR_SDR1]);
#endif

#undef RGPL
#undef RFPL
}

void cpu_dump_statistics (CPUState *env, FILE *f, fprintf_function cpu_fprintf,
                          int flags)
{
#if defined(DO_PPC_STATISTICS)
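    /* Per-opcode translation counts are only gathered when DO_PPC_STATISTICS
     * is defined (see handler->count++ in the translation loop below);
     * otherwise this function is a no-op. */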
    opc_handler_t **t1, **t2, **t3, *handler;
    int op1, op2, op3;

    t1 = env->opcodes;
    for (op1 = 0; op1 < 64; op1++) {
        handler = t1[op1];
        if (is_indirect_opcode(handler)) {
            t2 = ind_table(handler);
            for (op2 = 0; op2 < 32; op2++) {
                handler = t2[op2];
                if (is_indirect_opcode(handler)) {
                    t3 = ind_table(handler);
                    for (op3 = 0; op3 < 32; op3++) {
                        handler = t3[op3];
                        if (handler->count == 0)
                            continue;
                        cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
                                    "%016" PRIx64 " %" PRId64 "\n",
                                    op1, op2, op3, op1, (op3 << 5) | op2,
                                    handler->oname,
                                    handler->count, handler->count);
                    }
                } else {
                    if (handler->count == 0)
                        continue;
                    cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
                                "%016" PRIx64 " %" PRId64 "\n",
                                op1, op2, op1, op2, handler->oname,
                                handler->count, handler->count);
                }
            }
        } else {
            if (handler->count == 0)
                continue;
            cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
                        " %" PRId64 "\n",
                        op1, op1, handler->oname,
                        handler->count, handler->count);
        }
    }
#endif
}

/*****************************************************************************/
static inline void gen_intermediate_code_internal(CPUState *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
    DisasContext ctx, *ctxp = &ctx;
    opc_handler_t **table, *handler;
    target_ulong pc_start;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.nip = pc_start;
    ctx.tb = tb;
    ctx.exception = POWERPC_EXCP_NONE;
    ctx.spr_cb = env->spr_cb;
    ctx.mem_idx = env->mmu_idx;
    ctx.access_type = -1;
    ctx.le_mode = env->hflags & (1 << MSR_LE) ? 1 : 0;
#if defined(TARGET_PPC64)
    ctx.sf_mode = msr_sf;
#endif
    ctx.fpu_enabled = msr_fp;
    if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
        ctx.spe_enabled = msr_spe;
    else
        ctx.spe_enabled = 0;
    if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
        ctx.altivec_enabled = msr_vr;
    else
        ctx.altivec_enabled = 0;
    if ((env->flags & POWERPC_FLAG_SE) && msr_se)
        ctx.singlestep_enabled = CPU_SINGLE_STEP;
    else
        ctx.singlestep_enabled = 0;
    if ((env->flags & POWERPC_FLAG_BE) && msr_be)
        ctx.singlestep_enabled |= CPU_BRANCH_STEP;
    if (unlikely(env->singlestep_enabled))
        ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
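    /* Single stepping may be requested by MSR[SE] (trace), MSR[BE] (branch
     * trace) or the gdb stub; the combined flags are checked after each
     * translated instruction below. */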
#if defined (DO_SINGLE_STEP) && 0
    /* Single step trace mode */
    msr_se = 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    /* Set env in case of segfault during code fetch */
    while (ctx.exception == POWERPC_EXCP_NONE && gen_opc_ptr < gen_opc_end) {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.nip) {
                    gen_debug_exception(ctxp);
                    break;
                }
            }
        }
        if (unlikely(search_pc)) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.nip;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
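        /* With search_pc set, the block above records the guest PC and
         * instruction count for each generated op, so a position in the
         * generated code can later be mapped back to a guest instruction. */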
        LOG_DISAS("----------------\n");
        LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
                  ctx.nip, ctx.mem_idx, (int)msr_ir);
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        if (unlikely(ctx.le_mode)) {
            ctx.opcode = bswap32(ldl_code(ctx.nip));
        } else {
            ctx.opcode = ldl_code(ctx.nip);
        }
        LOG_DISAS("translate opcode %08x (%02x %02x %02x) (%s)\n",
                  ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
                  opc3(ctx.opcode), ctx.le_mode ? "little" : "big");
        if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
            tcg_gen_debug_insn_start(ctx.nip);
        ctx.nip += 4;
        table = env->opcodes;
        num_insns++;
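        /* Decode through the opcode tables: opc1 indexes the first level and
         * indirect entries are further indexed by opc2 and then opc3. */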
        handler = table[opc1(ctx.opcode)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc2(ctx.opcode)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc3(ctx.opcode)];
            }
        }
        /* Is opcode *REALLY* valid ? */
        if (unlikely(handler->handler == &gen_invalid)) {
            if (qemu_log_enabled()) {
                qemu_log("invalid/unsupported opcode: "
                         "%02x - %02x - %02x (%08x) " TARGET_FMT_lx " %d\n",
                         opc1(ctx.opcode), opc2(ctx.opcode),
                         opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
            }
        } else {
            if (unlikely((ctx.opcode & handler->inval) != 0)) {
                if (qemu_log_enabled()) {
                    qemu_log("invalid bits: %08x for opcode: "
                             "%02x - %02x - %02x (%08x) " TARGET_FMT_lx "\n",
                             ctx.opcode & handler->inval, opc1(ctx.opcode),
                             opc2(ctx.opcode), opc3(ctx.opcode),
                             ctx.opcode, ctx.nip - 4);
                }
                gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL);
                break;
            }
        }
        (*(handler->handler))(&ctx);
#if defined(DO_PPC_STATISTICS)
        handler->count++;
#endif
        /* Check trace mode exceptions */
        if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
                     (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
                     ctx.exception != POWERPC_SYSCALL &&
                     ctx.exception != POWERPC_EXCP_TRAP &&
                     ctx.exception != POWERPC_EXCP_BRANCH)) {
            gen_exception(ctxp, POWERPC_EXCP_TRACE);
        } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
                            (env->singlestep_enabled) ||
                            singlestep ||
                            num_insns >= max_insns)) {
            /* if we reach a page boundary or are single stepping, stop
             * generation
             */
            break;
        }
    }
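    /* The translation loop has ended: an exception was raised, a page
     * boundary was crossed, the op buffer filled up or the instruction
     * budget was exhausted.  Finish the TB accordingly. */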
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (ctx.exception == POWERPC_EXCP_NONE) {
        gen_goto_tb(&ctx, 0, ctx.nip);
    } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
        if (unlikely(env->singlestep_enabled)) {
            gen_debug_exception(ctxp);
        }
        /* Generate the return instruction */
        tcg_gen_exit_tb(0);
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (unlikely(search_pc)) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.nip - pc_start;
        tb->icount = num_insns;
    }
#if defined(DEBUG_DISAS)
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        int flags;
        flags = env->bfd_mach;
        flags |= ctx.le_mode << 16;
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.nip - pc_start, flags);
        qemu_log("\n");
    }
#endif
}

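/* The two entry points below differ only in whether PC-search information
 * is recorded (the search_pc argument). */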
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

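/* Restore the guest NIP recorded for a given op index; used when a TB is
 * re-translated to recover the PC of a faulting instruction. */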
void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
{
    env->nip = gen_opc_pc[pc_pos];
}