target-ppc/translate.c
1 /*
2 * PowerPC emulation for qemu: main translation routines.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 * Copyright (C) 2011 Freescale Semiconductor, Inc.
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
26 #include "cpu.h"
27 #include "exec-all.h"
28 #include "disas.h"
29 #include "tcg-op.h"
30 #include "qemu-common.h"
31 #include "host-utils.h"
33 #include "helper.h"
34 #define GEN_HELPER 1
35 #include "helper.h"
37 #define CPU_SINGLE_STEP 0x1
38 #define CPU_BRANCH_STEP 0x2
39 #define GDBSTUB_SINGLE_STEP 0x4
41 /* Include definitions for instruction classes and implementation flags */
42 //#define PPC_DEBUG_DISAS
43 //#define DO_PPC_STATISTICS
45 #ifdef PPC_DEBUG_DISAS
46 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
47 #else
48 # define LOG_DISAS(...) do { } while (0)
49 #endif
50 /*****************************************************************************/
51 /* Code translation helpers */
53 /* global register indexes */
54 static TCGv_ptr cpu_env;
55 static char cpu_reg_names[10*3 + 22*4 /* GPR */
56 #if !defined(TARGET_PPC64)
57 + 10*4 + 22*5 /* SPE GPRh */
58 #endif
59 + 10*4 + 22*5 /* FPR */
60 + 2*(10*6 + 22*7) /* AVRh, AVRl */
61 + 8*5 /* CRF */];
62 static TCGv cpu_gpr[32];
63 #if !defined(TARGET_PPC64)
64 static TCGv cpu_gprh[32];
65 #endif
66 static TCGv_i64 cpu_fpr[32];
67 static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
68 static TCGv_i32 cpu_crf[8];
69 static TCGv cpu_nip;
70 static TCGv cpu_msr;
71 static TCGv cpu_ctr;
72 static TCGv cpu_lr;
73 static TCGv cpu_xer;
74 static TCGv cpu_reserve;
75 static TCGv_i32 cpu_fpscr;
76 static TCGv_i32 cpu_access_type;
78 #include "gen-icount.h"
80 void ppc_translate_init(void)
82 int i;
83 char* p;
84 size_t cpu_reg_names_size;
85 static int done_init = 0;
87 if (done_init)
88 return;
90 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
92 p = cpu_reg_names;
93 cpu_reg_names_size = sizeof(cpu_reg_names);
95 for (i = 0; i < 8; i++) {
96 snprintf(p, cpu_reg_names_size, "crf%d", i);
97 cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
98 offsetof(CPUState, crf[i]), p);
99 p += 5;
100 cpu_reg_names_size -= 5;
103 for (i = 0; i < 32; i++) {
104 snprintf(p, cpu_reg_names_size, "r%d", i);
105 cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
106 offsetof(CPUState, gpr[i]), p);
107 p += (i < 10) ? 3 : 4;
108 cpu_reg_names_size -= (i < 10) ? 3 : 4;
109 #if !defined(TARGET_PPC64)
110 snprintf(p, cpu_reg_names_size, "r%dH", i);
111 cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
112 offsetof(CPUState, gprh[i]), p);
113 p += (i < 10) ? 4 : 5;
114 cpu_reg_names_size -= (i < 10) ? 4 : 5;
115 #endif
117 snprintf(p, cpu_reg_names_size, "fp%d", i);
118 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
119 offsetof(CPUState, fpr[i]), p);
120 p += (i < 10) ? 4 : 5;
121 cpu_reg_names_size -= (i < 10) ? 4 : 5;
123 snprintf(p, cpu_reg_names_size, "avr%dH", i);
124 #ifdef HOST_WORDS_BIGENDIAN
125 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
126 offsetof(CPUState, avr[i].u64[0]), p);
127 #else
128 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
129 offsetof(CPUState, avr[i].u64[1]), p);
130 #endif
131 p += (i < 10) ? 6 : 7;
132 cpu_reg_names_size -= (i < 10) ? 6 : 7;
134 snprintf(p, cpu_reg_names_size, "avr%dL", i);
135 #ifdef HOST_WORDS_BIGENDIAN
136 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
137 offsetof(CPUState, avr[i].u64[1]), p);
138 #else
139 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
140 offsetof(CPUState, avr[i].u64[0]), p);
141 #endif
142 p += (i < 10) ? 6 : 7;
143 cpu_reg_names_size -= (i < 10) ? 6 : 7;
146 cpu_nip = tcg_global_mem_new(TCG_AREG0,
147 offsetof(CPUState, nip), "nip");
149 cpu_msr = tcg_global_mem_new(TCG_AREG0,
150 offsetof(CPUState, msr), "msr");
152 cpu_ctr = tcg_global_mem_new(TCG_AREG0,
153 offsetof(CPUState, ctr), "ctr");
155 cpu_lr = tcg_global_mem_new(TCG_AREG0,
156 offsetof(CPUState, lr), "lr");
158 cpu_xer = tcg_global_mem_new(TCG_AREG0,
159 offsetof(CPUState, xer), "xer");
161 cpu_reserve = tcg_global_mem_new(TCG_AREG0,
162 offsetof(CPUState, reserve_addr),
163 "reserve_addr");
165 cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
166 offsetof(CPUState, fpscr), "fpscr");
168 cpu_access_type = tcg_global_mem_new_i32(TCG_AREG0,
169 offsetof(CPUState, access_type), "access_type");
171 /* register helpers */
172 #define GEN_HELPER 2
173 #include "helper.h"
175 done_init = 1;
178 /* internal defines */
179 typedef struct DisasContext {
180 struct TranslationBlock *tb;
181 target_ulong nip;
182 uint32_t opcode;
183 uint32_t exception;
184 /* Routine used to access memory */
185 int mem_idx;
186 int access_type;
187 /* Translation flags */
188 int le_mode;
189 #if defined(TARGET_PPC64)
190 int sf_mode;
191 #endif
192 int fpu_enabled;
193 int altivec_enabled;
194 int spe_enabled;
195 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
196 int singlestep_enabled;
197 } DisasContext;
199 struct opc_handler_t {
200 /* invalid bits */
201 uint32_t inval;
202 /* instruction type */
203 uint64_t type;
204 /* extended instruction type */
205 uint64_t type2;
206 /* handler */
207 void (*handler)(DisasContext *ctx);
208 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
209 const char *oname;
210 #endif
211 #if defined(DO_PPC_STATISTICS)
212 uint64_t count;
213 #endif
216 static inline void gen_reset_fpstatus(void)
218 #ifdef CONFIG_SOFTFLOAT
219 gen_helper_reset_fpstatus();
220 #endif
223 static inline void gen_compute_fprf(TCGv_i64 arg, int set_fprf, int set_rc)
225 TCGv_i32 t0 = tcg_temp_new_i32();
227 if (set_fprf != 0) {
228 /* This case might be optimized later */
229 tcg_gen_movi_i32(t0, 1);
230 gen_helper_compute_fprf(t0, arg, t0);
231 if (unlikely(set_rc)) {
232 tcg_gen_mov_i32(cpu_crf[1], t0);
234 gen_helper_float_check_status();
235 } else if (unlikely(set_rc)) {
236 /* We always need to compute fpcc */
237 tcg_gen_movi_i32(t0, 0);
238 gen_helper_compute_fprf(t0, arg, t0);
239 tcg_gen_mov_i32(cpu_crf[1], t0);
242 tcg_temp_free_i32(t0);
245 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
247 if (ctx->access_type != access_type) {
248 tcg_gen_movi_i32(cpu_access_type, access_type);
249 ctx->access_type = access_type;
253 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
255 #if defined(TARGET_PPC64)
256 if (ctx->sf_mode)
257 tcg_gen_movi_tl(cpu_nip, nip);
258 else
259 #endif
260 tcg_gen_movi_tl(cpu_nip, (uint32_t)nip);
263 static inline void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
265 TCGv_i32 t0, t1;
266 if (ctx->exception == POWERPC_EXCP_NONE) {
267 gen_update_nip(ctx, ctx->nip);
269 t0 = tcg_const_i32(excp);
270 t1 = tcg_const_i32(error);
271 gen_helper_raise_exception_err(t0, t1);
272 tcg_temp_free_i32(t0);
273 tcg_temp_free_i32(t1);
274 ctx->exception = (excp);
277 static inline void gen_exception(DisasContext *ctx, uint32_t excp)
279 TCGv_i32 t0;
280 if (ctx->exception == POWERPC_EXCP_NONE) {
281 gen_update_nip(ctx, ctx->nip);
283 t0 = tcg_const_i32(excp);
284 gen_helper_raise_exception(t0);
285 tcg_temp_free_i32(t0);
286 ctx->exception = (excp);
289 static inline void gen_debug_exception(DisasContext *ctx)
291 TCGv_i32 t0;
293 if (ctx->exception != POWERPC_EXCP_BRANCH)
294 gen_update_nip(ctx, ctx->nip);
295 t0 = tcg_const_i32(EXCP_DEBUG);
296 gen_helper_raise_exception(t0);
297 tcg_temp_free_i32(t0);
300 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
302 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_INVAL | error);
305 /* Stop translation */
306 static inline void gen_stop_exception(DisasContext *ctx)
308 gen_update_nip(ctx, ctx->nip);
309 ctx->exception = POWERPC_EXCP_STOP;
312 /* No need to update nip here, as execution flow will change */
313 static inline void gen_sync_exception(DisasContext *ctx)
315 ctx->exception = POWERPC_EXCP_SYNC;
318 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
319 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
321 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \
322 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
324 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
325 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
327 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \
328 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
330 typedef struct opcode_t {
331 unsigned char opc1, opc2, opc3;
332 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
333 unsigned char pad[5];
334 #else
335 unsigned char pad[1];
336 #endif
337 opc_handler_t handler;
338 const char *oname;
339 } opcode_t;
341 /*****************************************************************************/
342 /*** Instruction decoding ***/
343 #define EXTRACT_HELPER(name, shift, nb) \
344 static inline uint32_t name(uint32_t opcode) \
346 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
349 #define EXTRACT_SHELPER(name, shift, nb) \
350 static inline int32_t name(uint32_t opcode) \
352 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
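/* For reference, EXTRACT_HELPER(opc1, 26, 6) below expands to
 *
 *     static inline uint32_t opc1(uint32_t opcode)
 *     {
 *         return (opcode >> (26)) & ((1 << (6)) - 1);
 *     }
 *
 * i.e. it returns the 6-bit primary opcode held in bits 31..26 of the
 * instruction word.  EXTRACT_SHELPER does the same but sign-extends the
 * extracted field through the int16_t cast, as the 16-bit immediates need.
 */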
355 /* Opcode part 1 */
356 EXTRACT_HELPER(opc1, 26, 6);
357 /* Opcode part 2 */
358 EXTRACT_HELPER(opc2, 1, 5);
359 /* Opcode part 3 */
360 EXTRACT_HELPER(opc3, 6, 5);
361 /* Update Cr0 flags */
362 EXTRACT_HELPER(Rc, 0, 1);
363 /* Destination */
364 EXTRACT_HELPER(rD, 21, 5);
365 /* Source */
366 EXTRACT_HELPER(rS, 21, 5);
367 /* First operand */
368 EXTRACT_HELPER(rA, 16, 5);
369 /* Second operand */
370 EXTRACT_HELPER(rB, 11, 5);
371 /* Third operand */
372 EXTRACT_HELPER(rC, 6, 5);
373 /*** Get CRn ***/
374 EXTRACT_HELPER(crfD, 23, 3);
375 EXTRACT_HELPER(crfS, 18, 3);
376 EXTRACT_HELPER(crbD, 21, 5);
377 EXTRACT_HELPER(crbA, 16, 5);
378 EXTRACT_HELPER(crbB, 11, 5);
379 /* SPR / TBL */
380 EXTRACT_HELPER(_SPR, 11, 10);
381 static inline uint32_t SPR(uint32_t opcode)
383 uint32_t sprn = _SPR(opcode);
385 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
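/* Worked example: the mfspr/mtspr SPR field stores the register number with
 * its two 5-bit halves swapped, which SPR() undoes.  For the link register
 * (SPR 8) the raw instruction field is 0x100:
 *
 *     _SPR(opcode) = 0x100
 *     SPR(opcode)  = ((0x100 >> 5) & 0x1F) | ((0x100 & 0x1F) << 5) = 8
 */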
387 /*** Get constants ***/
388 EXTRACT_HELPER(IMM, 12, 8);
389 /* 16 bits signed immediate value */
390 EXTRACT_SHELPER(SIMM, 0, 16);
391 /* 16 bits unsigned immediate value */
392 EXTRACT_HELPER(UIMM, 0, 16);
393 /* 5 bits signed immediate value */
394 EXTRACT_HELPER(SIMM5, 16, 5);
395 /* 5 bits unsigned immediate value */
396 EXTRACT_HELPER(UIMM5, 16, 5);
397 /* Bit count */
398 EXTRACT_HELPER(NB, 11, 5);
399 /* Shift count */
400 EXTRACT_HELPER(SH, 11, 5);
401 /* Vector shift count */
402 EXTRACT_HELPER(VSH, 6, 4);
403 /* Mask start */
404 EXTRACT_HELPER(MB, 6, 5);
405 /* Mask end */
406 EXTRACT_HELPER(ME, 1, 5);
407 /* Trap operand */
408 EXTRACT_HELPER(TO, 21, 5);
410 EXTRACT_HELPER(CRM, 12, 8);
411 EXTRACT_HELPER(FM, 17, 8);
412 EXTRACT_HELPER(SR, 16, 4);
413 EXTRACT_HELPER(FPIMM, 12, 4);
415 /*** Jump target decoding ***/
416 /* Displacement */
417 EXTRACT_SHELPER(d, 0, 16);
418 /* Immediate address */
419 static inline target_ulong LI(uint32_t opcode)
421 return (opcode >> 0) & 0x03FFFFFC;
424 static inline uint32_t BD(uint32_t opcode)
426 return (opcode >> 0) & 0xFFFC;
429 EXTRACT_HELPER(BO, 21, 5);
430 EXTRACT_HELPER(BI, 16, 5);
431 /* Absolute/relative address */
432 EXTRACT_HELPER(AA, 1, 1);
433 /* Link */
434 EXTRACT_HELPER(LK, 0, 1);
436 /* Create a mask between <start> and <end> bits */
437 static inline target_ulong MASK(uint32_t start, uint32_t end)
439 target_ulong ret;
441 #if defined(TARGET_PPC64)
442 if (likely(start == 0)) {
443 ret = UINT64_MAX << (63 - end);
444 } else if (likely(end == 63)) {
445 ret = UINT64_MAX >> start;
447 #else
448 if (likely(start == 0)) {
449 ret = UINT32_MAX << (31 - end);
450 } else if (likely(end == 31)) {
451 ret = UINT32_MAX >> start;
453 #endif
454 else {
455 ret = (((target_ulong)(-1ULL)) >> (start)) ^
456 (((target_ulong)(-1ULL) >> (end)) >> 1);
457 if (unlikely(start > end))
458 return ~ret;
461 return ret;
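/* Worked examples for MASK(), assuming a 32-bit target (target_ulong is
 * 32 bits wide); bit 0 is the most-significant bit, as in the MB/ME
 * instruction fields:
 *
 *     MASK(0, 15)  = 0xFFFF0000    start == 0 fast path
 *     MASK(16, 31) = 0x0000FFFF    end == 31 fast path
 *     MASK(28, 3)  = 0xF000000F    start > end: the generic expression gives
 *                                  0x0FFFFFF0, which is then inverted to
 *                                  produce the wrap-around mask rlwinm needs
 */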
464 /*****************************************************************************/
465 /* PowerPC instructions table */
467 #if defined(DO_PPC_STATISTICS)
468 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
470 .opc1 = op1, \
471 .opc2 = op2, \
472 .opc3 = op3, \
473 .pad = { 0, }, \
474 .handler = { \
475 .inval = invl, \
476 .type = _typ, \
477 .type2 = _typ2, \
478 .handler = &gen_##name, \
479 .oname = stringify(name), \
480 }, \
481 .oname = stringify(name), \
483 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
485 .opc1 = op1, \
486 .opc2 = op2, \
487 .opc3 = op3, \
488 .pad = { 0, }, \
489 .handler = { \
490 .inval = invl, \
491 .type = _typ, \
492 .type2 = _typ2, \
493 .handler = &gen_##name, \
494 .oname = onam, \
495 }, \
496 .oname = onam, \
498 #else
499 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
501 .opc1 = op1, \
502 .opc2 = op2, \
503 .opc3 = op3, \
504 .pad = { 0, }, \
505 .handler = { \
506 .inval = invl, \
507 .type = _typ, \
508 .type2 = _typ2, \
509 .handler = &gen_##name, \
510 }, \
511 .oname = stringify(name), \
513 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
515 .opc1 = op1, \
516 .opc2 = op2, \
517 .opc3 = op3, \
518 .pad = { 0, }, \
519 .handler = { \
520 .inval = invl, \
521 .type = _typ, \
522 .type2 = _typ2, \
523 .handler = &gen_##name, \
524 }, \
525 .oname = onam, \
527 #endif
529 /* SPR load/store helpers */
530 static inline void gen_load_spr(TCGv t, int reg)
532 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
535 static inline void gen_store_spr(int reg, TCGv t)
537 tcg_gen_st_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
540 /* Invalid instruction */
541 static void gen_invalid(DisasContext *ctx)
543 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
546 static opc_handler_t invalid_handler = {
547 .inval = 0xFFFFFFFF,
548 .type = PPC_NONE,
549 .type2 = PPC_NONE,
550 .handler = gen_invalid,
553 /*** Integer comparison ***/
555 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
557 int l1, l2, l3;
559 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_xer);
560 tcg_gen_shri_i32(cpu_crf[crf], cpu_crf[crf], XER_SO);
561 tcg_gen_andi_i32(cpu_crf[crf], cpu_crf[crf], 1);
563 l1 = gen_new_label();
564 l2 = gen_new_label();
565 l3 = gen_new_label();
566 if (s) {
567 tcg_gen_brcond_tl(TCG_COND_LT, arg0, arg1, l1);
568 tcg_gen_brcond_tl(TCG_COND_GT, arg0, arg1, l2);
569 } else {
570 tcg_gen_brcond_tl(TCG_COND_LTU, arg0, arg1, l1);
571 tcg_gen_brcond_tl(TCG_COND_GTU, arg0, arg1, l2);
573 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_EQ);
574 tcg_gen_br(l3);
575 gen_set_label(l1);
576 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_LT);
577 tcg_gen_br(l3);
578 gen_set_label(l2);
579 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_GT);
580 gen_set_label(l3);
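/* gen_op_cmp first copies XER[SO] into the low bit of the target CR field,
 * then ORs in exactly one of the CRF_LT/CRF_GT/CRF_EQ bits depending on how
 * arg0 compares with arg1 (signed when s != 0, unsigned otherwise).  A
 * signed compare of 5 against 7, for example, leaves only the LT bit set
 * (plus SO if it was already pending).  The CRF_* bit positions themselves
 * come from cpu.h and are not repeated here.
 */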
583 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
585 TCGv t0 = tcg_const_local_tl(arg1);
586 gen_op_cmp(arg0, t0, s, crf);
587 tcg_temp_free(t0);
590 #if defined(TARGET_PPC64)
591 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
593 TCGv t0, t1;
594 t0 = tcg_temp_local_new();
595 t1 = tcg_temp_local_new();
596 if (s) {
597 tcg_gen_ext32s_tl(t0, arg0);
598 tcg_gen_ext32s_tl(t1, arg1);
599 } else {
600 tcg_gen_ext32u_tl(t0, arg0);
601 tcg_gen_ext32u_tl(t1, arg1);
603 gen_op_cmp(t0, t1, s, crf);
604 tcg_temp_free(t1);
605 tcg_temp_free(t0);
608 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
610 TCGv t0 = tcg_const_local_tl(arg1);
611 gen_op_cmp32(arg0, t0, s, crf);
612 tcg_temp_free(t0);
614 #endif
616 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
618 #if defined(TARGET_PPC64)
619 if (!(ctx->sf_mode))
620 gen_op_cmpi32(reg, 0, 1, 0);
621 else
622 #endif
623 gen_op_cmpi(reg, 0, 1, 0);
626 /* cmp */
627 static void gen_cmp(DisasContext *ctx)
629 #if defined(TARGET_PPC64)
630 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
631 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
632 1, crfD(ctx->opcode));
633 else
634 #endif
635 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
636 1, crfD(ctx->opcode));
639 /* cmpi */
640 static void gen_cmpi(DisasContext *ctx)
642 #if defined(TARGET_PPC64)
643 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
644 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
645 1, crfD(ctx->opcode));
646 else
647 #endif
648 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
649 1, crfD(ctx->opcode));
652 /* cmpl */
653 static void gen_cmpl(DisasContext *ctx)
655 #if defined(TARGET_PPC64)
656 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
657 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
658 0, crfD(ctx->opcode));
659 else
660 #endif
661 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
662 0, crfD(ctx->opcode));
665 /* cmpli */
666 static void gen_cmpli(DisasContext *ctx)
668 #if defined(TARGET_PPC64)
669 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
670 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
671 0, crfD(ctx->opcode));
672 else
673 #endif
674 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
675 0, crfD(ctx->opcode));
678 /* isel (PowerPC 2.03 specification) */
679 static void gen_isel(DisasContext *ctx)
681 int l1, l2;
682 uint32_t bi = rC(ctx->opcode);
683 uint32_t mask;
684 TCGv_i32 t0;
686 l1 = gen_new_label();
687 l2 = gen_new_label();
689 mask = 1 << (3 - (bi & 0x03));
690 t0 = tcg_temp_new_i32();
691 tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
692 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
693 if (rA(ctx->opcode) == 0)
694 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
695 else
696 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
697 tcg_gen_br(l2);
698 gen_set_label(l1);
699 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
700 gen_set_label(l2);
701 tcg_temp_free_i32(t0);
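/* isel rD,rA,rB,crb is a conditional move: if CR bit crb is set, rD receives
 * rA (or 0 when the rA field is 0), otherwise rD receives rB.  The bit
 * number comes from the rC slot of the opcode: bi >> 2 selects the CR field
 * and 1 << (3 - (bi & 0x03)) selects the bit inside that field, accounting
 * for the architected bit order being reversed with respect to the bit
 * numbering used in cpu_crf[].
 */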
704 /*** Integer arithmetic ***/
706 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
707 TCGv arg1, TCGv arg2, int sub)
709 int l1;
710 TCGv t0;
712 l1 = gen_new_label();
713 /* Start with XER OV disabled, the most likely case */
714 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
715 t0 = tcg_temp_local_new();
716 tcg_gen_xor_tl(t0, arg0, arg1);
717 #if defined(TARGET_PPC64)
718 if (!ctx->sf_mode)
719 tcg_gen_ext32s_tl(t0, t0);
720 #endif
721 if (sub)
722 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
723 else
724 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
725 tcg_gen_xor_tl(t0, arg1, arg2);
726 #if defined(TARGET_PPC64)
727 if (!ctx->sf_mode)
728 tcg_gen_ext32s_tl(t0, t0);
729 #endif
730 if (sub)
731 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
732 else
733 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
734 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
735 gen_set_label(l1);
736 tcg_temp_free(t0);
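/* The two XOR tests above implement the standard signed-overflow rule.  For
 * an addition (sub == 0), OV can only be raised when the operands share a
 * sign (arg1 ^ arg2 keeps the sign bit clear) while the result's sign
 * differs from theirs (arg0 ^ arg1 has the sign bit set); for a subtraction
 * the two conditions are reversed.  In 32-bit mode on a 64-bit target the
 * XORed values are sign-extended first, so only bit 31 decides.
 */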
739 static inline void gen_op_arith_compute_ca(DisasContext *ctx, TCGv arg1,
740 TCGv arg2, int sub)
742 int l1 = gen_new_label();
744 #if defined(TARGET_PPC64)
745 if (!(ctx->sf_mode)) {
746 TCGv t0, t1;
747 t0 = tcg_temp_new();
748 t1 = tcg_temp_new();
750 tcg_gen_ext32u_tl(t0, arg1);
751 tcg_gen_ext32u_tl(t1, arg2);
752 if (sub) {
753 tcg_gen_brcond_tl(TCG_COND_GTU, t0, t1, l1);
754 } else {
755 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
757 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
758 gen_set_label(l1);
759 tcg_temp_free(t0);
760 tcg_temp_free(t1);
761 } else
762 #endif
764 if (sub) {
765 tcg_gen_brcond_tl(TCG_COND_GTU, arg1, arg2, l1);
766 } else {
767 tcg_gen_brcond_tl(TCG_COND_GEU, arg1, arg2, l1);
769 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
770 gen_set_label(l1);
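/* Carry detection relies on unsigned wrap-around: callers pass the freshly
 * computed sum (or difference) as arg1 and one of the original operands as
 * arg2.  For an addition, a carry out of the register occurred exactly when
 * the result is smaller than that operand; e.g. on a 32-bit target
 * 0xFFFFFFFF + 1 wraps to 0 < 0xFFFFFFFF, so XER[CA] is set.  For a
 * subtraction (sub != 0) CA means "no borrow", i.e. the result does not
 * exceed the minuend.
 */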
774 /* Common add function */
775 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
776 TCGv arg2, int add_ca, int compute_ca,
777 int compute_ov)
779 TCGv t0, t1;
781 if ((!compute_ca && !compute_ov) ||
782 (!TCGV_EQUAL(ret,arg1) && !TCGV_EQUAL(ret, arg2))) {
783 t0 = ret;
784 } else {
785 t0 = tcg_temp_local_new();
788 if (add_ca) {
789 t1 = tcg_temp_local_new();
790 tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
791 tcg_gen_shri_tl(t1, t1, XER_CA);
792 } else {
793 TCGV_UNUSED(t1);
796 if (compute_ca && compute_ov) {
797 /* Start with XER CA and OV disabled, the most likely case */
798 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
799 } else if (compute_ca) {
800 /* Start with XER CA disabled, the most likely case */
801 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
802 } else if (compute_ov) {
803 /* Start with XER OV disabled, the most likely case */
804 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
807 tcg_gen_add_tl(t0, arg1, arg2);
809 if (compute_ca) {
810 gen_op_arith_compute_ca(ctx, t0, arg1, 0);
812 if (add_ca) {
813 tcg_gen_add_tl(t0, t0, t1);
814 gen_op_arith_compute_ca(ctx, t0, t1, 0);
815 tcg_temp_free(t1);
817 if (compute_ov) {
818 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
821 if (unlikely(Rc(ctx->opcode) != 0))
822 gen_set_Rc0(ctx, t0);
824 if (!TCGV_EQUAL(t0, ret)) {
825 tcg_gen_mov_tl(ret, t0);
826 tcg_temp_free(t0);
829 /* Add functions with two operands */
830 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
831 static void glue(gen_, name)(DisasContext *ctx) \
833 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
834 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
835 add_ca, compute_ca, compute_ov); \
837 /* Add functions with one operand and one immediate */
838 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
839 add_ca, compute_ca, compute_ov) \
840 static void glue(gen_, name)(DisasContext *ctx) \
842 TCGv t0 = tcg_const_local_tl(const_val); \
843 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
844 cpu_gpr[rA(ctx->opcode)], t0, \
845 add_ca, compute_ca, compute_ov); \
846 tcg_temp_free(t0); \
849 /* add add. addo addo. */
850 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
851 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
852 /* addc addc. addco addco. */
853 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
854 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
855 /* adde adde. addeo addeo. */
856 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
857 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
858 /* addme addme. addmeo addmeo. */
859 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
860 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
861 /* addze addze. addzeo addzeo. */
862 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
863 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
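/* For reference, GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) expands to
 *
 *     static void gen_addc(DisasContext *ctx)
 *     {
 *         gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],
 *                          cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
 *                          0, 1, 0);
 *     }
 *
 * i.e. an add that tracks the carry but not the overflow.  The *_CONST
 * variants (addme/addze) substitute the constant -1 or 0 for the rB operand.
 */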
864 /* addi */
865 static void gen_addi(DisasContext *ctx)
867 target_long simm = SIMM(ctx->opcode);
869 if (rA(ctx->opcode) == 0) {
870 /* li case */
871 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
872 } else {
873 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm);
876 /* addic addic.*/
877 static inline void gen_op_addic(DisasContext *ctx, TCGv ret, TCGv arg1,
878 int compute_Rc0)
880 target_long simm = SIMM(ctx->opcode);
882 /* Start with XER CA disabled, the most likely case */
883 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
885 if (likely(simm != 0)) {
886 TCGv t0 = tcg_temp_local_new();
887 tcg_gen_addi_tl(t0, arg1, simm);
888 gen_op_arith_compute_ca(ctx, t0, arg1, 0);
889 tcg_gen_mov_tl(ret, t0);
890 tcg_temp_free(t0);
891 } else {
892 tcg_gen_mov_tl(ret, arg1);
894 if (compute_Rc0) {
895 gen_set_Rc0(ctx, ret);
899 static void gen_addic(DisasContext *ctx)
901 gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
904 static void gen_addic_(DisasContext *ctx)
906 gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
909 /* addis */
910 static void gen_addis(DisasContext *ctx)
912 target_long simm = SIMM(ctx->opcode);
914 if (rA(ctx->opcode) == 0) {
915 /* lis case */
916 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
917 } else {
918 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm << 16);
922 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
923 TCGv arg2, int sign, int compute_ov)
925 int l1 = gen_new_label();
926 int l2 = gen_new_label();
927 TCGv_i32 t0 = tcg_temp_local_new_i32();
928 TCGv_i32 t1 = tcg_temp_local_new_i32();
930 tcg_gen_trunc_tl_i32(t0, arg1);
931 tcg_gen_trunc_tl_i32(t1, arg2);
932 tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1);
933 if (sign) {
934 int l3 = gen_new_label();
935 tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3);
936 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1);
937 gen_set_label(l3);
938 tcg_gen_div_i32(t0, t0, t1);
939 } else {
940 tcg_gen_divu_i32(t0, t0, t1);
942 if (compute_ov) {
943 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
945 tcg_gen_br(l2);
946 gen_set_label(l1);
947 if (sign) {
948 tcg_gen_sari_i32(t0, t0, 31);
949 } else {
950 tcg_gen_movi_i32(t0, 0);
952 if (compute_ov) {
953 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
955 gen_set_label(l2);
956 tcg_gen_extu_i32_tl(ret, t0);
957 tcg_temp_free_i32(t0);
958 tcg_temp_free_i32(t1);
959 if (unlikely(Rc(ctx->opcode) != 0))
960 gen_set_Rc0(ctx, ret);
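/* The corner cases the architecture leaves undefined are handled explicitly
 * above: a zero divisor, and INT32_MIN / -1 for the signed form.  In both
 * cases the quotient is replaced by the dividend's sign bit replicated
 * across the register (signed) or by zero (unsigned), and when compute_ov is
 * set XER[OV] and XER[SO] are raised instead of OV being cleared.
 */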
962 /* Div functions */
963 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
964 static void glue(gen_, name)(DisasContext *ctx) \
966 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
967 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
968 sign, compute_ov); \
970 /* divwu divwu. divwuo divwuo. */
971 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
972 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
973 /* divw divw. divwo divwo. */
974 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
975 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
976 #if defined(TARGET_PPC64)
977 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
978 TCGv arg2, int sign, int compute_ov)
980 int l1 = gen_new_label();
981 int l2 = gen_new_label();
983 tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1);
984 if (sign) {
985 int l3 = gen_new_label();
986 tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3);
987 tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1);
988 gen_set_label(l3);
989 tcg_gen_div_i64(ret, arg1, arg2);
990 } else {
991 tcg_gen_divu_i64(ret, arg1, arg2);
993 if (compute_ov) {
994 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
996 tcg_gen_br(l2);
997 gen_set_label(l1);
998 if (sign) {
999 tcg_gen_sari_i64(ret, arg1, 63);
1000 } else {
1001 tcg_gen_movi_i64(ret, 0);
1003 if (compute_ov) {
1004 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1006 gen_set_label(l2);
1007 if (unlikely(Rc(ctx->opcode) != 0))
1008 gen_set_Rc0(ctx, ret);
1010 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
1011 static void glue(gen_, name)(DisasContext *ctx) \
1013 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
1014 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1015 sign, compute_ov); \
1017 /* divdu divdu. divduo divduo. */
1018 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1019 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1021 /* divd divd. divdo divdo. */
1021 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1022 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1023 #endif
1025 /* mulhw mulhw. */
1026 static void gen_mulhw(DisasContext *ctx)
1028 TCGv_i64 t0, t1;
1030 t0 = tcg_temp_new_i64();
1031 t1 = tcg_temp_new_i64();
1032 #if defined(TARGET_PPC64)
1033 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
1034 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
1035 tcg_gen_mul_i64(t0, t0, t1);
1036 tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
1037 #else
1038 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1039 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1040 tcg_gen_mul_i64(t0, t0, t1);
1041 tcg_gen_shri_i64(t0, t0, 32);
1042 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1043 #endif
1044 tcg_temp_free_i64(t0);
1045 tcg_temp_free_i64(t1);
1046 if (unlikely(Rc(ctx->opcode) != 0))
1047 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1050 /* mulhwu mulhwu. */
1051 static void gen_mulhwu(DisasContext *ctx)
1053 TCGv_i64 t0, t1;
1055 t0 = tcg_temp_new_i64();
1056 t1 = tcg_temp_new_i64();
1057 #if defined(TARGET_PPC64)
1058 tcg_gen_ext32u_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1059 tcg_gen_ext32u_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1060 tcg_gen_mul_i64(t0, t0, t1);
1061 tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
1062 #else
1063 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1064 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1065 tcg_gen_mul_i64(t0, t0, t1);
1066 tcg_gen_shri_i64(t0, t0, 32);
1067 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1068 #endif
1069 tcg_temp_free_i64(t0);
1070 tcg_temp_free_i64(t1);
1071 if (unlikely(Rc(ctx->opcode) != 0))
1072 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1075 /* mullw mullw. */
1076 static void gen_mullw(DisasContext *ctx)
1078 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1079 cpu_gpr[rB(ctx->opcode)]);
1080 tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]);
1081 if (unlikely(Rc(ctx->opcode) != 0))
1082 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1085 /* mullwo mullwo. */
1086 static void gen_mullwo(DisasContext *ctx)
1088 int l1;
1089 TCGv_i64 t0, t1;
1091 t0 = tcg_temp_new_i64();
1092 t1 = tcg_temp_new_i64();
1093 l1 = gen_new_label();
1094 /* Start with XER OV disabled, the most likely case */
1095 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1096 #if defined(TARGET_PPC64)
1097 tcg_gen_ext32s_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1098 tcg_gen_ext32s_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1099 #else
1100 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1101 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1102 #endif
1103 tcg_gen_mul_i64(t0, t0, t1);
1104 #if defined(TARGET_PPC64)
1105 tcg_gen_ext32s_i64(cpu_gpr[rD(ctx->opcode)], t0);
1106 tcg_gen_brcond_i64(TCG_COND_EQ, t0, cpu_gpr[rD(ctx->opcode)], l1);
1107 #else
1108 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1109 tcg_gen_ext32s_i64(t1, t0);
1110 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
1111 #endif
1112 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1113 gen_set_label(l1);
1114 tcg_temp_free_i64(t0);
1115 tcg_temp_free_i64(t1);
1116 if (unlikely(Rc(ctx->opcode) != 0))
1117 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
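/* Overflow detection for mullwo does the multiply in 64 bits and checks
 * whether the product still equals its own 32-bit sign extension.  For
 * example 0x00010000 * 0x00010000 = 0x100000000 does not fit in 32 bits, so
 * the comparison fails and XER[OV] | XER[SO] are set, while the low 32 bits
 * (zero here) are still written to rD.
 */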
1120 /* mulli */
1121 static void gen_mulli(DisasContext *ctx)
1123 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1124 SIMM(ctx->opcode));
1126 #if defined(TARGET_PPC64)
1127 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
1128 static void glue(gen_, name)(DisasContext *ctx) \
1130 gen_helper_##name (cpu_gpr[rD(ctx->opcode)], \
1131 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
1132 if (unlikely(Rc(ctx->opcode) != 0)) \
1133 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
1135 /* mulhdu mulhdu. */
1136 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00);
1137 /* mulhd mulhd. */
1138 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02);
1140 /* mulld mulld. */
1141 static void gen_mulld(DisasContext *ctx)
1143 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1144 cpu_gpr[rB(ctx->opcode)]);
1145 if (unlikely(Rc(ctx->opcode) != 0))
1146 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1148 /* mulldo mulldo. */
1149 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17);
1150 #endif
1152 /* neg neg. nego nego. */
1153 static inline void gen_op_arith_neg(DisasContext *ctx, TCGv ret, TCGv arg1,
1154 int ov_check)
1156 int l1 = gen_new_label();
1157 int l2 = gen_new_label();
1158 TCGv t0 = tcg_temp_local_new();
1159 #if defined(TARGET_PPC64)
1160 if (ctx->sf_mode) {
1161 tcg_gen_mov_tl(t0, arg1);
1162 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT64_MIN, l1);
1163 } else
1164 #endif
1166 tcg_gen_ext32s_tl(t0, arg1);
1167 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT32_MIN, l1);
1169 tcg_gen_neg_tl(ret, arg1);
1170 if (ov_check) {
1171 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1173 tcg_gen_br(l2);
1174 gen_set_label(l1);
1175 tcg_gen_mov_tl(ret, t0);
1176 if (ov_check) {
1177 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1179 gen_set_label(l2);
1180 tcg_temp_free(t0);
1181 if (unlikely(Rc(ctx->opcode) != 0))
1182 gen_set_Rc0(ctx, ret);
1185 static void gen_neg(DisasContext *ctx)
1187 gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
1190 static void gen_nego(DisasContext *ctx)
1192 gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
1195 /* Common subf function */
1196 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1197 TCGv arg2, int add_ca, int compute_ca,
1198 int compute_ov)
1200 TCGv t0, t1;
1202 if ((!compute_ca && !compute_ov) ||
1203 (!TCGV_EQUAL(ret, arg1) && !TCGV_EQUAL(ret, arg2))) {
1204 t0 = ret;
1205 } else {
1206 t0 = tcg_temp_local_new();
1209 if (add_ca) {
1210 t1 = tcg_temp_local_new();
1211 tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
1212 tcg_gen_shri_tl(t1, t1, XER_CA);
1213 } else {
1214 TCGV_UNUSED(t1);
1217 if (compute_ca && compute_ov) {
1218 /* Start with XER CA and OV disabled, the most likely case */
1219 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
1220 } else if (compute_ca) {
1221 /* Start with XER CA disabled, the most likely case */
1222 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1223 } else if (compute_ov) {
1224 /* Start with XER OV disabled, the most likely case */
1225 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1228 if (add_ca) {
1229 tcg_gen_not_tl(t0, arg1);
1230 tcg_gen_add_tl(t0, t0, arg2);
1231 gen_op_arith_compute_ca(ctx, t0, arg2, 0);
1232 tcg_gen_add_tl(t0, t0, t1);
1233 gen_op_arith_compute_ca(ctx, t0, t1, 0);
1234 tcg_temp_free(t1);
1235 } else {
1236 tcg_gen_sub_tl(t0, arg2, arg1);
1237 if (compute_ca) {
1238 gen_op_arith_compute_ca(ctx, t0, arg2, 1);
1241 if (compute_ov) {
1242 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1245 if (unlikely(Rc(ctx->opcode) != 0))
1246 gen_set_Rc0(ctx, t0);
1248 if (!TCGV_EQUAL(t0, ret)) {
1249 tcg_gen_mov_tl(ret, t0);
1250 tcg_temp_free(t0);
1253 /* Sub functions with two operands */
1254 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
1255 static void glue(gen_, name)(DisasContext *ctx) \
1257 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1258 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1259 add_ca, compute_ca, compute_ov); \
1261 /* Sub functions with one operand and one immediate */
1262 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
1263 add_ca, compute_ca, compute_ov) \
1264 static void glue(gen_, name)(DisasContext *ctx) \
1266 TCGv t0 = tcg_const_local_tl(const_val); \
1267 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1268 cpu_gpr[rA(ctx->opcode)], t0, \
1269 add_ca, compute_ca, compute_ov); \
1270 tcg_temp_free(t0); \
1272 /* subf subf. subfo subfo. */
1273 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
1274 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
1275 /* subfc subfc. subfco subfco. */
1276 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
1277 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
1278 /* subfe subfe. subfeo subfeo. */
1279 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
1280 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
1281 /* subfme subfme. subfmeo subfmeo. */
1282 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
1283 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
1284 /* subfze subfze. subfzeo subfzeo. */
1285 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
1286 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
1288 /* subfic */
1289 static void gen_subfic(DisasContext *ctx)
1291 /* Start with XER CA disabled, the most likely case */
1292 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1293 TCGv t0 = tcg_temp_local_new();
1294 TCGv t1 = tcg_const_local_tl(SIMM(ctx->opcode));
1295 tcg_gen_sub_tl(t0, t1, cpu_gpr[rA(ctx->opcode)]);
1296 gen_op_arith_compute_ca(ctx, t0, t1, 1);
1297 tcg_temp_free(t1);
1298 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
1299 tcg_temp_free(t0);
1302 /*** Integer logical ***/
1303 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
1304 static void glue(gen_, name)(DisasContext *ctx) \
1306 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
1307 cpu_gpr[rB(ctx->opcode)]); \
1308 if (unlikely(Rc(ctx->opcode) != 0)) \
1309 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1312 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
1313 static void glue(gen_, name)(DisasContext *ctx) \
1315 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
1316 if (unlikely(Rc(ctx->opcode) != 0)) \
1317 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1320 /* and & and. */
1321 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
1322 /* andc & andc. */
1323 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
1325 /* andi. */
1326 static void gen_andi_(DisasContext *ctx)
1328 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
1329 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1332 /* andis. */
1333 static void gen_andis_(DisasContext *ctx)
1335 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
1336 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1339 /* cntlzw */
1340 static void gen_cntlzw(DisasContext *ctx)
1342 gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1343 if (unlikely(Rc(ctx->opcode) != 0))
1344 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1346 /* eqv & eqv. */
1347 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
1348 /* extsb & extsb. */
1349 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
1350 /* extsh & extsh. */
1351 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
1352 /* nand & nand. */
1353 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
1354 /* nor & nor. */
1355 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
1357 /* or & or. */
1358 static void gen_or(DisasContext *ctx)
1360 int rs, ra, rb;
1362 rs = rS(ctx->opcode);
1363 ra = rA(ctx->opcode);
1364 rb = rB(ctx->opcode);
1365 /* Optimisation for mr. ri case */
1366 if (rs != ra || rs != rb) {
1367 if (rs != rb)
1368 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
1369 else
1370 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
1371 if (unlikely(Rc(ctx->opcode) != 0))
1372 gen_set_Rc0(ctx, cpu_gpr[ra]);
1373 } else if (unlikely(Rc(ctx->opcode) != 0)) {
1374 gen_set_Rc0(ctx, cpu_gpr[rs]);
1375 #if defined(TARGET_PPC64)
1376 } else {
1377 int prio = 0;
1379 switch (rs) {
1380 case 1:
1381 /* Set process priority to low */
1382 prio = 2;
1383 break;
1384 case 6:
1385 /* Set process priority to medium-low */
1386 prio = 3;
1387 break;
1388 case 2:
1389 /* Set process priority to normal */
1390 prio = 4;
1391 break;
1392 #if !defined(CONFIG_USER_ONLY)
1393 case 31:
1394 if (ctx->mem_idx > 0) {
1395 /* Set process priority to very low */
1396 prio = 1;
1398 break;
1399 case 5:
1400 if (ctx->mem_idx > 0) {
1401 /* Set process priority to medium-high */
1402 prio = 5;
1404 break;
1405 case 3:
1406 if (ctx->mem_idx > 0) {
1407 /* Set process priority to high */
1408 prio = 6;
1410 break;
1411 case 7:
1412 if (ctx->mem_idx > 1) {
1413 /* Set process priority to very high */
1414 prio = 7;
1416 break;
1417 #endif
1418 default:
1419 /* nop */
1420 break;
1422 if (prio) {
1423 TCGv t0 = tcg_temp_new();
1424 gen_load_spr(t0, SPR_PPR);
1425 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
1426 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
1427 gen_store_spr(SPR_PPR, t0);
1428 tcg_temp_free(t0);
1430 #endif
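/* The "or rx,rx,rx" encodings handled above are the usual PowerPC
 * priority-hint nops: "or 1,1,1" requests low priority, "or 6,6,6"
 * medium-low, "or 2,2,2" normal, while the remaining forms (very low,
 * medium-high, high, very high) are only honoured when ctx->mem_idx shows a
 * sufficiently privileged context.  The hint is modelled simply by rewriting
 * the priority field kept in SPR_PPR.
 */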
1433 /* orc & orc. */
1434 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
1436 /* xor & xor. */
1437 static void gen_xor(DisasContext *ctx)
1439 /* Optimisation for "set to zero" case */
1440 if (rS(ctx->opcode) != rB(ctx->opcode))
1441 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1442 else
1443 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1444 if (unlikely(Rc(ctx->opcode) != 0))
1445 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1448 /* ori */
1449 static void gen_ori(DisasContext *ctx)
1451 target_ulong uimm = UIMM(ctx->opcode);
1453 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1454 /* NOP */
1455 /* XXX: should handle special NOPs for POWER series */
1456 return;
1458 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1461 /* oris */
1462 static void gen_oris(DisasContext *ctx)
1464 target_ulong uimm = UIMM(ctx->opcode);
1466 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1467 /* NOP */
1468 return;
1470 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1473 /* xori */
1474 static void gen_xori(DisasContext *ctx)
1476 target_ulong uimm = UIMM(ctx->opcode);
1478 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1479 /* NOP */
1480 return;
1482 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1485 /* xoris */
1486 static void gen_xoris(DisasContext *ctx)
1488 target_ulong uimm = UIMM(ctx->opcode);
1490 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1491 /* NOP */
1492 return;
1494 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1497 /* popcntb : PowerPC 2.03 specification */
1498 static void gen_popcntb(DisasContext *ctx)
1500 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1503 static void gen_popcntw(DisasContext *ctx)
1505 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1508 #if defined(TARGET_PPC64)
1509 /* popcntd: PowerPC 2.06 specification */
1510 static void gen_popcntd(DisasContext *ctx)
1512 gen_helper_popcntd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1514 #endif
1516 #if defined(TARGET_PPC64)
1517 /* extsw & extsw. */
1518 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
1520 /* cntlzd */
1521 static void gen_cntlzd(DisasContext *ctx)
1523 gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1524 if (unlikely(Rc(ctx->opcode) != 0))
1525 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1527 #endif
1529 /*** Integer rotate ***/
1531 /* rlwimi & rlwimi. */
1532 static void gen_rlwimi(DisasContext *ctx)
1534 uint32_t mb, me, sh;
1536 mb = MB(ctx->opcode);
1537 me = ME(ctx->opcode);
1538 sh = SH(ctx->opcode);
1539 if (likely(sh == 0 && mb == 0 && me == 31)) {
1540 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1541 } else {
1542 target_ulong mask;
1543 TCGv t1;
1544 TCGv t0 = tcg_temp_new();
1545 #if defined(TARGET_PPC64)
1546 TCGv_i32 t2 = tcg_temp_new_i32();
1547 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
1548 tcg_gen_rotli_i32(t2, t2, sh);
1549 tcg_gen_extu_i32_i64(t0, t2);
1550 tcg_temp_free_i32(t2);
1551 #else
1552 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1553 #endif
1554 #if defined(TARGET_PPC64)
1555 mb += 32;
1556 me += 32;
1557 #endif
1558 mask = MASK(mb, me);
1559 t1 = tcg_temp_new();
1560 tcg_gen_andi_tl(t0, t0, mask);
1561 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1562 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1563 tcg_temp_free(t0);
1564 tcg_temp_free(t1);
1566 if (unlikely(Rc(ctx->opcode) != 0))
1567 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
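/* Example (32-bit target): rlwimi rA,rS,8,16,23 rotates rS left by 8 and
 * inserts the result under MASK(16, 23) = 0x0000FF00, leaving the other bits
 * of rA untouched, i.e. roughly
 *
 *     rA = (rotl32(rS, 8) & 0x0000FF00) | (rA & ~0x0000FF00);
 *
 * (rotl32 is only illustrative shorthand).  On 64-bit targets mb/me are
 * shifted up by 32 so the same MASK() helper applies to the zero-extended
 * 32-bit rotation.
 */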
1570 /* rlwinm & rlwinm. */
1571 static void gen_rlwinm(DisasContext *ctx)
1573 uint32_t mb, me, sh;
1575 sh = SH(ctx->opcode);
1576 mb = MB(ctx->opcode);
1577 me = ME(ctx->opcode);
1579 if (likely(mb == 0 && me == (31 - sh))) {
1580 if (likely(sh == 0)) {
1581 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1582 } else {
1583 TCGv t0 = tcg_temp_new();
1584 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1585 tcg_gen_shli_tl(t0, t0, sh);
1586 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1587 tcg_temp_free(t0);
1589 } else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
1590 TCGv t0 = tcg_temp_new();
1591 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1592 tcg_gen_shri_tl(t0, t0, mb);
1593 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1594 tcg_temp_free(t0);
1595 } else {
1596 TCGv t0 = tcg_temp_new();
1597 #if defined(TARGET_PPC64)
1598 TCGv_i32 t1 = tcg_temp_new_i32();
1599 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1600 tcg_gen_rotli_i32(t1, t1, sh);
1601 tcg_gen_extu_i32_i64(t0, t1);
1602 tcg_temp_free_i32(t1);
1603 #else
1604 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1605 #endif
1606 #if defined(TARGET_PPC64)
1607 mb += 32;
1608 me += 32;
1609 #endif
1610 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1611 tcg_temp_free(t0);
1613 if (unlikely(Rc(ctx->opcode) != 0))
1614 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
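/* The two fast paths above correspond to the common extended mnemonics:
 * mb == 0 && me == 31 - sh is "slwi rA,rS,sh" (shift left and zero-extend),
 * and me == 31 && sh == 32 - mb is "srwi rA,rS,mb" (logical shift right).
 * Everything else falls back to the generic rotate-and-mask sequence.
 */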
1617 /* rlwnm & rlwnm. */
1618 static void gen_rlwnm(DisasContext *ctx)
1620 uint32_t mb, me;
1621 TCGv t0;
1622 #if defined(TARGET_PPC64)
1623 TCGv_i32 t1, t2;
1624 #endif
1626 mb = MB(ctx->opcode);
1627 me = ME(ctx->opcode);
1628 t0 = tcg_temp_new();
1629 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
1630 #if defined(TARGET_PPC64)
1631 t1 = tcg_temp_new_i32();
1632 t2 = tcg_temp_new_i32();
1633 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1634 tcg_gen_trunc_i64_i32(t2, t0);
1635 tcg_gen_rotl_i32(t1, t1, t2);
1636 tcg_gen_extu_i32_i64(t0, t1);
1637 tcg_temp_free_i32(t1);
1638 tcg_temp_free_i32(t2);
1639 #else
1640 tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
1641 #endif
1642 if (unlikely(mb != 0 || me != 31)) {
1643 #if defined(TARGET_PPC64)
1644 mb += 32;
1645 me += 32;
1646 #endif
1647 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1648 } else {
1649 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1651 tcg_temp_free(t0);
1652 if (unlikely(Rc(ctx->opcode) != 0))
1653 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1656 #if defined(TARGET_PPC64)
1657 #define GEN_PPC64_R2(name, opc1, opc2) \
1658 static void glue(gen_, name##0)(DisasContext *ctx) \
1660 gen_##name(ctx, 0); \
1663 static void glue(gen_, name##1)(DisasContext *ctx) \
1665 gen_##name(ctx, 1); \
1667 #define GEN_PPC64_R4(name, opc1, opc2) \
1668 static void glue(gen_, name##0)(DisasContext *ctx) \
1670 gen_##name(ctx, 0, 0); \
1673 static void glue(gen_, name##1)(DisasContext *ctx) \
1675 gen_##name(ctx, 0, 1); \
1678 static void glue(gen_, name##2)(DisasContext *ctx) \
1680 gen_##name(ctx, 1, 0); \
1683 static void glue(gen_, name##3)(DisasContext *ctx) \
1685 gen_##name(ctx, 1, 1); \
1688 static inline void gen_rldinm(DisasContext *ctx, uint32_t mb, uint32_t me,
1689 uint32_t sh)
1691 if (likely(sh != 0 && mb == 0 && me == (63 - sh))) {
1692 tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
1693 } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
1694 tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
1695 } else {
1696 TCGv t0 = tcg_temp_new();
1697 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1698 if (likely(mb == 0 && me == 63)) {
1699 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1700 } else {
1701 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1703 tcg_temp_free(t0);
1705 if (unlikely(Rc(ctx->opcode) != 0))
1706 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1708 /* rldicl - rldicl. */
1709 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
1711 uint32_t sh, mb;
1713 sh = SH(ctx->opcode) | (shn << 5);
1714 mb = MB(ctx->opcode) | (mbn << 5);
1715 gen_rldinm(ctx, mb, 63, sh);
1717 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1718 /* rldicr - rldicr. */
1719 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
1721 uint32_t sh, me;
1723 sh = SH(ctx->opcode) | (shn << 5);
1724 me = MB(ctx->opcode) | (men << 5);
1725 gen_rldinm(ctx, 0, me, sh);
1727 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1728 /* rldic - rldic. */
1729 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
1731 uint32_t sh, mb;
1733 sh = SH(ctx->opcode) | (shn << 5);
1734 mb = MB(ctx->opcode) | (mbn << 5);
1735 gen_rldinm(ctx, mb, 63 - sh, sh);
1737 GEN_PPC64_R4(rldic, 0x1E, 0x04);
1739 static inline void gen_rldnm(DisasContext *ctx, uint32_t mb, uint32_t me)
1741 TCGv t0;
1743 mb = MB(ctx->opcode);
1744 me = ME(ctx->opcode);
1745 t0 = tcg_temp_new();
1746 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1747 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1748 if (unlikely(mb != 0 || me != 63)) {
1749 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1750 } else {
1751 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1753 tcg_temp_free(t0);
1754 if (unlikely(Rc(ctx->opcode) != 0))
1755 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1758 /* rldcl - rldcl. */
1759 static inline void gen_rldcl(DisasContext *ctx, int mbn)
1761 uint32_t mb;
1763 mb = MB(ctx->opcode) | (mbn << 5);
1764 gen_rldnm(ctx, mb, 63);
1766 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1767 /* rldcr - rldcr. */
1768 static inline void gen_rldcr(DisasContext *ctx, int men)
1770 uint32_t me;
1772 me = MB(ctx->opcode) | (men << 5);
1773 gen_rldnm(ctx, 0, me);
1775 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1776 /* rldimi - rldimi. */
1777 static inline void gen_rldimi(DisasContext *ctx, int mbn, int shn)
1779 uint32_t sh, mb, me;
1781 sh = SH(ctx->opcode) | (shn << 5);
1782 mb = MB(ctx->opcode) | (mbn << 5);
1783 me = 63 - sh;
1784 if (unlikely(sh == 0 && mb == 0)) {
1785 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1786 } else {
1787 TCGv t0, t1;
1788 target_ulong mask;
1790 t0 = tcg_temp_new();
1791 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1792 t1 = tcg_temp_new();
1793 mask = MASK(mb, me);
1794 tcg_gen_andi_tl(t0, t0, mask);
1795 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1796 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1797 tcg_temp_free(t0);
1798 tcg_temp_free(t1);
1800 if (unlikely(Rc(ctx->opcode) != 0))
1801 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1803 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1804 #endif
1806 /*** Integer shift ***/
1808 /* slw & slw. */
1809 static void gen_slw(DisasContext *ctx)
1811 TCGv t0, t1;
1813 t0 = tcg_temp_new();
1814 /* AND rS with a mask that is 0 when rB >= 0x20 */
1815 #if defined(TARGET_PPC64)
1816 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1817 tcg_gen_sari_tl(t0, t0, 0x3f);
1818 #else
1819 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1820 tcg_gen_sari_tl(t0, t0, 0x1f);
1821 #endif
1822 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1823 t1 = tcg_temp_new();
1824 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1825 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1826 tcg_temp_free(t1);
1827 tcg_temp_free(t0);
1828 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
1829 if (unlikely(Rc(ctx->opcode) != 0))
1830 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
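/* The mask trick used by slw/srw (and sld/srd below): shifting rB left so
 * that the "shift amount too large" bit (bit 5 of the shift field for 32-bit
 * shifts, bit 6 for 64-bit ones) lands in the sign position and then
 * arithmetic-shifting it back gives either all zeroes or all ones.  The andc
 * with rS therefore produces 0 whenever rB requests a shift of 32 (or 64) or
 * more, as the architecture specifies, without any conditional branch.
 */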
1833 /* sraw & sraw. */
1834 static void gen_sraw(DisasContext *ctx)
1836 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)],
1837 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1838 if (unlikely(Rc(ctx->opcode) != 0))
1839 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1842 /* srawi & srawi. */
1843 static void gen_srawi(DisasContext *ctx)
1845 int sh = SH(ctx->opcode);
1846 if (sh != 0) {
1847 int l1, l2;
1848 TCGv t0;
1849 l1 = gen_new_label();
1850 l2 = gen_new_label();
1851 t0 = tcg_temp_local_new();
1852 tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1853 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
1854 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
1855 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
1856 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
1857 tcg_gen_br(l2);
1858 gen_set_label(l1);
1859 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1860 gen_set_label(l2);
1861 tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1862 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], t0, sh);
1863 tcg_temp_free(t0);
1864 } else {
1865 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1866 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1868 if (unlikely(Rc(ctx->opcode) != 0))
1869 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
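/* For srawi, XER[CA] must be set only when the source is negative and at
 * least one 1 bit is shifted out; that is exactly what the two conditional
 * branches above test (sign of the 32-bit source, then the low sh bits).
 * A shift count of 0 simply copies the register and clears CA.
 */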
1872 /* srw & srw. */
1873 static void gen_srw(DisasContext *ctx)
1875 TCGv t0, t1;
1877 t0 = tcg_temp_new();
1878 /* AND rS with a mask that is 0 when rB >= 0x20 */
1879 #if defined(TARGET_PPC64)
1880 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1881 tcg_gen_sari_tl(t0, t0, 0x3f);
1882 #else
1883 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1884 tcg_gen_sari_tl(t0, t0, 0x1f);
1885 #endif
1886 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1887 tcg_gen_ext32u_tl(t0, t0);
1888 t1 = tcg_temp_new();
1889 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1890 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1891 tcg_temp_free(t1);
1892 tcg_temp_free(t0);
1893 if (unlikely(Rc(ctx->opcode) != 0))
1894 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1897 #if defined(TARGET_PPC64)
1898 /* sld & sld. */
1899 static void gen_sld(DisasContext *ctx)
1901 TCGv t0, t1;
1903 t0 = tcg_temp_new();
1904 /* AND rS with a mask that is 0 when rB >= 0x40 */
1905 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
1906 tcg_gen_sari_tl(t0, t0, 0x3f);
1907 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1908 t1 = tcg_temp_new();
1909 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
1910 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1911 tcg_temp_free(t1);
1912 tcg_temp_free(t0);
1913 if (unlikely(Rc(ctx->opcode) != 0))
1914 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1917 /* srad & srad. */
1918 static void gen_srad(DisasContext *ctx)
1920 gen_helper_srad(cpu_gpr[rA(ctx->opcode)],
1921 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1922 if (unlikely(Rc(ctx->opcode) != 0))
1923 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1925 /* sradi & sradi. */
1926 static inline void gen_sradi(DisasContext *ctx, int n)
1928 int sh = SH(ctx->opcode) + (n << 5);
1929 if (sh != 0) {
1930 int l1, l2;
1931 TCGv t0;
1932 l1 = gen_new_label();
1933 l2 = gen_new_label();
1934 t0 = tcg_temp_local_new();
1935 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
1936 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
1937 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
1938 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
1939 tcg_gen_br(l2);
1940 gen_set_label(l1);
1941 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1942 gen_set_label(l2);
1943 tcg_temp_free(t0);
1944 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
1945 } else {
1946 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1947 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1949 if (unlikely(Rc(ctx->opcode) != 0))
1950 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1953 static void gen_sradi0(DisasContext *ctx)
1955 gen_sradi(ctx, 0);
1958 static void gen_sradi1(DisasContext *ctx)
1960 gen_sradi(ctx, 1);
1963 /* srd & srd. */
1964 static void gen_srd(DisasContext *ctx)
1966 TCGv t0, t1;
1968 t0 = tcg_temp_new();
1969 /* AND rS with a mask that is 0 when rB >= 0x40 */
1970 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
1971 tcg_gen_sari_tl(t0, t0, 0x3f);
1972 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1973 t1 = tcg_temp_new();
1974 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
1975 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1976 tcg_temp_free(t1);
1977 tcg_temp_free(t0);
1978 if (unlikely(Rc(ctx->opcode) != 0))
1979 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1981 #endif
1983 /*** Floating-Point arithmetic ***/
1984 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
1985 static void gen_f##name(DisasContext *ctx) \
1987 if (unlikely(!ctx->fpu_enabled)) { \
1988 gen_exception(ctx, POWERPC_EXCP_FPU); \
1989 return; \
1991 /* NIP cannot be restored if the memory exception comes from a helper */ \
1992 gen_update_nip(ctx, ctx->nip - 4); \
1993 gen_reset_fpstatus(); \
1994 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
1995 cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
1996 if (isfloat) { \
1997 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
1999 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], set_fprf, \
2000 Rc(ctx->opcode) != 0); \
2003 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
2004 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
2005 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
2007 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
2008 static void gen_f##name(DisasContext *ctx) \
2010 if (unlikely(!ctx->fpu_enabled)) { \
2011 gen_exception(ctx, POWERPC_EXCP_FPU); \
2012 return; \
2014 /* NIP cannot be restored if the memory exception comes from a helper */ \
2015 gen_update_nip(ctx, ctx->nip - 4); \
2016 gen_reset_fpstatus(); \
2017 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2018 cpu_fpr[rB(ctx->opcode)]); \
2019 if (isfloat) { \
2020 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2022 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2023 set_fprf, Rc(ctx->opcode) != 0); \
2025 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
2026 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2027 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
2029 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
2030 static void gen_f##name(DisasContext *ctx) \
2032 if (unlikely(!ctx->fpu_enabled)) { \
2033 gen_exception(ctx, POWERPC_EXCP_FPU); \
2034 return; \
2036 /* NIP cannot be restored if the memory exception comes from a helper */ \
2037 gen_update_nip(ctx, ctx->nip - 4); \
2038 gen_reset_fpstatus(); \
2039 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2040 cpu_fpr[rC(ctx->opcode)]); \
2041 if (isfloat) { \
2042 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2044 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2045 set_fprf, Rc(ctx->opcode) != 0); \
2047 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
2048 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2049 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
2051 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
2052 static void gen_f##name(DisasContext *ctx) \
2054 if (unlikely(!ctx->fpu_enabled)) { \
2055 gen_exception(ctx, POWERPC_EXCP_FPU); \
2056 return; \
2058 /* NIP cannot be restored if the memory exception comes from a helper */ \
2059 gen_update_nip(ctx, ctx->nip - 4); \
2060 gen_reset_fpstatus(); \
2061 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2062 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2063 set_fprf, Rc(ctx->opcode) != 0); \
2066 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
2067 static void gen_f##name(DisasContext *ctx) \
2069 if (unlikely(!ctx->fpu_enabled)) { \
2070 gen_exception(ctx, POWERPC_EXCP_FPU); \
2071 return; \
2073 /* NIP cannot be restored if the memory exception comes from a helper */ \
2074 gen_update_nip(ctx, ctx->nip - 4); \
2075 gen_reset_fpstatus(); \
2076 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2077 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2078 set_fprf, Rc(ctx->opcode) != 0); \
2081 /* fadd - fadds */
2082 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
2083 /* fdiv - fdivs */
2084 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
2085 /* fmul - fmuls */
2086 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
2088 /* fre */
2089 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
2091 /* fres */
2092 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
2094 /* frsqrte */
2095 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
2097 /* frsqrtes */
2098 static void gen_frsqrtes(DisasContext *ctx)
2100 if (unlikely(!ctx->fpu_enabled)) {
2101 gen_exception(ctx, POWERPC_EXCP_FPU);
2102 return;
2104 /* NIP cannot be restored if the memory exception comes from a helper */
2105 gen_update_nip(ctx, ctx->nip - 4);
2106 gen_reset_fpstatus();
2107 gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2108 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2109 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2112 /* fsel */
2113 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
2114 /* fsub - fsubs */
2115 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
2116 /* Optional: */
2118 /* fsqrt */
2119 static void gen_fsqrt(DisasContext *ctx)
2121 if (unlikely(!ctx->fpu_enabled)) {
2122 gen_exception(ctx, POWERPC_EXCP_FPU);
2123 return;
2125 /* NIP cannot be restored if the memory exception comes from a helper */
2126 gen_update_nip(ctx, ctx->nip - 4);
2127 gen_reset_fpstatus();
2128 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2129 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2132 static void gen_fsqrts(DisasContext *ctx)
2134 if (unlikely(!ctx->fpu_enabled)) {
2135 gen_exception(ctx, POWERPC_EXCP_FPU);
2136 return;
2138 /* NIP cannot be restored if the memory exception comes from a helper */
2139 gen_update_nip(ctx, ctx->nip - 4);
2140 gen_reset_fpstatus();
2141 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2142 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2143 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2146 /*** Floating-Point multiply-and-add ***/
2147 /* fmadd - fmadds */
2148 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
2149 /* fmsub - fmsubs */
2150 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
2151 /* fnmadd - fnmadds */
2152 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
2153 /* fnmsub - fnmsubs */
2154 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
2156 /*** Floating-Point round & convert ***/
2157 /* fctiw */
2158 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
2159 /* fctiwz */
2160 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
2161 /* frsp */
2162 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
2163 #if defined(TARGET_PPC64)
2164 /* fcfid */
2165 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
2166 /* fctid */
2167 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
2168 /* fctidz */
2169 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
2170 #endif
2172 /* frin */
2173 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
2174 /* friz */
2175 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
2176 /* frip */
2177 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
2178 /* frim */
2179 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
2181 /*** Floating-Point compare ***/
2183 /* fcmpo */
2184 static void gen_fcmpo(DisasContext *ctx)
2186 TCGv_i32 crf;
2187 if (unlikely(!ctx->fpu_enabled)) {
2188 gen_exception(ctx, POWERPC_EXCP_FPU);
2189 return;
2191 /* NIP cannot be restored if the memory exception comes from a helper */
2192 gen_update_nip(ctx, ctx->nip - 4);
2193 gen_reset_fpstatus();
2194 crf = tcg_const_i32(crfD(ctx->opcode));
2195 gen_helper_fcmpo(cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)], crf);
2196 tcg_temp_free_i32(crf);
2197 gen_helper_float_check_status();
2200 /* fcmpu */
2201 static void gen_fcmpu(DisasContext *ctx)
2203 TCGv_i32 crf;
2204 if (unlikely(!ctx->fpu_enabled)) {
2205 gen_exception(ctx, POWERPC_EXCP_FPU);
2206 return;
2208 /* NIP cannot be restored if the memory exception comes from a helper */
2209 gen_update_nip(ctx, ctx->nip - 4);
2210 gen_reset_fpstatus();
2211 crf = tcg_const_i32(crfD(ctx->opcode));
2212 gen_helper_fcmpu(cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)], crf);
2213 tcg_temp_free_i32(crf);
2214 gen_helper_float_check_status();
2217 /*** Floating-point move ***/
2218 /* fabs */
2219 /* XXX: beware that fabs never checks for NaNs nor updates the FPSCR */
2220 GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT);
2222 /* fmr - fmr. */
2223 /* XXX: beware that fmr never checks for NaNs nor updates the FPSCR */
2224 static void gen_fmr(DisasContext *ctx)
2226 if (unlikely(!ctx->fpu_enabled)) {
2227 gen_exception(ctx, POWERPC_EXCP_FPU);
2228 return;
2230 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2231 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2234 /* fnabs */
2235 /* XXX: beware that fnabs never checks for NaNs nor updates the FPSCR */
2236 GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT);
2237 /* fneg */
2238 /* XXX: beware that fneg never checks for NaNs nor updates the FPSCR */
2239 GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT);
2241 /*** Floating-Point status & ctrl register ***/
2243 /* mcrfs */
2244 static void gen_mcrfs(DisasContext *ctx)
2246 int bfa;
2248 if (unlikely(!ctx->fpu_enabled)) {
2249 gen_exception(ctx, POWERPC_EXCP_FPU);
2250 return;
2252 bfa = 4 * (7 - crfS(ctx->opcode));
2253 tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_fpscr, bfa);
2254 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
2255 tcg_gen_andi_i32(cpu_fpscr, cpu_fpscr, ~(0xF << bfa));
2258 /* mffs */
2259 static void gen_mffs(DisasContext *ctx)
2261 if (unlikely(!ctx->fpu_enabled)) {
2262 gen_exception(ctx, POWERPC_EXCP_FPU);
2263 return;
2265 gen_reset_fpstatus();
2266 tcg_gen_extu_i32_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
2267 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2270 /* mtfsb0 */
2271 static void gen_mtfsb0(DisasContext *ctx)
2273 uint8_t crb;
2275 if (unlikely(!ctx->fpu_enabled)) {
2276 gen_exception(ctx, POWERPC_EXCP_FPU);
2277 return;
2279 crb = 31 - crbD(ctx->opcode);
2280 gen_reset_fpstatus();
2281 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
2282 TCGv_i32 t0;
2283 /* NIP cannot be restored if the memory exception comes from a helper */
2284 gen_update_nip(ctx, ctx->nip - 4);
2285 t0 = tcg_const_i32(crb);
2286 gen_helper_fpscr_clrbit(t0);
2287 tcg_temp_free_i32(t0);
2289 if (unlikely(Rc(ctx->opcode) != 0)) {
2290 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2294 /* mtfsb1 */
2295 static void gen_mtfsb1(DisasContext *ctx)
2297 uint8_t crb;
2299 if (unlikely(!ctx->fpu_enabled)) {
2300 gen_exception(ctx, POWERPC_EXCP_FPU);
2301 return;
2303 crb = 31 - crbD(ctx->opcode);
2304 gen_reset_fpstatus();
2305 /* XXX: we pretend we can only do IEEE floating-point computations */
2306 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
2307 TCGv_i32 t0;
2308 /* NIP cannot be restored if the memory exception comes from a helper */
2309 gen_update_nip(ctx, ctx->nip - 4);
2310 t0 = tcg_const_i32(crb);
2311 gen_helper_fpscr_setbit(t0);
2312 tcg_temp_free_i32(t0);
2314 if (unlikely(Rc(ctx->opcode) != 0)) {
2315 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2317 /* We can raise a deferred exception */
2318 gen_helper_float_check_status();
2321 /* mtfsf */
2322 static void gen_mtfsf(DisasContext *ctx)
2324 TCGv_i32 t0;
2325 int L = ctx->opcode & 0x02000000;
2327 if (unlikely(!ctx->fpu_enabled)) {
2328 gen_exception(ctx, POWERPC_EXCP_FPU);
2329 return;
2331 /* NIP cannot be restored if the memory exception comes from a helper */
2332 gen_update_nip(ctx, ctx->nip - 4);
2333 gen_reset_fpstatus();
2334 if (L)
2335 t0 = tcg_const_i32(0xff);
2336 else
2337 t0 = tcg_const_i32(FM(ctx->opcode));
2338 gen_helper_store_fpscr(cpu_fpr[rB(ctx->opcode)], t0);
2339 tcg_temp_free_i32(t0);
2340 if (unlikely(Rc(ctx->opcode) != 0)) {
2341 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2343 /* We can raise a deferred exception */
2344 gen_helper_float_check_status();
2347 /* mtfsfi */
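/* mtfsfi writes a 4-bit immediate into one FPSCR field: bf selects the
 * field, sh = 7 - bf converts it into a nibble index from the low end, the
 * immediate is pre-shifted into place, and the (1 << sh) mask tells
 * gen_helper_store_fpscr which field to update. */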
2348 static void gen_mtfsfi(DisasContext *ctx)
2350 int bf, sh;
2351 TCGv_i64 t0;
2352 TCGv_i32 t1;
2354 if (unlikely(!ctx->fpu_enabled)) {
2355 gen_exception(ctx, POWERPC_EXCP_FPU);
2356 return;
2358 bf = crbD(ctx->opcode) >> 2;
2359 sh = 7 - bf;
2360 /* NIP cannot be restored if the memory exception comes from a helper */
2361 gen_update_nip(ctx, ctx->nip - 4);
2362 gen_reset_fpstatus();
2363 t0 = tcg_const_i64(FPIMM(ctx->opcode) << (4 * sh));
2364 t1 = tcg_const_i32(1 << sh);
2365 gen_helper_store_fpscr(t0, t1);
2366 tcg_temp_free_i64(t0);
2367 tcg_temp_free_i32(t1);
2368 if (unlikely(Rc(ctx->opcode) != 0)) {
2369 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2371 /* We can raise a deferred exception */
2372 gen_helper_float_check_status();
2375 /*** Addressing modes ***/
2376 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
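/* EA computation for immediate-index forms: maskl clears the low bits of
 * the displacement (DS-form callers pass 0x03, lq passes 0x0F, since those
 * bits are opcode bits), rA == 0 means a zero base, and on a 64-bit target
 * running in 32-bit mode the effective address is truncated to 32 bits. */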
2377 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2378 target_long maskl)
2380 target_long simm = SIMM(ctx->opcode);
2382 simm &= ~maskl;
2383 if (rA(ctx->opcode) == 0) {
2384 #if defined(TARGET_PPC64)
2385 if (!ctx->sf_mode) {
2386 tcg_gen_movi_tl(EA, (uint32_t)simm);
2387 } else
2388 #endif
2389 tcg_gen_movi_tl(EA, simm);
2390 } else if (likely(simm != 0)) {
2391 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2392 #if defined(TARGET_PPC64)
2393 if (!ctx->sf_mode) {
2394 tcg_gen_ext32u_tl(EA, EA);
2396 #endif
2397 } else {
2398 #if defined(TARGET_PPC64)
2399 if (!ctx->sf_mode) {
2400 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2401 } else
2402 #endif
2403 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2407 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
2409 if (rA(ctx->opcode) == 0) {
2410 #if defined(TARGET_PPC64)
2411 if (!ctx->sf_mode) {
2412 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2413 } else
2414 #endif
2415 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2416 } else {
2417 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2418 #if defined(TARGET_PPC64)
2419 if (!ctx->sf_mode) {
2420 tcg_gen_ext32u_tl(EA, EA);
2422 #endif
2426 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
2428 if (rA(ctx->opcode) == 0) {
2429 tcg_gen_movi_tl(EA, 0);
2430 } else {
2431 #if defined(TARGET_PPC64)
2432 if (!ctx->sf_mode) {
2433 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2434 } else
2435 #endif
2436 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2440 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
2441 target_long val)
2443 tcg_gen_addi_tl(ret, arg1, val);
2444 #if defined(TARGET_PPC64)
2445 if (!ctx->sf_mode) {
2446 tcg_gen_ext32u_tl(ret, ret);
2448 #endif
2451 static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask)
2453 int l1 = gen_new_label();
2454 TCGv t0 = tcg_temp_new();
2455 TCGv_i32 t1, t2;
2456 /* NIP cannot be restored if the memory exception comes from a helper */
2457 gen_update_nip(ctx, ctx->nip - 4);
2458 tcg_gen_andi_tl(t0, EA, mask);
2459 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2460 t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
2461 t2 = tcg_const_i32(0);
2462 gen_helper_raise_exception_err(t1, t2);
2463 tcg_temp_free_i32(t1);
2464 tcg_temp_free_i32(t2);
2465 gen_set_label(l1);
2466 tcg_temp_free(t0);
2469 /*** Integer load ***/
2470 static inline void gen_qemu_ld8u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2472 tcg_gen_qemu_ld8u(arg1, arg2, ctx->mem_idx);
2475 static inline void gen_qemu_ld8s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2477 tcg_gen_qemu_ld8s(arg1, arg2, ctx->mem_idx);
2480 static inline void gen_qemu_ld16u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2482 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2483 if (unlikely(ctx->le_mode)) {
2484 tcg_gen_bswap16_tl(arg1, arg1);
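/* Little-endian handling of the signed 16-bit load below: a sign-extended
 * value cannot simply be passed through bswap16, so the value is loaded
 * unsigned, byte-swapped, and then sign-extended explicitly. */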
2488 static inline void gen_qemu_ld16s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2490 if (unlikely(ctx->le_mode)) {
2491 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2492 tcg_gen_bswap16_tl(arg1, arg1);
2493 tcg_gen_ext16s_tl(arg1, arg1);
2494 } else {
2495 tcg_gen_qemu_ld16s(arg1, arg2, ctx->mem_idx);
2499 static inline void gen_qemu_ld32u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2501 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2502 if (unlikely(ctx->le_mode)) {
2503 tcg_gen_bswap32_tl(arg1, arg1);
2507 #if defined(TARGET_PPC64)
2508 static inline void gen_qemu_ld32s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2510 if (unlikely(ctx->le_mode)) {
2511 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2512 tcg_gen_bswap32_tl(arg1, arg1);
2513 tcg_gen_ext32s_tl(arg1, arg1);
2514 } else
2515 tcg_gen_qemu_ld32s(arg1, arg2, ctx->mem_idx);
2517 #endif
2519 static inline void gen_qemu_ld64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2521 tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
2522 if (unlikely(ctx->le_mode)) {
2523 tcg_gen_bswap64_i64(arg1, arg1);
2527 static inline void gen_qemu_st8(DisasContext *ctx, TCGv arg1, TCGv arg2)
2529 tcg_gen_qemu_st8(arg1, arg2, ctx->mem_idx);
2532 static inline void gen_qemu_st16(DisasContext *ctx, TCGv arg1, TCGv arg2)
2534 if (unlikely(ctx->le_mode)) {
2535 TCGv t0 = tcg_temp_new();
2536 tcg_gen_ext16u_tl(t0, arg1);
2537 tcg_gen_bswap16_tl(t0, t0);
2538 tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2539 tcg_temp_free(t0);
2540 } else {
2541 tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2545 static inline void gen_qemu_st32(DisasContext *ctx, TCGv arg1, TCGv arg2)
2547 if (unlikely(ctx->le_mode)) {
2548 TCGv t0 = tcg_temp_new();
2549 tcg_gen_ext32u_tl(t0, arg1);
2550 tcg_gen_bswap32_tl(t0, t0);
2551 tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2552 tcg_temp_free(t0);
2553 } else {
2554 tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2558 static inline void gen_qemu_st64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2560 if (unlikely(ctx->le_mode)) {
2561 TCGv_i64 t0 = tcg_temp_new_i64();
2562 tcg_gen_bswap64_i64(t0, arg1);
2563 tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
2564 tcg_temp_free_i64(t0);
2565 } else
2566 tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx);
2569 #define GEN_LD(name, ldop, opc, type) \
2570 static void glue(gen_, name)(DisasContext *ctx) \
2572 TCGv EA; \
2573 gen_set_access_type(ctx, ACCESS_INT); \
2574 EA = tcg_temp_new(); \
2575 gen_addr_imm_index(ctx, EA, 0); \
2576 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2577 tcg_temp_free(EA); \
2580 #define GEN_LDU(name, ldop, opc, type) \
2581 static void glue(gen_, name##u)(DisasContext *ctx) \
2583 TCGv EA; \
2584 if (unlikely(rA(ctx->opcode) == 0 || \
2585 rA(ctx->opcode) == rD(ctx->opcode))) { \
2586 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2587 return; \
2589 gen_set_access_type(ctx, ACCESS_INT); \
2590 EA = tcg_temp_new(); \
2591 if (type == PPC_64B) \
2592 gen_addr_imm_index(ctx, EA, 0x03); \
2593 else \
2594 gen_addr_imm_index(ctx, EA, 0); \
2595 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2596 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2597 tcg_temp_free(EA); \
2600 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
2601 static void glue(gen_, name##ux)(DisasContext *ctx) \
2603 TCGv EA; \
2604 if (unlikely(rA(ctx->opcode) == 0 || \
2605 rA(ctx->opcode) == rD(ctx->opcode))) { \
2606 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2607 return; \
2609 gen_set_access_type(ctx, ACCESS_INT); \
2610 EA = tcg_temp_new(); \
2611 gen_addr_reg_index(ctx, EA); \
2612 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2613 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2614 tcg_temp_free(EA); \
2617 #define GEN_LDX(name, ldop, opc2, opc3, type) \
2618 static void glue(gen_, name##x)(DisasContext *ctx) \
2620 TCGv EA; \
2621 gen_set_access_type(ctx, ACCESS_INT); \
2622 EA = tcg_temp_new(); \
2623 gen_addr_reg_index(ctx, EA); \
2624 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2625 tcg_temp_free(EA); \
2628 #define GEN_LDS(name, ldop, op, type) \
2629 GEN_LD(name, ldop, op | 0x20, type); \
2630 GEN_LDU(name, ldop, op | 0x21, type); \
2631 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
2632 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
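/* GEN_LDS expands to the four addressing forms of an integer load: D-form
 * (gen_<name>), update (gen_<name>u), update-indexed (gen_<name>ux) and
 * indexed (gen_<name>x).  The update forms reject rA == 0 and rA == rD,
 * which the architecture leaves as invalid forms, and write the computed EA
 * back into rA. */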
2634 /* lbz lbzu lbzux lbzx */
2635 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
2636 /* lha lhau lhaux lhax */
2637 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
2638 /* lhz lhzu lhzux lhzx */
2639 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
2640 /* lwz lwzu lwzux lwzx */
2641 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
2642 #if defined(TARGET_PPC64)
2643 /* lwaux */
2644 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
2645 /* lwax */
2646 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
2647 /* ldux */
2648 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B);
2649 /* ldx */
2650 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B);
2652 static void gen_ld(DisasContext *ctx)
2654 TCGv EA;
2655 if (Rc(ctx->opcode)) {
2656 if (unlikely(rA(ctx->opcode) == 0 ||
2657 rA(ctx->opcode) == rD(ctx->opcode))) {
2658 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2659 return;
2662 gen_set_access_type(ctx, ACCESS_INT);
2663 EA = tcg_temp_new();
2664 gen_addr_imm_index(ctx, EA, 0x03);
2665 if (ctx->opcode & 0x02) {
2666 /* lwa (lwau is undefined) */
2667 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2668 } else {
2669 /* ld - ldu */
2670 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2672 if (Rc(ctx->opcode))
2673 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2674 tcg_temp_free(EA);
2677 /* lq */
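/* lq loads an even/odd GPR pair: the 0x0F mask strips the low displacement
 * bits of the DQ form, an odd rd or rd == ra is rejected as an invalid
 * form, and on this implementation the instruction is privileged, so user
 * mode (mem_idx == 0) takes a privilege exception. */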
2678 static void gen_lq(DisasContext *ctx)
2680 #if defined(CONFIG_USER_ONLY)
2681 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2682 #else
2683 int ra, rd;
2684 TCGv EA;
2686 /* lq is privileged on this implementation: reject it in user mode */
2687 if (unlikely(ctx->mem_idx == 0)) {
2688 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2689 return;
2691 ra = rA(ctx->opcode);
2692 rd = rD(ctx->opcode);
2693 if (unlikely((rd & 1) || rd == ra)) {
2694 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2695 return;
2697 if (unlikely(ctx->le_mode)) {
2698 /* Little-endian mode is not handled */
2699 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2700 return;
2702 gen_set_access_type(ctx, ACCESS_INT);
2703 EA = tcg_temp_new();
2704 gen_addr_imm_index(ctx, EA, 0x0F);
2705 gen_qemu_ld64(ctx, cpu_gpr[rd], EA);
2706 gen_addr_add(ctx, EA, EA, 8);
2707 gen_qemu_ld64(ctx, cpu_gpr[rd+1], EA);
2708 tcg_temp_free(EA);
2709 #endif
2711 #endif
2713 /*** Integer store ***/
2714 #define GEN_ST(name, stop, opc, type) \
2715 static void glue(gen_, name)(DisasContext *ctx) \
2717 TCGv EA; \
2718 gen_set_access_type(ctx, ACCESS_INT); \
2719 EA = tcg_temp_new(); \
2720 gen_addr_imm_index(ctx, EA, 0); \
2721 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2722 tcg_temp_free(EA); \
2725 #define GEN_STU(name, stop, opc, type) \
2726 static void glue(gen_, stop##u)(DisasContext *ctx) \
2728 TCGv EA; \
2729 if (unlikely(rA(ctx->opcode) == 0)) { \
2730 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2731 return; \
2733 gen_set_access_type(ctx, ACCESS_INT); \
2734 EA = tcg_temp_new(); \
2735 if (type == PPC_64B) \
2736 gen_addr_imm_index(ctx, EA, 0x03); \
2737 else \
2738 gen_addr_imm_index(ctx, EA, 0); \
2739 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2740 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2741 tcg_temp_free(EA); \
2744 #define GEN_STUX(name, stop, opc2, opc3, type) \
2745 static void glue(gen_, name##ux)(DisasContext *ctx) \
2747 TCGv EA; \
2748 if (unlikely(rA(ctx->opcode) == 0)) { \
2749 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2750 return; \
2752 gen_set_access_type(ctx, ACCESS_INT); \
2753 EA = tcg_temp_new(); \
2754 gen_addr_reg_index(ctx, EA); \
2755 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2756 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2757 tcg_temp_free(EA); \
2760 #define GEN_STX(name, stop, opc2, opc3, type) \
2761 static void glue(gen_, name##x)(DisasContext *ctx) \
2763 TCGv EA; \
2764 gen_set_access_type(ctx, ACCESS_INT); \
2765 EA = tcg_temp_new(); \
2766 gen_addr_reg_index(ctx, EA); \
2767 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2768 tcg_temp_free(EA); \
2771 #define GEN_STS(name, stop, op, type) \
2772 GEN_ST(name, stop, op | 0x20, type); \
2773 GEN_STU(name, stop, op | 0x21, type); \
2774 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
2775 GEN_STX(name, stop, 0x17, op | 0x00, type)
2777 /* stb stbu stbux stbx */
2778 GEN_STS(stb, st8, 0x06, PPC_INTEGER);
2779 /* sth sthu sthux sthx */
2780 GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
2781 /* stw stwu stwux stwx */
2782 GEN_STS(stw, st32, 0x04, PPC_INTEGER);
2783 #if defined(TARGET_PPC64)
2784 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B);
2785 GEN_STX(std, st64, 0x15, 0x04, PPC_64B);
2787 static void gen_std(DisasContext *ctx)
2789 int rs;
2790 TCGv EA;
2792 rs = rS(ctx->opcode);
2793 if ((ctx->opcode & 0x3) == 0x2) {
2794 #if defined(CONFIG_USER_ONLY)
2795 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2796 #else
2797 /* stq */
2798 if (unlikely(ctx->mem_idx == 0)) {
2799 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2800 return;
2802 if (unlikely(rs & 1)) {
2803 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2804 return;
2806 if (unlikely(ctx->le_mode)) {
2807 /* Little-endian mode is not handled */
2808 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2809 return;
2811 gen_set_access_type(ctx, ACCESS_INT);
2812 EA = tcg_temp_new();
2813 gen_addr_imm_index(ctx, EA, 0x03);
2814 gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2815 gen_addr_add(ctx, EA, EA, 8);
2816 gen_qemu_st64(ctx, cpu_gpr[rs+1], EA);
2817 tcg_temp_free(EA);
2818 #endif
2819 } else {
2820 /* std / stdu */
2821 if (Rc(ctx->opcode)) {
2822 if (unlikely(rA(ctx->opcode) == 0)) {
2823 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2824 return;
2827 gen_set_access_type(ctx, ACCESS_INT);
2828 EA = tcg_temp_new();
2829 gen_addr_imm_index(ctx, EA, 0x03);
2830 gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2831 if (Rc(ctx->opcode))
2832 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2833 tcg_temp_free(EA);
2836 #endif
2837 /*** Integer load and store with byte reverse ***/
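/* The byte-reverse forms reuse the raw tcg_gen_qemu_* accesses, which work
 * in the target's default big-endian order.  The extra swap is therefore
 * emitted only when the CPU is not in little-endian mode; in little-endian
 * mode the byte-reversed access coincides with the raw load or store. */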
2838 /* lhbrx */
2839 static inline void gen_qemu_ld16ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2841 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2842 if (likely(!ctx->le_mode)) {
2843 tcg_gen_bswap16_tl(arg1, arg1);
2846 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
2848 /* lwbrx */
2849 static inline void gen_qemu_ld32ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2851 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2852 if (likely(!ctx->le_mode)) {
2853 tcg_gen_bswap32_tl(arg1, arg1);
2856 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
2858 /* sthbrx */
2859 static inline void gen_qemu_st16r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2861 if (likely(!ctx->le_mode)) {
2862 TCGv t0 = tcg_temp_new();
2863 tcg_gen_ext16u_tl(t0, arg1);
2864 tcg_gen_bswap16_tl(t0, t0);
2865 tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2866 tcg_temp_free(t0);
2867 } else {
2868 tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2871 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
2873 /* stwbrx */
2874 static inline void gen_qemu_st32r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2876 if (likely(!ctx->le_mode)) {
2877 TCGv t0 = tcg_temp_new();
2878 tcg_gen_ext32u_tl(t0, arg1);
2879 tcg_gen_bswap32_tl(t0, t0);
2880 tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2881 tcg_temp_free(t0);
2882 } else {
2883 tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2886 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
2888 /*** Integer load and store multiple ***/
2890 /* lmw */
2891 static void gen_lmw(DisasContext *ctx)
2893 TCGv t0;
2894 TCGv_i32 t1;
2895 gen_set_access_type(ctx, ACCESS_INT);
2896 /* NIP cannot be restored if the memory exception comes from a helper */
2897 gen_update_nip(ctx, ctx->nip - 4);
2898 t0 = tcg_temp_new();
2899 t1 = tcg_const_i32(rD(ctx->opcode));
2900 gen_addr_imm_index(ctx, t0, 0);
2901 gen_helper_lmw(t0, t1);
2902 tcg_temp_free(t0);
2903 tcg_temp_free_i32(t1);
2906 /* stmw */
2907 static void gen_stmw(DisasContext *ctx)
2909 TCGv t0;
2910 TCGv_i32 t1;
2911 gen_set_access_type(ctx, ACCESS_INT);
2912 /* NIP cannot be restored if the memory exception comes from a helper */
2913 gen_update_nip(ctx, ctx->nip - 4);
2914 t0 = tcg_temp_new();
2915 t1 = tcg_const_i32(rS(ctx->opcode));
2916 gen_addr_imm_index(ctx, t0, 0);
2917 gen_helper_stmw(t0, t1);
2918 tcg_temp_free(t0);
2919 tcg_temp_free_i32(t1);
2922 /*** Integer load and store strings ***/
2924 /* lswi */
2925 /* PowerPC32 specification says we must generate an exception if
2926 * rA is in the range of registers to be loaded.
2927 * On the other hand, IBM says this is valid, but rA won't be loaded.
2928 * For now, I'll follow the spec...
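/* The range check below is meant to reject the case where rA falls inside
 * the block of registers to be loaded, including when the register numbers
 * wrap from r31 back to r0 (start + nr > 32). */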
2930 static void gen_lswi(DisasContext *ctx)
2932 TCGv t0;
2933 TCGv_i32 t1, t2;
2934 int nb = NB(ctx->opcode);
2935 int start = rD(ctx->opcode);
2936 int ra = rA(ctx->opcode);
2937 int nr;
2939 if (nb == 0)
2940 nb = 32;
2941 nr = nb / 4;
2942 if (unlikely(((start + nr) > 32 &&
2943 start <= ra && (start + nr - 32) > ra) ||
2944 ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
2945 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
2946 return;
2948 gen_set_access_type(ctx, ACCESS_INT);
2949 /* NIP cannot be restored if the memory exception comes from a helper */
2950 gen_update_nip(ctx, ctx->nip - 4);
2951 t0 = tcg_temp_new();
2952 gen_addr_register(ctx, t0);
2953 t1 = tcg_const_i32(nb);
2954 t2 = tcg_const_i32(start);
2955 gen_helper_lsw(t0, t1, t2);
2956 tcg_temp_free(t0);
2957 tcg_temp_free_i32(t1);
2958 tcg_temp_free_i32(t2);
2961 /* lswx */
2962 static void gen_lswx(DisasContext *ctx)
2964 TCGv t0;
2965 TCGv_i32 t1, t2, t3;
2966 gen_set_access_type(ctx, ACCESS_INT);
2967 /* NIP cannot be restored if the memory exception comes from a helper */
2968 gen_update_nip(ctx, ctx->nip - 4);
2969 t0 = tcg_temp_new();
2970 gen_addr_reg_index(ctx, t0);
2971 t1 = tcg_const_i32(rD(ctx->opcode));
2972 t2 = tcg_const_i32(rA(ctx->opcode));
2973 t3 = tcg_const_i32(rB(ctx->opcode));
2974 gen_helper_lswx(t0, t1, t2, t3);
2975 tcg_temp_free(t0);
2976 tcg_temp_free_i32(t1);
2977 tcg_temp_free_i32(t2);
2978 tcg_temp_free_i32(t3);
2981 /* stswi */
2982 static void gen_stswi(DisasContext *ctx)
2984 TCGv t0;
2985 TCGv_i32 t1, t2;
2986 int nb = NB(ctx->opcode);
2987 gen_set_access_type(ctx, ACCESS_INT);
2988 /* NIP cannot be restored if the memory exception comes from a helper */
2989 gen_update_nip(ctx, ctx->nip - 4);
2990 t0 = tcg_temp_new();
2991 gen_addr_register(ctx, t0);
2992 if (nb == 0)
2993 nb = 32;
2994 t1 = tcg_const_i32(nb);
2995 t2 = tcg_const_i32(rS(ctx->opcode));
2996 gen_helper_stsw(t0, t1, t2);
2997 tcg_temp_free(t0);
2998 tcg_temp_free_i32(t1);
2999 tcg_temp_free_i32(t2);
3002 /* stswx */
3003 static void gen_stswx(DisasContext *ctx)
3005 TCGv t0;
3006 TCGv_i32 t1, t2;
3007 gen_set_access_type(ctx, ACCESS_INT);
3008 /* NIP cannot be restored if the memory exception comes from a helper */
3009 gen_update_nip(ctx, ctx->nip - 4);
3010 t0 = tcg_temp_new();
3011 gen_addr_reg_index(ctx, t0);
3012 t1 = tcg_temp_new_i32();
3013 tcg_gen_trunc_tl_i32(t1, cpu_xer);
3014 tcg_gen_andi_i32(t1, t1, 0x7F);
3015 t2 = tcg_const_i32(rS(ctx->opcode));
3016 gen_helper_stsw(t0, t1, t2);
3017 tcg_temp_free(t0);
3018 tcg_temp_free_i32(t1);
3019 tcg_temp_free_i32(t2);
3022 /*** Memory synchronisation ***/
3023 /* eieio */
3024 static void gen_eieio(DisasContext *ctx)
3028 /* isync */
3029 static void gen_isync(DisasContext *ctx)
3031 gen_stop_exception(ctx);
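/* Load-and-reserve / store-conditional are emulated with a recorded
 * reservation: lwarx/ldarx check alignment, save the EA in cpu_reserve and
 * the loaded value in reserve_val; stwcx./stdcx. compare their EA against
 * cpu_reserve, set CR0 (together with XER[SO]) and perform the store only on
 * a match, then invalidate the reservation by writing -1.  In the user-only
 * build the store-conditional instead raises POWERPC_EXCP_STCX and leaves
 * the actual compare-and-store to the exception path. */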
3034 /* lwarx */
3035 static void gen_lwarx(DisasContext *ctx)
3037 TCGv t0;
3038 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3039 gen_set_access_type(ctx, ACCESS_RES);
3040 t0 = tcg_temp_local_new();
3041 gen_addr_reg_index(ctx, t0);
3042 gen_check_align(ctx, t0, 0x03);
3043 gen_qemu_ld32u(ctx, gpr, t0);
3044 tcg_gen_mov_tl(cpu_reserve, t0);
3045 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUState, reserve_val));
3046 tcg_temp_free(t0);
3049 #if defined(CONFIG_USER_ONLY)
3050 static void gen_conditional_store (DisasContext *ctx, TCGv EA,
3051 int reg, int size)
3053 TCGv t0 = tcg_temp_new();
3054 uint32_t save_exception = ctx->exception;
3056 tcg_gen_st_tl(EA, cpu_env, offsetof(CPUState, reserve_ea));
3057 tcg_gen_movi_tl(t0, (size << 5) | reg);
3058 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, reserve_info));
3059 tcg_temp_free(t0);
3060 gen_update_nip(ctx, ctx->nip - 4);
3061 ctx->exception = POWERPC_EXCP_BRANCH;
3062 gen_exception(ctx, POWERPC_EXCP_STCX);
3063 ctx->exception = save_exception;
3065 #endif
3067 /* stwcx. */
3068 static void gen_stwcx_(DisasContext *ctx)
3070 TCGv t0;
3071 gen_set_access_type(ctx, ACCESS_RES);
3072 t0 = tcg_temp_local_new();
3073 gen_addr_reg_index(ctx, t0);
3074 gen_check_align(ctx, t0, 0x03);
3075 #if defined(CONFIG_USER_ONLY)
3076 gen_conditional_store(ctx, t0, rS(ctx->opcode), 4);
3077 #else
3079 int l1;
3081 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
3082 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
3083 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
3084 l1 = gen_new_label();
3085 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3086 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3087 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3088 gen_set_label(l1);
3089 tcg_gen_movi_tl(cpu_reserve, -1);
3091 #endif
3092 tcg_temp_free(t0);
3095 #if defined(TARGET_PPC64)
3096 /* ldarx */
3097 static void gen_ldarx(DisasContext *ctx)
3099 TCGv t0;
3100 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3101 gen_set_access_type(ctx, ACCESS_RES);
3102 t0 = tcg_temp_local_new();
3103 gen_addr_reg_index(ctx, t0);
3104 gen_check_align(ctx, t0, 0x07);
3105 gen_qemu_ld64(ctx, gpr, t0);
3106 tcg_gen_mov_tl(cpu_reserve, t0);
3107 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUState, reserve_val));
3108 tcg_temp_free(t0);
3111 /* stdcx. */
3112 static void gen_stdcx_(DisasContext *ctx)
3114 TCGv t0;
3115 gen_set_access_type(ctx, ACCESS_RES);
3116 t0 = tcg_temp_local_new();
3117 gen_addr_reg_index(ctx, t0);
3118 gen_check_align(ctx, t0, 0x07);
3119 #if defined(CONFIG_USER_ONLY)
3120 gen_conditional_store(ctx, t0, rS(ctx->opcode), 8);
3121 #else
3123 int l1;
3124 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
3125 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
3126 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
3127 l1 = gen_new_label();
3128 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3129 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3130 gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3131 gen_set_label(l1);
3132 tcg_gen_movi_tl(cpu_reserve, -1);
3134 #endif
3135 tcg_temp_free(t0);
3137 #endif /* defined(TARGET_PPC64) */
3139 /* sync */
3140 static void gen_sync(DisasContext *ctx)
3144 /* wait */
3145 static void gen_wait(DisasContext *ctx)
3147 TCGv_i32 t0 = tcg_const_i32(1);
3148 tcg_gen_st_i32(t0, cpu_env, offsetof(CPUState, halted));
3149 tcg_temp_free_i32(t0);
3150 /* Stop translation, as the CPU is supposed to sleep from now on */
3151 gen_exception_err(ctx, EXCP_HLT, 1);
3154 /*** Floating-point load ***/
3155 #define GEN_LDF(name, ldop, opc, type) \
3156 static void glue(gen_, name)(DisasContext *ctx) \
3158 TCGv EA; \
3159 if (unlikely(!ctx->fpu_enabled)) { \
3160 gen_exception(ctx, POWERPC_EXCP_FPU); \
3161 return; \
3163 gen_set_access_type(ctx, ACCESS_FLOAT); \
3164 EA = tcg_temp_new(); \
3165 gen_addr_imm_index(ctx, EA, 0); \
3166 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3167 tcg_temp_free(EA); \
3170 #define GEN_LDUF(name, ldop, opc, type) \
3171 static void glue(gen_, name##u)(DisasContext *ctx) \
3173 TCGv EA; \
3174 if (unlikely(!ctx->fpu_enabled)) { \
3175 gen_exception(ctx, POWERPC_EXCP_FPU); \
3176 return; \
3178 if (unlikely(rA(ctx->opcode) == 0)) { \
3179 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3180 return; \
3182 gen_set_access_type(ctx, ACCESS_FLOAT); \
3183 EA = tcg_temp_new(); \
3184 gen_addr_imm_index(ctx, EA, 0); \
3185 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3186 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3187 tcg_temp_free(EA); \
3190 #define GEN_LDUXF(name, ldop, opc, type) \
3191 static void glue(gen_, name##ux)(DisasContext *ctx) \
3193 TCGv EA; \
3194 if (unlikely(!ctx->fpu_enabled)) { \
3195 gen_exception(ctx, POWERPC_EXCP_FPU); \
3196 return; \
3198 if (unlikely(rA(ctx->opcode) == 0)) { \
3199 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3200 return; \
3202 gen_set_access_type(ctx, ACCESS_FLOAT); \
3203 EA = tcg_temp_new(); \
3204 gen_addr_reg_index(ctx, EA); \
3205 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3206 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3207 tcg_temp_free(EA); \
3210 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
3211 static void glue(gen_, name##x)(DisasContext *ctx) \
3213 TCGv EA; \
3214 if (unlikely(!ctx->fpu_enabled)) { \
3215 gen_exception(ctx, POWERPC_EXCP_FPU); \
3216 return; \
3218 gen_set_access_type(ctx, ACCESS_FLOAT); \
3219 EA = tcg_temp_new(); \
3220 gen_addr_reg_index(ctx, EA); \
3221 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3222 tcg_temp_free(EA); \
3225 #define GEN_LDFS(name, ldop, op, type) \
3226 GEN_LDF(name, ldop, op | 0x20, type); \
3227 GEN_LDUF(name, ldop, op | 0x21, type); \
3228 GEN_LDUXF(name, ldop, op | 0x01, type); \
3229 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
3231 static inline void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3233 TCGv t0 = tcg_temp_new();
3234 TCGv_i32 t1 = tcg_temp_new_i32();
3235 gen_qemu_ld32u(ctx, t0, arg2);
3236 tcg_gen_trunc_tl_i32(t1, t0);
3237 tcg_temp_free(t0);
3238 gen_helper_float32_to_float64(arg1, t1);
3239 tcg_temp_free_i32(t1);
3242 /* lfd lfdu lfdux lfdx */
3243 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT);
3244 /* lfs lfsu lfsux lfsx */
3245 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);
3247 /*** Floating-point store ***/
3248 #define GEN_STF(name, stop, opc, type) \
3249 static void glue(gen_, name)(DisasContext *ctx) \
3251 TCGv EA; \
3252 if (unlikely(!ctx->fpu_enabled)) { \
3253 gen_exception(ctx, POWERPC_EXCP_FPU); \
3254 return; \
3256 gen_set_access_type(ctx, ACCESS_FLOAT); \
3257 EA = tcg_temp_new(); \
3258 gen_addr_imm_index(ctx, EA, 0); \
3259 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3260 tcg_temp_free(EA); \
3263 #define GEN_STUF(name, stop, opc, type) \
3264 static void glue(gen_, name##u)(DisasContext *ctx) \
3266 TCGv EA; \
3267 if (unlikely(!ctx->fpu_enabled)) { \
3268 gen_exception(ctx, POWERPC_EXCP_FPU); \
3269 return; \
3271 if (unlikely(rA(ctx->opcode) == 0)) { \
3272 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3273 return; \
3275 gen_set_access_type(ctx, ACCESS_FLOAT); \
3276 EA = tcg_temp_new(); \
3277 gen_addr_imm_index(ctx, EA, 0); \
3278 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3279 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3280 tcg_temp_free(EA); \
3283 #define GEN_STUXF(name, stop, opc, type) \
3284 static void glue(gen_, name##ux)(DisasContext *ctx) \
3286 TCGv EA; \
3287 if (unlikely(!ctx->fpu_enabled)) { \
3288 gen_exception(ctx, POWERPC_EXCP_FPU); \
3289 return; \
3291 if (unlikely(rA(ctx->opcode) == 0)) { \
3292 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3293 return; \
3295 gen_set_access_type(ctx, ACCESS_FLOAT); \
3296 EA = tcg_temp_new(); \
3297 gen_addr_reg_index(ctx, EA); \
3298 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3299 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3300 tcg_temp_free(EA); \
3303 #define GEN_STXF(name, stop, opc2, opc3, type) \
3304 static void glue(gen_, name##x)(DisasContext *ctx) \
3306 TCGv EA; \
3307 if (unlikely(!ctx->fpu_enabled)) { \
3308 gen_exception(ctx, POWERPC_EXCP_FPU); \
3309 return; \
3311 gen_set_access_type(ctx, ACCESS_FLOAT); \
3312 EA = tcg_temp_new(); \
3313 gen_addr_reg_index(ctx, EA); \
3314 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3315 tcg_temp_free(EA); \
3318 #define GEN_STFS(name, stop, op, type) \
3319 GEN_STF(name, stop, op | 0x20, type); \
3320 GEN_STUF(name, stop, op | 0x21, type); \
3321 GEN_STUXF(name, stop, op | 0x01, type); \
3322 GEN_STXF(name, stop, 0x17, op | 0x00, type)
3324 static inline void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3326 TCGv_i32 t0 = tcg_temp_new_i32();
3327 TCGv t1 = tcg_temp_new();
3328 gen_helper_float64_to_float32(t0, arg1);
3329 tcg_gen_extu_i32_tl(t1, t0);
3330 tcg_temp_free_i32(t0);
3331 gen_qemu_st32(ctx, t1, arg2);
3332 tcg_temp_free(t1);
3335 /* stfd stfdu stfdux stfdx */
3336 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT);
3337 /* stfs stfsu stfsux stfsx */
3338 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);
3340 /* Optional: */
3341 static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3343 TCGv t0 = tcg_temp_new();
3344 tcg_gen_trunc_i64_tl(t0, arg1);
3345 gen_qemu_st32(ctx, t0, arg2);
3346 tcg_temp_free(t0);
3348 /* stfiwx */
3349 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
3351 /*** Branch ***/
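/* gen_goto_tb implements direct block chaining: when the branch target lies
 * on the same page as the current TB and no single-stepping is active, the
 * generated code uses goto_tb/exit_tb((tcg_target_long)tb + n) so the TBs
 * can be linked; otherwise it just writes the new NIP and exits, raising
 * TRACE or debug exceptions for the various single-step modes. */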
3352 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
3354 TranslationBlock *tb;
3355 tb = ctx->tb;
3356 #if defined(TARGET_PPC64)
3357 if (!ctx->sf_mode)
3358 dest = (uint32_t) dest;
3359 #endif
3360 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
3361 likely(!ctx->singlestep_enabled)) {
3362 tcg_gen_goto_tb(n);
3363 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3364 tcg_gen_exit_tb((tcg_target_long)tb + n);
3365 } else {
3366 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3367 if (unlikely(ctx->singlestep_enabled)) {
3368 if ((ctx->singlestep_enabled &
3369 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
3370 ctx->exception == POWERPC_EXCP_BRANCH) {
3371 target_ulong tmp = ctx->nip;
3372 ctx->nip = dest;
3373 gen_exception(ctx, POWERPC_EXCP_TRACE);
3374 ctx->nip = tmp;
3376 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3377 gen_debug_exception(ctx);
3380 tcg_gen_exit_tb(0);
3384 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
3386 #if defined(TARGET_PPC64)
3387 if (ctx->sf_mode == 0)
3388 tcg_gen_movi_tl(cpu_lr, (uint32_t)nip);
3389 else
3390 #endif
3391 tcg_gen_movi_tl(cpu_lr, nip);
3394 /* b ba bl bla */
3395 static void gen_b(DisasContext *ctx)
3397 target_ulong li, target;
3399 ctx->exception = POWERPC_EXCP_BRANCH;
3400 /* sign extend LI */
3401 #if defined(TARGET_PPC64)
3402 if (ctx->sf_mode)
3403 li = ((int64_t)LI(ctx->opcode) << 38) >> 38;
3404 else
3405 #endif
3406 li = ((int32_t)LI(ctx->opcode) << 6) >> 6;
3407 if (likely(AA(ctx->opcode) == 0))
3408 target = ctx->nip + li - 4;
3409 else
3410 target = li;
3411 if (LK(ctx->opcode))
3412 gen_setlr(ctx, ctx->nip);
3413 gen_goto_tb(ctx, 0, target);
3416 #define BCOND_IM 0
3417 #define BCOND_LR 1
3418 #define BCOND_CTR 2
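/* Conditional branch generation: l1 is the fall-through (branch not taken)
 * label.  As decoded here, BO bit 0x04 clear means "decrement CTR and test
 * it" (0x02 selects CTR == 0 rather than CTR != 0), and BO bit 0x10 clear
 * means "also test the CR bit selected by BI" (0x08 selects branching on a
 * set bit rather than a clear one).  bcctr with a CTR-decrementing BO is an
 * invalid form and is rejected. */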
3420 static inline void gen_bcond(DisasContext *ctx, int type)
3422 uint32_t bo = BO(ctx->opcode);
3423 int l1;
3424 TCGv target;
3426 ctx->exception = POWERPC_EXCP_BRANCH;
3427 if (type == BCOND_LR || type == BCOND_CTR) {
3428 target = tcg_temp_local_new();
3429 if (type == BCOND_CTR)
3430 tcg_gen_mov_tl(target, cpu_ctr);
3431 else
3432 tcg_gen_mov_tl(target, cpu_lr);
3433 } else {
3434 TCGV_UNUSED(target);
3436 if (LK(ctx->opcode))
3437 gen_setlr(ctx, ctx->nip);
3438 l1 = gen_new_label();
3439 if ((bo & 0x4) == 0) {
3440 /* Decrement and test CTR */
3441 TCGv temp = tcg_temp_new();
3442 if (unlikely(type == BCOND_CTR)) {
3443 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3444 return;
3446 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3447 #if defined(TARGET_PPC64)
3448 if (!ctx->sf_mode)
3449 tcg_gen_ext32u_tl(temp, cpu_ctr);
3450 else
3451 #endif
3452 tcg_gen_mov_tl(temp, cpu_ctr);
3453 if (bo & 0x2) {
3454 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3455 } else {
3456 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3458 tcg_temp_free(temp);
3460 if ((bo & 0x10) == 0) {
3461 /* Test CR */
3462 uint32_t bi = BI(ctx->opcode);
3463 uint32_t mask = 1 << (3 - (bi & 0x03));
3464 TCGv_i32 temp = tcg_temp_new_i32();
3466 if (bo & 0x8) {
3467 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3468 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3469 } else {
3470 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3471 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3473 tcg_temp_free_i32(temp);
3475 if (type == BCOND_IM) {
3476 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3477 if (likely(AA(ctx->opcode) == 0)) {
3478 gen_goto_tb(ctx, 0, ctx->nip + li - 4);
3479 } else {
3480 gen_goto_tb(ctx, 0, li);
3482 gen_set_label(l1);
3483 gen_goto_tb(ctx, 1, ctx->nip);
3484 } else {
3485 #if defined(TARGET_PPC64)
3486 if (!(ctx->sf_mode))
3487 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3488 else
3489 #endif
3490 tcg_gen_andi_tl(cpu_nip, target, ~3);
3491 tcg_gen_exit_tb(0);
3492 gen_set_label(l1);
3493 #if defined(TARGET_PPC64)
3494 if (!(ctx->sf_mode))
3495 tcg_gen_movi_tl(cpu_nip, (uint32_t)ctx->nip);
3496 else
3497 #endif
3498 tcg_gen_movi_tl(cpu_nip, ctx->nip);
3499 tcg_gen_exit_tb(0);
3503 static void gen_bc(DisasContext *ctx)
3505 gen_bcond(ctx, BCOND_IM);
3508 static void gen_bcctr(DisasContext *ctx)
3510 gen_bcond(ctx, BCOND_CTR);
3513 static void gen_bclr(DisasContext *ctx)
3515 gen_bcond(ctx, BCOND_LR);
3518 /*** Condition register logical ***/
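/* CR bits are stored as eight 4-bit cpu_crf fields.  Each CR logical op
 * shifts the source fields so that the crbA/crbB bits line up with the
 * destination bit position inside crbD's field, applies the TCG logic op,
 * masks the single result bit and merges it back into the destination
 * field. */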
3519 #define GEN_CRLOGIC(name, tcg_op, opc) \
3520 static void glue(gen_, name)(DisasContext *ctx) \
3522 uint8_t bitmask; \
3523 int sh; \
3524 TCGv_i32 t0, t1; \
3525 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3526 t0 = tcg_temp_new_i32(); \
3527 if (sh > 0) \
3528 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
3529 else if (sh < 0) \
3530 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
3531 else \
3532 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
3533 t1 = tcg_temp_new_i32(); \
3534 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3535 if (sh > 0) \
3536 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
3537 else if (sh < 0) \
3538 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
3539 else \
3540 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
3541 tcg_op(t0, t0, t1); \
3542 bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3543 tcg_gen_andi_i32(t0, t0, bitmask); \
3544 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3545 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
3546 tcg_temp_free_i32(t0); \
3547 tcg_temp_free_i32(t1); \
3550 /* crand */
3551 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3552 /* crandc */
3553 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3554 /* creqv */
3555 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3556 /* crnand */
3557 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3558 /* crnor */
3559 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3560 /* cror */
3561 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3562 /* crorc */
3563 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3564 /* crxor */
3565 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
3567 /* mcrf */
3568 static void gen_mcrf(DisasContext *ctx)
3570 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3573 /*** System linkage ***/
3575 /* rfi (mem_idx only) */
3576 static void gen_rfi(DisasContext *ctx)
3578 #if defined(CONFIG_USER_ONLY)
3579 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3580 #else
3581 /* Restore CPU state */
3582 if (unlikely(!ctx->mem_idx)) {
3583 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3584 return;
3586 gen_helper_rfi();
3587 gen_sync_exception(ctx);
3588 #endif
3591 #if defined(TARGET_PPC64)
3592 static void gen_rfid(DisasContext *ctx)
3594 #if defined(CONFIG_USER_ONLY)
3595 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3596 #else
3597 /* Restore CPU state */
3598 if (unlikely(!ctx->mem_idx)) {
3599 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3600 return;
3602 gen_helper_rfid();
3603 gen_sync_exception(ctx);
3604 #endif
3607 static void gen_hrfid(DisasContext *ctx)
3609 #if defined(CONFIG_USER_ONLY)
3610 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3611 #else
3612 /* Restore CPU state */
3613 if (unlikely(ctx->mem_idx <= 1)) {
3614 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3615 return;
3617 gen_helper_hrfid();
3618 gen_sync_exception(ctx);
3619 #endif
3621 #endif
3623 /* sc */
3624 #if defined(CONFIG_USER_ONLY)
3625 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3626 #else
3627 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3628 #endif
3629 static void gen_sc(DisasContext *ctx)
3631 uint32_t lev;
3633 lev = (ctx->opcode >> 5) & 0x7F;
3634 gen_exception_err(ctx, POWERPC_SYSCALL, lev);
3637 /*** Trap ***/
3639 /* tw */
3640 static void gen_tw(DisasContext *ctx)
3642 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3643 /* Update the nip since this might generate a trap exception */
3644 gen_update_nip(ctx, ctx->nip);
3645 gen_helper_tw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
3646 tcg_temp_free_i32(t0);
3649 /* twi */
3650 static void gen_twi(DisasContext *ctx)
3652 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3653 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3654 /* Update the nip since this might generate a trap exception */
3655 gen_update_nip(ctx, ctx->nip);
3656 gen_helper_tw(cpu_gpr[rA(ctx->opcode)], t0, t1);
3657 tcg_temp_free(t0);
3658 tcg_temp_free_i32(t1);
3661 #if defined(TARGET_PPC64)
3662 /* td */
3663 static void gen_td(DisasContext *ctx)
3665 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3666 /* Update the nip since this might generate a trap exception */
3667 gen_update_nip(ctx, ctx->nip);
3668 gen_helper_td(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
3669 tcg_temp_free_i32(t0);
3672 /* tdi */
3673 static void gen_tdi(DisasContext *ctx)
3675 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3676 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3677 /* Update the nip since this might generate a trap exception */
3678 gen_update_nip(ctx, ctx->nip);
3679 gen_helper_td(cpu_gpr[rA(ctx->opcode)], t0, t1);
3680 tcg_temp_free(t0);
3681 tcg_temp_free_i32(t1);
3683 #endif
3685 /*** Processor control ***/
3687 /* mcrxr */
3688 static void gen_mcrxr(DisasContext *ctx)
3690 tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], cpu_xer);
3691 tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], XER_CA);
3692 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_SO | 1 << XER_OV | 1 << XER_CA));
3695 /* mfcr mfocrf */
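/* When the 0x00100000 opcode bit is set (mfocrf) and exactly one CRM bit is
 * set, only the selected CR field is copied into the matching nibble of rD;
 * other CRM values leave rD unchanged.  The plain mfcr path concatenates all
 * eight 4-bit CR fields into the low 32 bits of rD. */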
3696 static void gen_mfcr(DisasContext *ctx)
3698 uint32_t crm, crn;
3700 if (likely(ctx->opcode & 0x00100000)) {
3701 crm = CRM(ctx->opcode);
3702 if (likely(crm && ((crm & (crm - 1)) == 0))) {
3703 crn = ctz32 (crm);
3704 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
3705 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
3706 cpu_gpr[rD(ctx->opcode)], crn * 4);
3708 } else {
3709 TCGv_i32 t0 = tcg_temp_new_i32();
3710 tcg_gen_mov_i32(t0, cpu_crf[0]);
3711 tcg_gen_shli_i32(t0, t0, 4);
3712 tcg_gen_or_i32(t0, t0, cpu_crf[1]);
3713 tcg_gen_shli_i32(t0, t0, 4);
3714 tcg_gen_or_i32(t0, t0, cpu_crf[2]);
3715 tcg_gen_shli_i32(t0, t0, 4);
3716 tcg_gen_or_i32(t0, t0, cpu_crf[3]);
3717 tcg_gen_shli_i32(t0, t0, 4);
3718 tcg_gen_or_i32(t0, t0, cpu_crf[4]);
3719 tcg_gen_shli_i32(t0, t0, 4);
3720 tcg_gen_or_i32(t0, t0, cpu_crf[5]);
3721 tcg_gen_shli_i32(t0, t0, 4);
3722 tcg_gen_or_i32(t0, t0, cpu_crf[6]);
3723 tcg_gen_shli_i32(t0, t0, 4);
3724 tcg_gen_or_i32(t0, t0, cpu_crf[7]);
3725 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
3726 tcg_temp_free_i32(t0);
3730 /* mfmsr */
3731 static void gen_mfmsr(DisasContext *ctx)
3733 #if defined(CONFIG_USER_ONLY)
3734 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3735 #else
3736 if (unlikely(!ctx->mem_idx)) {
3737 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3738 return;
3740 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
3741 #endif
3744 static void spr_noaccess(void *opaque, int gprn, int sprn)
3746 #if 0
3747 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
3748 printf("ERROR: try to access SPR %d !\n", sprn);
3749 #endif
3751 #define SPR_NOACCESS (&spr_noaccess)
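/* SPR accesses go through per-SPR callback tables: mem_idx == 2 selects the
 * hypervisor (hea) callbacks, any other non-zero mem_idx the supervisor
 * (oea) ones, and user mode the uea callbacks.  A NULL callback means the
 * SPR is undefined (invalid-SPR exception); SPR_NOACCESS marks an SPR that
 * exists but is privileged at the current level. */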
3753 /* mfspr */
3754 static inline void gen_op_mfspr(DisasContext *ctx)
3756 void (*read_cb)(void *opaque, int gprn, int sprn);
3757 uint32_t sprn = SPR(ctx->opcode);
3759 #if !defined(CONFIG_USER_ONLY)
3760 if (ctx->mem_idx == 2)
3761 read_cb = ctx->spr_cb[sprn].hea_read;
3762 else if (ctx->mem_idx)
3763 read_cb = ctx->spr_cb[sprn].oea_read;
3764 else
3765 #endif
3766 read_cb = ctx->spr_cb[sprn].uea_read;
3767 if (likely(read_cb != NULL)) {
3768 if (likely(read_cb != SPR_NOACCESS)) {
3769 (*read_cb)(ctx, rD(ctx->opcode), sprn);
3770 } else {
3771 /* Privilege exception */
3772 /* This is a hack to avoid warnings when running Linux:
3773 * this OS breaks the PowerPC virtualisation model,
3774 * allowing userland applications to read the PVR
3776 if (sprn != SPR_PVR) {
3777 qemu_log("Trying to read privileged spr %d %03x at "
3778 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3779 printf("Trying to read privileged spr %d %03x at "
3780 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3782 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3784 } else {
3785 /* Not defined */
3786 qemu_log("Trying to read invalid spr %d %03x at "
3787 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3788 printf("Trying to read invalid spr %d %03x at " TARGET_FMT_lx "\n",
3789 sprn, sprn, ctx->nip);
3790 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
3794 static void gen_mfspr(DisasContext *ctx)
3796 gen_op_mfspr(ctx);
3799 /* mftb */
3800 static void gen_mftb(DisasContext *ctx)
3802 gen_op_mfspr(ctx);
3805 /* mtcrf mtocrf */
3806 static void gen_mtcrf(DisasContext *ctx)
3808 uint32_t crm, crn;
3810 crm = CRM(ctx->opcode);
3811 if (likely((ctx->opcode & 0x00100000))) {
3812 if (crm && ((crm & (crm - 1)) == 0)) {
3813 TCGv_i32 temp = tcg_temp_new_i32();
3814 crn = ctz32 (crm);
3815 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3816 tcg_gen_shri_i32(temp, temp, crn * 4);
3817 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
3818 tcg_temp_free_i32(temp);
3820 } else {
3821 TCGv_i32 temp = tcg_temp_new_i32();
3822 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3823 for (crn = 0 ; crn < 8 ; crn++) {
3824 if (crm & (1 << crn)) {
3825 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
3826 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
3829 tcg_temp_free_i32(temp);
3833 /* mtmsr */
3834 #if defined(TARGET_PPC64)
3835 static void gen_mtmsrd(DisasContext *ctx)
3837 #if defined(CONFIG_USER_ONLY)
3838 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3839 #else
3840 if (unlikely(!ctx->mem_idx)) {
3841 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3842 return;
3844 if (ctx->opcode & 0x00010000) {
3845 /* Special form that does not need any synchronisation */
3846 TCGv t0 = tcg_temp_new();
3847 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3848 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
3849 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3850 tcg_temp_free(t0);
3851 } else {
3852 /* XXX: we need to update the NIP before the store;
3853 * if we enter power-saving mode, we will exit the loop
3854 * directly from ppc_store_msr
3856 gen_update_nip(ctx, ctx->nip);
3857 gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
3858 /* Must stop the translation as the machine state may have changed */
3859 /* Note that mtmsr is not always defined as context-synchronizing */
3860 gen_stop_exception(ctx);
3862 #endif
3864 #endif
3866 static void gen_mtmsr(DisasContext *ctx)
3868 #if defined(CONFIG_USER_ONLY)
3869 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3870 #else
3871 if (unlikely(!ctx->mem_idx)) {
3872 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3873 return;
3875 if (ctx->opcode & 0x00010000) {
3876 /* Special form that does not need any synchronisation */
3877 TCGv t0 = tcg_temp_new();
3878 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3879 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
3880 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3881 tcg_temp_free(t0);
3882 } else {
3883 /* XXX: we need to update the NIP before the store;
3884 * if we enter power-saving mode, we will exit the loop
3885 * directly from ppc_store_msr
3887 gen_update_nip(ctx, ctx->nip);
3888 #if defined(TARGET_PPC64)
3889 if (!ctx->sf_mode) {
3890 TCGv t0 = tcg_temp_new();
3891 TCGv t1 = tcg_temp_new();
3892 tcg_gen_andi_tl(t0, cpu_msr, 0xFFFFFFFF00000000ULL);
3893 tcg_gen_ext32u_tl(t1, cpu_gpr[rS(ctx->opcode)]);
3894 tcg_gen_or_tl(t0, t0, t1);
3895 tcg_temp_free(t1);
3896 gen_helper_store_msr(t0);
3897 tcg_temp_free(t0);
3898 } else
3899 #endif
3900 gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
3901 /* Must stop the translation as the machine state may have changed */
3902 /* Note that mtmsr is not always defined as context-synchronizing */
3903 gen_stop_exception(ctx);
3905 #endif
3908 /* mtspr */
3909 static void gen_mtspr(DisasContext *ctx)
3911 void (*write_cb)(void *opaque, int sprn, int gprn);
3912 uint32_t sprn = SPR(ctx->opcode);
3914 #if !defined(CONFIG_USER_ONLY)
3915 if (ctx->mem_idx == 2)
3916 write_cb = ctx->spr_cb[sprn].hea_write;
3917 else if (ctx->mem_idx)
3918 write_cb = ctx->spr_cb[sprn].oea_write;
3919 else
3920 #endif
3921 write_cb = ctx->spr_cb[sprn].uea_write;
3922 if (likely(write_cb != NULL)) {
3923 if (likely(write_cb != SPR_NOACCESS)) {
3924 (*write_cb)(ctx, sprn, rS(ctx->opcode));
3925 } else {
3926 /* Privilege exception */
3927 qemu_log("Trying to write privileged spr %d %03x at "
3928 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3929 printf("Trying to write privileged spr %d %03x at " TARGET_FMT_lx
3930 "\n", sprn, sprn, ctx->nip);
3931 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3933 } else {
3934 /* Not defined */
3935 qemu_log("Trying to write invalid spr %d %03x at "
3936 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3937 printf("Trying to write invalid spr %d %03x at " TARGET_FMT_lx "\n",
3938 sprn, sprn, ctx->nip);
3939 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
3943 /*** Cache management ***/
3945 /* dcbf */
3946 static void gen_dcbf(DisasContext *ctx)
3948 /* XXX: specification says this is treated as a load by the MMU */
3949 TCGv t0;
3950 gen_set_access_type(ctx, ACCESS_CACHE);
3951 t0 = tcg_temp_new();
3952 gen_addr_reg_index(ctx, t0);
3953 gen_qemu_ld8u(ctx, t0, t0);
3954 tcg_temp_free(t0);
3957 /* dcbi (Supervisor only) */
3958 static void gen_dcbi(DisasContext *ctx)
3960 #if defined(CONFIG_USER_ONLY)
3961 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3962 #else
3963 TCGv EA, val;
3964 if (unlikely(!ctx->mem_idx)) {
3965 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3966 return;
3968 EA = tcg_temp_new();
3969 gen_set_access_type(ctx, ACCESS_CACHE);
3970 gen_addr_reg_index(ctx, EA);
3971 val = tcg_temp_new();
3972 /* XXX: specification says this should be treated as a store by the MMU */
3973 gen_qemu_ld8u(ctx, val, EA);
3974 gen_qemu_st8(ctx, val, EA);
3975 tcg_temp_free(val);
3976 tcg_temp_free(EA);
3977 #endif
3980 /* dcbst */
3981 static void gen_dcbst(DisasContext *ctx)
3983 /* XXX: specification says this is treated as a load by the MMU */
3984 TCGv t0;
3985 gen_set_access_type(ctx, ACCESS_CACHE);
3986 t0 = tcg_temp_new();
3987 gen_addr_reg_index(ctx, t0);
3988 gen_qemu_ld8u(ctx, t0, t0);
3989 tcg_temp_free(t0);
3992 /* dcbt */
3993 static void gen_dcbt(DisasContext *ctx)
3995 /* interpreted as no-op */
3996 /* XXX: specification says this is treated as a load by the MMU
3997 * but does not generate any exception
4001 /* dcbtst */
4002 static void gen_dcbtst(DisasContext *ctx)
4004 /* interpreted as no-op */
4005 /* XXX: specification says this is treated as a load by the MMU
4006 * but does not generate any exception
4010 /* dcbz */
4011 static void gen_dcbz(DisasContext *ctx)
4013 TCGv t0;
4014 gen_set_access_type(ctx, ACCESS_CACHE);
4015 /* NIP cannot be restored if the memory exception comes from a helper */
4016 gen_update_nip(ctx, ctx->nip - 4);
4017 t0 = tcg_temp_new();
4018 gen_addr_reg_index(ctx, t0);
4019 gen_helper_dcbz(t0);
4020 tcg_temp_free(t0);
4023 static void gen_dcbz_970(DisasContext *ctx)
4025 TCGv t0;
4026 gen_set_access_type(ctx, ACCESS_CACHE);
4027 /* NIP cannot be restored if the memory exception comes from a helper */
4028 gen_update_nip(ctx, ctx->nip - 4);
4029 t0 = tcg_temp_new();
4030 gen_addr_reg_index(ctx, t0);
4031 if (ctx->opcode & 0x00200000)
4032 gen_helper_dcbz(t0);
4033 else
4034 gen_helper_dcbz_970(t0);
4035 tcg_temp_free(t0);
4038 /* dst / dstt */
4039 static void gen_dst(DisasContext *ctx)
4041 if (rA(ctx->opcode) == 0) {
4042 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4043 } else {
4044 /* interpreted as no-op */
4048 /* dstst / dststt */
4049 static void gen_dstst(DisasContext *ctx)
4051 if (rA(ctx->opcode) == 0) {
4052 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4053 } else {
4054 /* interpreted as no-op */
4059 /* dss / dssall */
4060 static void gen_dss(DisasContext *ctx)
4062 /* interpreted as no-op */
4065 /* icbi */
4066 static void gen_icbi(DisasContext *ctx)
4068 TCGv t0;
4069 gen_set_access_type(ctx, ACCESS_CACHE);
4070 /* NIP cannot be restored if the memory exception comes from a helper */
4071 gen_update_nip(ctx, ctx->nip - 4);
4072 t0 = tcg_temp_new();
4073 gen_addr_reg_index(ctx, t0);
4074 gen_helper_icbi(t0);
4075 tcg_temp_free(t0);
4078 /* Optional: */
4079 /* dcba */
4080 static void gen_dcba(DisasContext *ctx)
4082 /* interpreted as no-op */
4083 /* XXX: specification says this is treated as a store by the MMU
4084 * but does not generate any exception
4088 /*** Segment register manipulation ***/
4089 /* Supervisor only: */
4091 /* mfsr */
4092 static void gen_mfsr(DisasContext *ctx)
4094 #if defined(CONFIG_USER_ONLY)
4095 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4096 #else
4097 TCGv t0;
4098 if (unlikely(!ctx->mem_idx)) {
4099 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4100 return;
4102 t0 = tcg_const_tl(SR(ctx->opcode));
4103 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4104 tcg_temp_free(t0);
4105 #endif
4108 /* mfsrin */
4109 static void gen_mfsrin(DisasContext *ctx)
4111 #if defined(CONFIG_USER_ONLY)
4112 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4113 #else
4114 TCGv t0;
4115 if (unlikely(!ctx->mem_idx)) {
4116 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4117 return;
4119 t0 = tcg_temp_new();
4120 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4121 tcg_gen_andi_tl(t0, t0, 0xF);
4122 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4123 tcg_temp_free(t0);
4124 #endif
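/* For mfsrin/mtsrin the segment register number is not an immediate field:
 * it is the high-order nibble of the 32-bit address in rB, hence the shift
 * right by 28 and the mask with 0xF. */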
4127 /* mtsr */
4128 static void gen_mtsr(DisasContext *ctx)
4130 #if defined(CONFIG_USER_ONLY)
4131 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4132 #else
4133 TCGv t0;
4134 if (unlikely(!ctx->mem_idx)) {
4135 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4136 return;
4138 t0 = tcg_const_tl(SR(ctx->opcode));
4139 gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
4140 tcg_temp_free(t0);
4141 #endif
4144 /* mtsrin */
4145 static void gen_mtsrin(DisasContext *ctx)
4147 #if defined(CONFIG_USER_ONLY)
4148 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4149 #else
4150 TCGv t0;
4151 if (unlikely(!ctx->mem_idx)) {
4152 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4153 return;
4155 t0 = tcg_temp_new();
4156 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4157 tcg_gen_andi_tl(t0, t0, 0xF);
4158 gen_helper_store_sr(t0, cpu_gpr[rD(ctx->opcode)]);
4159 tcg_temp_free(t0);
4160 #endif
4163 #if defined(TARGET_PPC64)
4164 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
4166 /* mfsr */
4167 static void gen_mfsr_64b(DisasContext *ctx)
4169 #if defined(CONFIG_USER_ONLY)
4170 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4171 #else
4172 TCGv t0;
4173 if (unlikely(!ctx->mem_idx)) {
4174 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4175 return;
4177 t0 = tcg_const_tl(SR(ctx->opcode));
4178 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4179 tcg_temp_free(t0);
4180 #endif
4183 /* mfsrin */
4184 static void gen_mfsrin_64b(DisasContext *ctx)
4186 #if defined(CONFIG_USER_ONLY)
4187 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4188 #else
4189 TCGv t0;
4190 if (unlikely(!ctx->mem_idx)) {
4191 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4192 return;
4194 t0 = tcg_temp_new();
4195 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4196 tcg_gen_andi_tl(t0, t0, 0xF);
4197 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4198 tcg_temp_free(t0);
4199 #endif
4202 /* mtsr */
4203 static void gen_mtsr_64b(DisasContext *ctx)
4205 #if defined(CONFIG_USER_ONLY)
4206 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4207 #else
4208 TCGv t0;
4209 if (unlikely(!ctx->mem_idx)) {
4210 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4211 return;
4213 t0 = tcg_const_tl(SR(ctx->opcode));
4214 gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
4215 tcg_temp_free(t0);
4216 #endif
4219 /* mtsrin */
4220 static void gen_mtsrin_64b(DisasContext *ctx)
4222 #if defined(CONFIG_USER_ONLY)
4223 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4224 #else
4225 TCGv t0;
4226 if (unlikely(!ctx->mem_idx)) {
4227 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4228 return;
4230 t0 = tcg_temp_new();
4231 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4232 tcg_gen_andi_tl(t0, t0, 0xF);
4233 gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
4234 tcg_temp_free(t0);
4235 #endif
4238 /* slbmte */
4239 static void gen_slbmte(DisasContext *ctx)
4241 #if defined(CONFIG_USER_ONLY)
4242 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4243 #else
4244 if (unlikely(!ctx->mem_idx)) {
4245 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4246 return;
4248 gen_helper_store_slb(cpu_gpr[rB(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
4249 #endif
4252 static void gen_slbmfee(DisasContext *ctx)
4254 #if defined(CONFIG_USER_ONLY)
4255 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4256 #else
4257 if (unlikely(!ctx->mem_idx)) {
4258 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4259 return;
4261 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)],
4262 cpu_gpr[rB(ctx->opcode)]);
4263 #endif
4266 static void gen_slbmfev(DisasContext *ctx)
4268 #if defined(CONFIG_USER_ONLY)
4269 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4270 #else
4271 if (unlikely(!ctx->mem_idx)) {
4272 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4273 return;
4275 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)],
4276 cpu_gpr[rB(ctx->opcode)]);
4277 #endif
4279 #endif /* defined(TARGET_PPC64) */
4281 /*** Lookaside buffer management ***/
4282 /* Optional & mem_idx only: */
4284 /* tlbia */
4285 static void gen_tlbia(DisasContext *ctx)
4287 #if defined(CONFIG_USER_ONLY)
4288 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4289 #else
4290 if (unlikely(!ctx->mem_idx)) {
4291 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4292 return;
4294 gen_helper_tlbia();
4295 #endif
4298 /* tlbiel */
4299 static void gen_tlbiel(DisasContext *ctx)
4301 #if defined(CONFIG_USER_ONLY)
4302 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4303 #else
4304 if (unlikely(!ctx->mem_idx)) {
4305 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4306 return;
4308 gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
4309 #endif
4312 /* tlbie */
4313 static void gen_tlbie(DisasContext *ctx)
4315 #if defined(CONFIG_USER_ONLY)
4316 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4317 #else
4318 if (unlikely(!ctx->mem_idx)) {
4319 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4320 return;
4322 #if defined(TARGET_PPC64)
4323 if (!ctx->sf_mode) {
4324 TCGv t0 = tcg_temp_new();
4325 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
4326 gen_helper_tlbie(t0);
4327 tcg_temp_free(t0);
4328 } else
4329 #endif
4330 gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
4331 #endif
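/* On a 64-bit CPU running in 32-bit mode (!sf_mode) the effective address in
 * rB is zero-extended before being passed to the tlbie helper; in 64-bit
 * mode the register is handed to the helper unchanged. */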
4334 /* tlbsync */
4335 static void gen_tlbsync(DisasContext *ctx)
4337 #if defined(CONFIG_USER_ONLY)
4338 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4339 #else
4340 if (unlikely(!ctx->mem_idx)) {
4341 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4342 return;
4344 /* This has no effect: it should ensure that all previously
4345 * issued tlbie operations have completed
4347 gen_stop_exception(ctx);
4348 #endif
4351 #if defined(TARGET_PPC64)
4352 /* slbia */
4353 static void gen_slbia(DisasContext *ctx)
4355 #if defined(CONFIG_USER_ONLY)
4356 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4357 #else
4358 if (unlikely(!ctx->mem_idx)) {
4359 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4360 return;
4362 gen_helper_slbia();
4363 #endif
4366 /* slbie */
4367 static void gen_slbie(DisasContext *ctx)
4369 #if defined(CONFIG_USER_ONLY)
4370 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4371 #else
4372 if (unlikely(!ctx->mem_idx)) {
4373 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4374 return;
4376 gen_helper_slbie(cpu_gpr[rB(ctx->opcode)]);
4377 #endif
4379 #endif
4381 /*** External control ***/
4382 /* Optional: */
4384 /* eciwx */
4385 static void gen_eciwx(DisasContext *ctx)
4387 TCGv t0;
4388 /* Should check EAR[E] ! */
4389 gen_set_access_type(ctx, ACCESS_EXT);
4390 t0 = tcg_temp_new();
4391 gen_addr_reg_index(ctx, t0);
4392 gen_check_align(ctx, t0, 0x03);
4393 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4394 tcg_temp_free(t0);
4397 /* ecowx */
4398 static void gen_ecowx(DisasContext *ctx)
4400 TCGv t0;
4401 /* Should check EAR[E] ! */
4402 gen_set_access_type(ctx, ACCESS_EXT);
4403 t0 = tcg_temp_new();
4404 gen_addr_reg_index(ctx, t0);
4405 gen_check_align(ctx, t0, 0x03);
4406 gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4407 tcg_temp_free(t0);
4410 /* PowerPC 601 specific instructions */
4412 /* abs - abs. */
4413 static void gen_abs(DisasContext *ctx)
4415 int l1 = gen_new_label();
4416 int l2 = gen_new_label();
4417 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
4418 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4419 tcg_gen_br(l2);
4420 gen_set_label(l1);
4421 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4422 gen_set_label(l2);
4423 if (unlikely(Rc(ctx->opcode) != 0))
4424 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4427 /* abso - abso. */
4428 static void gen_abso(DisasContext *ctx)
4430 int l1 = gen_new_label();
4431 int l2 = gen_new_label();
4432 int l3 = gen_new_label();
4433 /* Start with XER OV disabled, the most likely case */
4434 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4435 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
4436 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
4437 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4438 tcg_gen_br(l2);
4439 gen_set_label(l1);
4440 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4441 tcg_gen_br(l3);
4442 gen_set_label(l2);
4443 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4444 gen_set_label(l3);
4445 if (unlikely(Rc(ctx->opcode) != 0))
4446 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
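/* abso can only overflow for the most negative 32-bit value (0x80000000),
 * whose negation is not representable; that single case is what sets
 * XER[OV] and XER[SO] above. */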
4449 /* clcs */
4450 static void gen_clcs(DisasContext *ctx)
4452 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
4453 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], t0);
4454 tcg_temp_free_i32(t0);
4455 /* Rc=1 sets CR0 to an undefined state */
4458 /* div - div. */
4459 static void gen_div(DisasContext *ctx)
4461 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4462 if (unlikely(Rc(ctx->opcode) != 0))
4463 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4466 /* divo - divo. */
4467 static void gen_divo(DisasContext *ctx)
4469 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4470 if (unlikely(Rc(ctx->opcode) != 0))
4471 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4474 /* divs - divs. */
4475 static void gen_divs(DisasContext *ctx)
4477 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4478 if (unlikely(Rc(ctx->opcode) != 0))
4479 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4482 /* divso - divso. */
4483 static void gen_divso(DisasContext *ctx)
4485 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4486 if (unlikely(Rc(ctx->opcode) != 0))
4487 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4490 /* doz - doz. */
4491 static void gen_doz(DisasContext *ctx)
4493 int l1 = gen_new_label();
4494 int l2 = gen_new_label();
4495 tcg_gen_brcond_tl(TCG_COND_LT, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4496 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4497 tcg_gen_br(l2);
4498 gen_set_label(l1);
4499 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4500 gen_set_label(l2);
4501 if (unlikely(Rc(ctx->opcode) != 0))
4502 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4505 /* dozo - dozo. */
4506 static void gen_dozo(DisasContext *ctx)
4508 int l1 = gen_new_label();
4509 int l2 = gen_new_label();
4510 TCGv t0 = tcg_temp_new();
4511 TCGv t1 = tcg_temp_new();
4512 TCGv t2 = tcg_temp_new();
4513 /* Start with XER OV disabled, the most likely case */
4514 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4515 tcg_gen_brcond_tl(TCG_COND_LT, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4516 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4517 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4518 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
4519 tcg_gen_andc_tl(t1, t1, t2);
4520 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
4521 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4522 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4523 tcg_gen_br(l2);
4524 gen_set_label(l1);
4525 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4526 gen_set_label(l2);
4527 tcg_temp_free(t0);
4528 tcg_temp_free(t1);
4529 tcg_temp_free(t2);
4530 if (unlikely(Rc(ctx->opcode) != 0))
4531 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
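/* Signed overflow of rB - rA is derived from the sign bits above:
 * t1 = rB ^ rA is negative when the operands have different signs and
 * t2 = rA ^ result is negative when rA and the result differ in sign, so
 * t1 & ~t2 is negative exactly when the operand signs differ and the result
 * takes the sign of rA, i.e. when the subtraction overflowed. */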
4534 /* dozi */
4535 static void gen_dozi(DisasContext *ctx)
4537 target_long simm = SIMM(ctx->opcode);
4538 int l1 = gen_new_label();
4539 int l2 = gen_new_label();
4540 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], simm, l1);
4541 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
4542 tcg_gen_br(l2);
4543 gen_set_label(l1);
4544 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4545 gen_set_label(l2);
4546 if (unlikely(Rc(ctx->opcode) != 0))
4547 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4550 /* lscbx - lscbx. */
4551 static void gen_lscbx(DisasContext *ctx)
4553 TCGv t0 = tcg_temp_new();
4554 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
4555 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
4556 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
4558 gen_addr_reg_index(ctx, t0);
4559 /* NIP cannot be restored if the memory exception comes from a helper */
4560 gen_update_nip(ctx, ctx->nip - 4);
4561 gen_helper_lscbx(t0, t0, t1, t2, t3);
4562 tcg_temp_free_i32(t1);
4563 tcg_temp_free_i32(t2);
4564 tcg_temp_free_i32(t3);
4565 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
4566 tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
4567 if (unlikely(Rc(ctx->opcode) != 0))
4568 gen_set_Rc0(ctx, t0);
4569 tcg_temp_free(t0);
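/* The lscbx helper returns the number of bytes actually transferred; it is
 * merged into the low seven bits of XER (the string byte count field) and,
 * when Rc is set, also drives CR0 through gen_set_Rc0. */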
4572 /* maskg - maskg. */
4573 static void gen_maskg(DisasContext *ctx)
4575 int l1 = gen_new_label();
4576 TCGv t0 = tcg_temp_new();
4577 TCGv t1 = tcg_temp_new();
4578 TCGv t2 = tcg_temp_new();
4579 TCGv t3 = tcg_temp_new();
4580 tcg_gen_movi_tl(t3, 0xFFFFFFFF);
4581 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4582 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
4583 tcg_gen_addi_tl(t2, t0, 1);
4584 tcg_gen_shr_tl(t2, t3, t2);
4585 tcg_gen_shr_tl(t3, t3, t1);
4586 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
4587 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
4588 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4589 gen_set_label(l1);
4590 tcg_temp_free(t0);
4591 tcg_temp_free(t1);
4592 tcg_temp_free(t2);
4593 tcg_temp_free(t3);
4594 if (unlikely(Rc(ctx->opcode) != 0))
4595 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4598 /* maskir - maskir. */
4599 static void gen_maskir(DisasContext *ctx)
4601 TCGv t0 = tcg_temp_new();
4602 TCGv t1 = tcg_temp_new();
4603 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4604 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4605 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4606 tcg_temp_free(t0);
4607 tcg_temp_free(t1);
4608 if (unlikely(Rc(ctx->opcode) != 0))
4609 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4612 /* mul - mul. */
4613 static void gen_mul(DisasContext *ctx)
4615 TCGv_i64 t0 = tcg_temp_new_i64();
4616 TCGv_i64 t1 = tcg_temp_new_i64();
4617 TCGv t2 = tcg_temp_new();
4618 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4619 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4620 tcg_gen_mul_i64(t0, t0, t1);
4621 tcg_gen_trunc_i64_tl(t2, t0);
4622 gen_store_spr(SPR_MQ, t2);
4623 tcg_gen_shri_i64(t1, t0, 32);
4624 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4625 tcg_temp_free_i64(t0);
4626 tcg_temp_free_i64(t1);
4627 tcg_temp_free(t2);
4628 if (unlikely(Rc(ctx->opcode) != 0))
4629 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
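/* The 601 mul forms a full 64-bit product of the two 32-bit operands: the
 * low 32 bits go to the MQ register and the high 32 bits to rD, which is why
 * the computation above is carried out on 64-bit temporaries. */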
4632 /* mulo - mulo. */
4633 static void gen_mulo(DisasContext *ctx)
4635 int l1 = gen_new_label();
4636 TCGv_i64 t0 = tcg_temp_new_i64();
4637 TCGv_i64 t1 = tcg_temp_new_i64();
4638 TCGv t2 = tcg_temp_new();
4639 /* Start with XER OV disabled, the most likely case */
4640 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4641 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4642 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4643 tcg_gen_mul_i64(t0, t0, t1);
4644 tcg_gen_trunc_i64_tl(t2, t0);
4645 gen_store_spr(SPR_MQ, t2);
4646 tcg_gen_shri_i64(t1, t0, 32);
4647 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4648 tcg_gen_ext32s_i64(t1, t0);
4649 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
4650 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4651 gen_set_label(l1);
4652 tcg_temp_free_i64(t0);
4653 tcg_temp_free_i64(t1);
4654 tcg_temp_free(t2);
4655 if (unlikely(Rc(ctx->opcode) != 0))
4656 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4659 /* nabs - nabs. */
4660 static void gen_nabs(DisasContext *ctx)
4662 int l1 = gen_new_label();
4663 int l2 = gen_new_label();
4664 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4665 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4666 tcg_gen_br(l2);
4667 gen_set_label(l1);
4668 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4669 gen_set_label(l2);
4670 if (unlikely(Rc(ctx->opcode) != 0))
4671 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4674 /* nabso - nabso. */
4675 static void gen_nabso(DisasContext *ctx)
4677 int l1 = gen_new_label();
4678 int l2 = gen_new_label();
4679 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4680 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4681 tcg_gen_br(l2);
4682 gen_set_label(l1);
4683 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4684 gen_set_label(l2);
4685 /* nabs never overflows */
4686 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4687 if (unlikely(Rc(ctx->opcode) != 0))
4688 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4691 /* rlmi - rlmi. */
4692 static void gen_rlmi(DisasContext *ctx)
4694 uint32_t mb = MB(ctx->opcode);
4695 uint32_t me = ME(ctx->opcode);
4696 TCGv t0 = tcg_temp_new();
4697 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4698 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4699 tcg_gen_andi_tl(t0, t0, MASK(mb, me));
4700 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
4701 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
4702 tcg_temp_free(t0);
4703 if (unlikely(Rc(ctx->opcode) != 0))
4704 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4707 /* rrib - rrib. */
4708 static void gen_rrib(DisasContext *ctx)
4710 TCGv t0 = tcg_temp_new();
4711 TCGv t1 = tcg_temp_new();
4712 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4713 tcg_gen_movi_tl(t1, 0x80000000);
4714 tcg_gen_shr_tl(t1, t1, t0);
4715 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4716 tcg_gen_and_tl(t0, t0, t1);
4717 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
4718 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4719 tcg_temp_free(t0);
4720 tcg_temp_free(t1);
4721 if (unlikely(Rc(ctx->opcode) != 0))
4722 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4725 /* sle - sle. */
4726 static void gen_sle(DisasContext *ctx)
4728 TCGv t0 = tcg_temp_new();
4729 TCGv t1 = tcg_temp_new();
4730 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4731 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4732 tcg_gen_subfi_tl(t1, 32, t1);
4733 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4734 tcg_gen_or_tl(t1, t0, t1);
4735 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4736 gen_store_spr(SPR_MQ, t1);
4737 tcg_temp_free(t0);
4738 tcg_temp_free(t1);
4739 if (unlikely(Rc(ctx->opcode) != 0))
4740 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
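/* Like the other POWER/601 shift instructions that follow, sle also stores
 * the complete rotated value in the MQ register, so a subsequent instruction
 * of the family can recover the bits shifted out of rA. */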
4743 /* sleq - sleq. */
4744 static void gen_sleq(DisasContext *ctx)
4746 TCGv t0 = tcg_temp_new();
4747 TCGv t1 = tcg_temp_new();
4748 TCGv t2 = tcg_temp_new();
4749 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4750 tcg_gen_movi_tl(t2, 0xFFFFFFFF);
4751 tcg_gen_shl_tl(t2, t2, t0);
4752 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4753 gen_load_spr(t1, SPR_MQ);
4754 gen_store_spr(SPR_MQ, t0);
4755 tcg_gen_and_tl(t0, t0, t2);
4756 tcg_gen_andc_tl(t1, t1, t2);
4757 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4758 tcg_temp_free(t0);
4759 tcg_temp_free(t1);
4760 tcg_temp_free(t2);
4761 if (unlikely(Rc(ctx->opcode) != 0))
4762 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4765 /* sliq - sliq. */
4766 static void gen_sliq(DisasContext *ctx)
4768 int sh = SH(ctx->opcode);
4769 TCGv t0 = tcg_temp_new();
4770 TCGv t1 = tcg_temp_new();
4771 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4772 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4773 tcg_gen_or_tl(t1, t0, t1);
4774 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4775 gen_store_spr(SPR_MQ, t1);
4776 tcg_temp_free(t0);
4777 tcg_temp_free(t1);
4778 if (unlikely(Rc(ctx->opcode) != 0))
4779 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4782 /* slliq - slliq. */
4783 static void gen_slliq(DisasContext *ctx)
4785 int sh = SH(ctx->opcode);
4786 TCGv t0 = tcg_temp_new();
4787 TCGv t1 = tcg_temp_new();
4788 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4789 gen_load_spr(t1, SPR_MQ);
4790 gen_store_spr(SPR_MQ, t0);
4791 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
4792 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
4793 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4794 tcg_temp_free(t0);
4795 tcg_temp_free(t1);
4796 if (unlikely(Rc(ctx->opcode) != 0))
4797 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4800 /* sllq - sllq. */
4801 static void gen_sllq(DisasContext *ctx)
4803 int l1 = gen_new_label();
4804 int l2 = gen_new_label();
4805 TCGv t0 = tcg_temp_local_new();
4806 TCGv t1 = tcg_temp_local_new();
4807 TCGv t2 = tcg_temp_local_new();
4808 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4809 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4810 tcg_gen_shl_tl(t1, t1, t2);
4811 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4812 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
4813 gen_load_spr(t0, SPR_MQ);
4814 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4815 tcg_gen_br(l2);
4816 gen_set_label(l1);
4817 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4818 gen_load_spr(t2, SPR_MQ);
4819 tcg_gen_andc_tl(t1, t2, t1);
4820 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4821 gen_set_label(l2);
4822 tcg_temp_free(t0);
4823 tcg_temp_free(t1);
4824 tcg_temp_free(t2);
4825 if (unlikely(Rc(ctx->opcode) != 0))
4826 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4829 /* slq - slq. */
4830 static void gen_slq(DisasContext *ctx)
4832 int l1 = gen_new_label();
4833 TCGv t0 = tcg_temp_new();
4834 TCGv t1 = tcg_temp_new();
4835 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4836 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4837 tcg_gen_subfi_tl(t1, 32, t1);
4838 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4839 tcg_gen_or_tl(t1, t0, t1);
4840 gen_store_spr(SPR_MQ, t1);
4841 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
4842 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4843 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4844 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
4845 gen_set_label(l1);
4846 tcg_temp_free(t0);
4847 tcg_temp_free(t1);
4848 if (unlikely(Rc(ctx->opcode) != 0))
4849 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4852 /* sraiq - sraiq. */
4853 static void gen_sraiq(DisasContext *ctx)
4855 int sh = SH(ctx->opcode);
4856 int l1 = gen_new_label();
4857 TCGv t0 = tcg_temp_new();
4858 TCGv t1 = tcg_temp_new();
4859 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4860 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4861 tcg_gen_or_tl(t0, t0, t1);
4862 gen_store_spr(SPR_MQ, t0);
4863 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
4864 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4865 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
4866 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4867 gen_set_label(l1);
4868 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
4869 tcg_temp_free(t0);
4870 tcg_temp_free(t1);
4871 if (unlikely(Rc(ctx->opcode) != 0))
4872 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
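/* For the algebraic shifts XER[CA] is only set when the source is negative
 * and at least one non-zero bit was shifted out (tested via t1 above),
 * matching the carry definition of the sra-style instructions. */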
4875 /* sraq - sraq. */
4876 static void gen_sraq(DisasContext *ctx)
4878 int l1 = gen_new_label();
4879 int l2 = gen_new_label();
4880 TCGv t0 = tcg_temp_new();
4881 TCGv t1 = tcg_temp_local_new();
4882 TCGv t2 = tcg_temp_local_new();
4883 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4884 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4885 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
4886 tcg_gen_subfi_tl(t2, 32, t2);
4887 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
4888 tcg_gen_or_tl(t0, t0, t2);
4889 gen_store_spr(SPR_MQ, t0);
4890 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4891 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
4892 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
4893 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
4894 gen_set_label(l1);
4895 tcg_temp_free(t0);
4896 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
4897 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
4898 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4899 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
4900 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4901 gen_set_label(l2);
4902 tcg_temp_free(t1);
4903 tcg_temp_free(t2);
4904 if (unlikely(Rc(ctx->opcode) != 0))
4905 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4908 /* sre - sre. */
4909 static void gen_sre(DisasContext *ctx)
4911 TCGv t0 = tcg_temp_new();
4912 TCGv t1 = tcg_temp_new();
4913 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4914 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4915 tcg_gen_subfi_tl(t1, 32, t1);
4916 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4917 tcg_gen_or_tl(t1, t0, t1);
4918 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4919 gen_store_spr(SPR_MQ, t1);
4920 tcg_temp_free(t0);
4921 tcg_temp_free(t1);
4922 if (unlikely(Rc(ctx->opcode) != 0))
4923 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4926 /* srea - srea. */
4927 static void gen_srea(DisasContext *ctx)
4929 TCGv t0 = tcg_temp_new();
4930 TCGv t1 = tcg_temp_new();
4931 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4932 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4933 gen_store_spr(SPR_MQ, t0);
4934 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
4935 tcg_temp_free(t0);
4936 tcg_temp_free(t1);
4937 if (unlikely(Rc(ctx->opcode) != 0))
4938 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4941 /* sreq */
4942 static void gen_sreq(DisasContext *ctx)
4944 TCGv t0 = tcg_temp_new();
4945 TCGv t1 = tcg_temp_new();
4946 TCGv t2 = tcg_temp_new();
4947 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4948 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4949 tcg_gen_shr_tl(t1, t1, t0);
4950 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4951 gen_load_spr(t2, SPR_MQ);
4952 gen_store_spr(SPR_MQ, t0);
4953 tcg_gen_and_tl(t0, t0, t1);
4954 tcg_gen_andc_tl(t2, t2, t1);
4955 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
4956 tcg_temp_free(t0);
4957 tcg_temp_free(t1);
4958 tcg_temp_free(t2);
4959 if (unlikely(Rc(ctx->opcode) != 0))
4960 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4963 /* sriq */
4964 static void gen_sriq(DisasContext *ctx)
4966 int sh = SH(ctx->opcode);
4967 TCGv t0 = tcg_temp_new();
4968 TCGv t1 = tcg_temp_new();
4969 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4970 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4971 tcg_gen_or_tl(t1, t0, t1);
4972 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4973 gen_store_spr(SPR_MQ, t1);
4974 tcg_temp_free(t0);
4975 tcg_temp_free(t1);
4976 if (unlikely(Rc(ctx->opcode) != 0))
4977 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4980 /* srliq */
4981 static void gen_srliq(DisasContext *ctx)
4983 int sh = SH(ctx->opcode);
4984 TCGv t0 = tcg_temp_new();
4985 TCGv t1 = tcg_temp_new();
4986 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4987 gen_load_spr(t1, SPR_MQ);
4988 gen_store_spr(SPR_MQ, t0);
4989 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
4990 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
4991 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4992 tcg_temp_free(t0);
4993 tcg_temp_free(t1);
4994 if (unlikely(Rc(ctx->opcode) != 0))
4995 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4998 /* srlq */
4999 static void gen_srlq(DisasContext *ctx)
5001 int l1 = gen_new_label();
5002 int l2 = gen_new_label();
5003 TCGv t0 = tcg_temp_local_new();
5004 TCGv t1 = tcg_temp_local_new();
5005 TCGv t2 = tcg_temp_local_new();
5006 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
5007 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5008 tcg_gen_shr_tl(t1, t1, t2);
5009 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5010 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5011 gen_load_spr(t0, SPR_MQ);
5012 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5013 tcg_gen_br(l2);
5014 gen_set_label(l1);
5015 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5016 tcg_gen_and_tl(t0, t0, t1);
5017 gen_load_spr(t2, SPR_MQ);
5018 tcg_gen_andc_tl(t1, t2, t1);
5019 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5020 gen_set_label(l2);
5021 tcg_temp_free(t0);
5022 tcg_temp_free(t1);
5023 tcg_temp_free(t2);
5024 if (unlikely(Rc(ctx->opcode) != 0))
5025 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5028 /* srq */
5029 static void gen_srq(DisasContext *ctx)
5031 int l1 = gen_new_label();
5032 TCGv t0 = tcg_temp_new();
5033 TCGv t1 = tcg_temp_new();
5034 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5035 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5036 tcg_gen_subfi_tl(t1, 32, t1);
5037 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5038 tcg_gen_or_tl(t1, t0, t1);
5039 gen_store_spr(SPR_MQ, t1);
5040 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
5041 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5042 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
5043 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
5044 gen_set_label(l1);
5045 tcg_temp_free(t0);
5046 tcg_temp_free(t1);
5047 if (unlikely(Rc(ctx->opcode) != 0))
5048 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5051 /* PowerPC 602 specific instructions */
5053 /* dsa */
5054 static void gen_dsa(DisasContext *ctx)
5056 /* XXX: TODO */
5057 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5060 /* esa */
5061 static void gen_esa(DisasContext *ctx)
5063 /* XXX: TODO */
5064 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5067 /* mfrom */
5068 static void gen_mfrom(DisasContext *ctx)
5070 #if defined(CONFIG_USER_ONLY)
5071 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5072 #else
5073 if (unlikely(!ctx->mem_idx)) {
5074 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5075 return;
5077 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5078 #endif
5081 /* 602 - 603 - G2 TLB management */
5083 /* tlbld */
5084 static void gen_tlbld_6xx(DisasContext *ctx)
5086 #if defined(CONFIG_USER_ONLY)
5087 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5088 #else
5089 if (unlikely(!ctx->mem_idx)) {
5090 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5091 return;
5093 gen_helper_6xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
5094 #endif
5097 /* tlbli */
5098 static void gen_tlbli_6xx(DisasContext *ctx)
5100 #if defined(CONFIG_USER_ONLY)
5101 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5102 #else
5103 if (unlikely(!ctx->mem_idx)) {
5104 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5105 return;
5107 gen_helper_6xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5108 #endif
5111 /* 74xx TLB management */
5113 /* tlbld */
5114 static void gen_tlbld_74xx(DisasContext *ctx)
5116 #if defined(CONFIG_USER_ONLY)
5117 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5118 #else
5119 if (unlikely(!ctx->mem_idx)) {
5120 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5121 return;
5123 gen_helper_74xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
5124 #endif
5127 /* tlbli */
5128 static void gen_tlbli_74xx(DisasContext *ctx)
5130 #if defined(CONFIG_USER_ONLY)
5131 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5132 #else
5133 if (unlikely(!ctx->mem_idx)) {
5134 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5135 return;
5137 gen_helper_74xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5138 #endif
5141 /* POWER instructions not in PowerPC 601 */
5143 /* clf */
5144 static void gen_clf(DisasContext *ctx)
5146 /* Cache line flush: implemented as no-op */
5149 /* cli */
5150 static void gen_cli(DisasContext *ctx)
5152 /* Cache line invalidate: privileged and treated as no-op */
5153 #if defined(CONFIG_USER_ONLY)
5154 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5155 #else
5156 if (unlikely(!ctx->mem_idx)) {
5157 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5158 return;
5160 #endif
5163 /* dclst */
5164 static void gen_dclst(DisasContext *ctx)
5166 /* Data cache line store: treated as no-op */
5169 static void gen_mfsri(DisasContext *ctx)
5171 #if defined(CONFIG_USER_ONLY)
5172 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5173 #else
5174 int ra = rA(ctx->opcode);
5175 int rd = rD(ctx->opcode);
5176 TCGv t0;
5177 if (unlikely(!ctx->mem_idx)) {
5178 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5179 return;
5181 t0 = tcg_temp_new();
5182 gen_addr_reg_index(ctx, t0);
5183 tcg_gen_shri_tl(t0, t0, 28);
5184 tcg_gen_andi_tl(t0, t0, 0xF);
5185 gen_helper_load_sr(cpu_gpr[rd], t0);
5186 tcg_temp_free(t0);
5187 if (ra != 0 && ra != rd)
5188 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
5189 #endif
5192 static void gen_rac(DisasContext *ctx)
5194 #if defined(CONFIG_USER_ONLY)
5195 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5196 #else
5197 TCGv t0;
5198 if (unlikely(!ctx->mem_idx)) {
5199 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5200 return;
5202 t0 = tcg_temp_new();
5203 gen_addr_reg_index(ctx, t0);
5204 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], t0);
5205 tcg_temp_free(t0);
5206 #endif
5209 static void gen_rfsvc(DisasContext *ctx)
5211 #if defined(CONFIG_USER_ONLY)
5212 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5213 #else
5214 if (unlikely(!ctx->mem_idx)) {
5215 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5216 return;
5218 gen_helper_rfsvc();
5219 gen_sync_exception(ctx);
5220 #endif
5223 /* svc is not implemented for now */
5225 /* POWER2 specific instructions */
5226 /* Quad manipulation (load/store two floats at a time) */
5228 /* lfq */
5229 static void gen_lfq(DisasContext *ctx)
5231 int rd = rD(ctx->opcode);
5232 TCGv t0;
5233 gen_set_access_type(ctx, ACCESS_FLOAT);
5234 t0 = tcg_temp_new();
5235 gen_addr_imm_index(ctx, t0, 0);
5236 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5237 gen_addr_add(ctx, t0, t0, 8);
5238 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5239 tcg_temp_free(t0);
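/* lfq and the other POWER2 quad forms move a pair of doubles: the first from
 * the effective address into FPR rd, the second from EA + 8 into
 * FPR (rd + 1) % 32, the register number wrapping from 31 back to 0. */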
5242 /* lfqu */
5243 static void gen_lfqu(DisasContext *ctx)
5245 int ra = rA(ctx->opcode);
5246 int rd = rD(ctx->opcode);
5247 TCGv t0, t1;
5248 gen_set_access_type(ctx, ACCESS_FLOAT);
5249 t0 = tcg_temp_new();
5250 t1 = tcg_temp_new();
5251 gen_addr_imm_index(ctx, t0, 0);
5252 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5253 gen_addr_add(ctx, t1, t0, 8);
5254 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5255 if (ra != 0)
5256 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5257 tcg_temp_free(t0);
5258 tcg_temp_free(t1);
5261 /* lfqux */
5262 static void gen_lfqux(DisasContext *ctx)
5264 int ra = rA(ctx->opcode);
5265 int rd = rD(ctx->opcode);
5266 gen_set_access_type(ctx, ACCESS_FLOAT);
5267 TCGv t0, t1;
5268 t0 = tcg_temp_new();
5269 gen_addr_reg_index(ctx, t0);
5270 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5271 t1 = tcg_temp_new();
5272 gen_addr_add(ctx, t1, t0, 8);
5273 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5274 tcg_temp_free(t1);
5275 if (ra != 0)
5276 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5277 tcg_temp_free(t0);
5280 /* lfqx */
5281 static void gen_lfqx(DisasContext *ctx)
5283 int rd = rD(ctx->opcode);
5284 TCGv t0;
5285 gen_set_access_type(ctx, ACCESS_FLOAT);
5286 t0 = tcg_temp_new();
5287 gen_addr_reg_index(ctx, t0);
5288 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5289 gen_addr_add(ctx, t0, t0, 8);
5290 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5291 tcg_temp_free(t0);
5294 /* stfq */
5295 static void gen_stfq(DisasContext *ctx)
5297 int rd = rD(ctx->opcode);
5298 TCGv t0;
5299 gen_set_access_type(ctx, ACCESS_FLOAT);
5300 t0 = tcg_temp_new();
5301 gen_addr_imm_index(ctx, t0, 0);
5302 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5303 gen_addr_add(ctx, t0, t0, 8);
5304 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5305 tcg_temp_free(t0);
5308 /* stfqu */
5309 static void gen_stfqu(DisasContext *ctx)
5311 int ra = rA(ctx->opcode);
5312 int rd = rD(ctx->opcode);
5313 TCGv t0, t1;
5314 gen_set_access_type(ctx, ACCESS_FLOAT);
5315 t0 = tcg_temp_new();
5316 gen_addr_imm_index(ctx, t0, 0);
5317 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5318 t1 = tcg_temp_new();
5319 gen_addr_add(ctx, t1, t0, 8);
5320 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5321 tcg_temp_free(t1);
5322 if (ra != 0)
5323 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5324 tcg_temp_free(t0);
5327 /* stfqux */
5328 static void gen_stfqux(DisasContext *ctx)
5330 int ra = rA(ctx->opcode);
5331 int rd = rD(ctx->opcode);
5332 TCGv t0, t1;
5333 gen_set_access_type(ctx, ACCESS_FLOAT);
5334 t0 = tcg_temp_new();
5335 gen_addr_reg_index(ctx, t0);
5336 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5337 t1 = tcg_temp_new();
5338 gen_addr_add(ctx, t1, t0, 8);
5339 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5340 tcg_temp_free(t1);
5341 if (ra != 0)
5342 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5343 tcg_temp_free(t0);
5346 /* stfqx */
5347 static void gen_stfqx(DisasContext *ctx)
5349 int rd = rD(ctx->opcode);
5350 TCGv t0;
5351 gen_set_access_type(ctx, ACCESS_FLOAT);
5352 t0 = tcg_temp_new();
5353 gen_addr_reg_index(ctx, t0);
5354 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5355 gen_addr_add(ctx, t0, t0, 8);
5356 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5357 tcg_temp_free(t0);
5360 /* BookE specific instructions */
5362 /* XXX: not implemented on 440 ? */
5363 static void gen_mfapidi(DisasContext *ctx)
5365 /* XXX: TODO */
5366 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5369 /* XXX: not implemented on 440 ? */
5370 static void gen_tlbiva(DisasContext *ctx)
5372 #if defined(CONFIG_USER_ONLY)
5373 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5374 #else
5375 TCGv t0;
5376 if (unlikely(!ctx->mem_idx)) {
5377 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5378 return;
5380 t0 = tcg_temp_new();
5381 gen_addr_reg_index(ctx, t0);
5382 gen_helper_tlbie(t0);
5383 tcg_temp_free(t0);
5384 #endif
5387 /* All 405 MAC instructions are translated here */
5388 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5389 int ra, int rb, int rt, int Rc)
5391 TCGv t0, t1;
5393 t0 = tcg_temp_local_new();
5394 t1 = tcg_temp_local_new();
5396 switch (opc3 & 0x0D) {
5397 case 0x05:
5398 /* macchw - macchw. - macchwo - macchwo. */
5399 /* macchws - macchws. - macchwso - macchwso. */
5400 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
5401 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
5402 /* mulchw - mulchw. */
5403 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5404 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5405 tcg_gen_ext16s_tl(t1, t1);
5406 break;
5407 case 0x04:
5408 /* macchwu - macchwu. - macchwuo - macchwuo. */
5409 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
5410 /* mulchwu - mulchwu. */
5411 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5412 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5413 tcg_gen_ext16u_tl(t1, t1);
5414 break;
5415 case 0x01:
5416 /* machhw - machhw. - machhwo - machhwo. */
5417 /* machhws - machhws. - machhwso - machhwso. */
5418 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
5419 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
5420 /* mulhhw - mulhhw. */
5421 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5422 tcg_gen_ext16s_tl(t0, t0);
5423 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5424 tcg_gen_ext16s_tl(t1, t1);
5425 break;
5426 case 0x00:
5427 /* machhwu - machhwu. - machhwuo - machhwuo. */
5428 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
5429 /* mulhhwu - mulhhwu. */
5430 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5431 tcg_gen_ext16u_tl(t0, t0);
5432 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5433 tcg_gen_ext16u_tl(t1, t1);
5434 break;
5435 case 0x0D:
5436 /* maclhw - maclhw. - maclhwo - maclhwo. */
5437 /* maclhws - maclhws. - maclhwso - maclhwso. */
5438 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
5439 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
5440 /* mullhw - mullhw. */
5441 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5442 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5443 break;
5444 case 0x0C:
5445 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
5446 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
5447 /* mullhwu - mullhwu. */
5448 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5449 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5450 break;
5452 if (opc2 & 0x04) {
5453 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5454 tcg_gen_mul_tl(t1, t0, t1);
5455 if (opc2 & 0x02) {
5456 /* nmultiply-and-accumulate (0x0E) */
5457 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5458 } else {
5459 /* multiply-and-accumulate (0x0C) */
5460 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5463 if (opc3 & 0x12) {
5464 /* Check overflow and/or saturate */
5465 int l1 = gen_new_label();
5467 if (opc3 & 0x10) {
5468 /* Start with XER OV disabled, the most likely case */
5469 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
5471 if (opc3 & 0x01) {
5472 /* Signed */
5473 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5474 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5475 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5476 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5477 if (opc3 & 0x02) {
5478 /* Saturate */
5479 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5480 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5482 } else {
5483 /* Unsigned */
5484 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5485 if (opc3 & 0x02) {
5486 /* Saturate */
5487 tcg_gen_movi_tl(t0, UINT32_MAX);
5490 if (opc3 & 0x10) {
5491 /* Check overflow */
5492 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
5494 gen_set_label(l1);
5495 tcg_gen_mov_tl(cpu_gpr[rt], t0);
5497 } else {
5498 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5500 tcg_temp_free(t0);
5501 tcg_temp_free(t1);
5502 if (unlikely(Rc != 0)) {
5503 /* Update Rc0 */
5504 gen_set_Rc0(ctx, cpu_gpr[rt]);
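/* Decoding summary for the 405 MAC family handled above: the low bits of
 * opc3 select which 16-bit halves of rA and rB are used and whether they are
 * sign- or zero-extended; opc2 bit 0x04 selects multiply-accumulate (with
 * bit 0x02 negating the product) rather than a plain multiply; opc3 bit 0x10
 * requests the XER[OV]/XER[SO] update, bit 0x02 requests saturation and
 * bit 0x01 selects the signed rather than the unsigned check. */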
5508 #define GEN_MAC_HANDLER(name, opc2, opc3) \
5509 static void glue(gen_, name)(DisasContext *ctx) \
5511 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
5512 rD(ctx->opcode), Rc(ctx->opcode)); \
5515 /* macchw - macchw. */
5516 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5517 /* macchwo - macchwo. */
5518 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5519 /* macchws - macchws. */
5520 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5521 /* macchwso - macchwso. */
5522 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5523 /* macchwsu - macchwsu. */
5524 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5525 /* macchwsuo - macchwsuo. */
5526 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5527 /* macchwu - macchwu. */
5528 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5529 /* macchwuo - macchwuo. */
5530 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5531 /* machhw - machhw. */
5532 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5533 /* machhwo - machhwo. */
5534 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5535 /* machhws - machhws. */
5536 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5537 /* machhwso - machhwso. */
5538 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5539 /* machhwsu - machhwsu. */
5540 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5541 /* machhwsuo - machhwsuo. */
5542 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5543 /* machhwu - machhwu. */
5544 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5545 /* machhwuo - machhwuo. */
5546 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5547 /* maclhw - maclhw. */
5548 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5549 /* maclhwo - maclhwo. */
5550 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5551 /* maclhws - maclhws. */
5552 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5553 /* maclhwso - maclhwso. */
5554 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5555 /* maclhwu - maclhwu. */
5556 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5557 /* maclhwuo - maclhwuo. */
5558 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5559 /* maclhwsu - maclhwsu. */
5560 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5561 /* maclhwsuo - maclhwsuo. */
5562 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5563 /* nmacchw - nmacchw. */
5564 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5565 /* nmacchwo - nmacchwo. */
5566 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5567 /* nmacchws - nmacchws. */
5568 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5569 /* nmacchwso - nmacchwso. */
5570 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5571 /* nmachhw - nmachhw. */
5572 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5573 /* nmachhwo - nmachhwo. */
5574 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5575 /* nmachhws - nmachhws. */
5576 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5577 /* nmachhwso - nmachhwso. */
5578 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5579 /* nmaclhw - nmaclhw. */
5580 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5581 /* nmaclhwo - nmaclhwo. */
5582 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5583 /* nmaclhws - nmaclhws. */
5584 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5585 /* nmaclhwso - nmaclhwso. */
5586 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5588 /* mulchw - mulchw. */
5589 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5590 /* mulchwu - mulchwu. */
5591 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5592 /* mulhhw - mulhhw. */
5593 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5594 /* mulhhwu - mulhhwu. */
5595 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5596 /* mullhw - mullhw. */
5597 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5598 /* mullhwu - mullhwu. */
5599 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5601 /* mfdcr */
5602 static void gen_mfdcr(DisasContext *ctx)
5604 #if defined(CONFIG_USER_ONLY)
5605 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5606 #else
5607 TCGv dcrn;
5608 if (unlikely(!ctx->mem_idx)) {
5609 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5610 return;
5612 /* NIP cannot be restored if the memory exception comes from a helper */
5613 gen_update_nip(ctx, ctx->nip - 4);
5614 dcrn = tcg_const_tl(SPR(ctx->opcode));
5615 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], dcrn);
5616 tcg_temp_free(dcrn);
5617 #endif
5620 /* mtdcr */
5621 static void gen_mtdcr(DisasContext *ctx)
5623 #if defined(CONFIG_USER_ONLY)
5624 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5625 #else
5626 TCGv dcrn;
5627 if (unlikely(!ctx->mem_idx)) {
5628 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5629 return;
5631 /* NIP cannot be restored if the memory exception comes from a helper */
5632 gen_update_nip(ctx, ctx->nip - 4);
5633 dcrn = tcg_const_tl(SPR(ctx->opcode));
5634 gen_helper_store_dcr(dcrn, cpu_gpr[rS(ctx->opcode)]);
5635 tcg_temp_free(dcrn);
5636 #endif
5639 /* mfdcrx */
5640 /* XXX: not implemented on 440 ? */
5641 static void gen_mfdcrx(DisasContext *ctx)
5643 #if defined(CONFIG_USER_ONLY)
5644 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5645 #else
5646 if (unlikely(!ctx->mem_idx)) {
5647 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5648 return;
5650 /* NIP cannot be restored if the memory exception comes from a helper */
5651 gen_update_nip(ctx, ctx->nip - 4);
5652 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5653 /* Note: if the Rc bit is set, the resulting CR0 state is undefined */
5654 #endif
5657 /* mtdcrx */
5658 /* XXX: not implemented on 440 ? */
5659 static void gen_mtdcrx(DisasContext *ctx)
5661 #if defined(CONFIG_USER_ONLY)
5662 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5663 #else
5664 if (unlikely(!ctx->mem_idx)) {
5665 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5666 return;
5668 /* NIP cannot be restored if the memory exception comes from a helper */
5669 gen_update_nip(ctx, ctx->nip - 4);
5670 gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5671 /* Note: if the Rc bit is set, the resulting CR0 state is undefined */
5672 #endif
5675 /* mfdcrux (PPC 460) : user-mode access to DCR */
5676 static void gen_mfdcrux(DisasContext *ctx)
5678 /* NIP cannot be restored if the memory exception comes from a helper */
5679 gen_update_nip(ctx, ctx->nip - 4);
5680 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5681 /* Note: if the Rc bit is set, the resulting CR0 state is undefined */
5684 /* mtdcrux (PPC 460) : user-mode access to DCR */
5685 static void gen_mtdcrux(DisasContext *ctx)
5687 /* NIP cannot be restored if the memory exception comes from a helper */
5688 gen_update_nip(ctx, ctx->nip - 4);
5689 gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5690 /* Note: if the Rc bit is set, the resulting CR0 state is undefined */
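/* Unlike mfdcrx/mtdcrx above, the 460 user-mode variants mfdcrux/mtdcrux do
 * not perform a privilege check here; any access control is presumably left
 * to the DCR read/write helpers themselves. */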
5693 /* dccci */
5694 static void gen_dccci(DisasContext *ctx)
5696 #if defined(CONFIG_USER_ONLY)
5697 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5698 #else
5699 if (unlikely(!ctx->mem_idx)) {
5700 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5701 return;
5703 /* interpreted as no-op */
5704 #endif
5707 /* dcread */
5708 static void gen_dcread(DisasContext *ctx)
5710 #if defined(CONFIG_USER_ONLY)
5711 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5712 #else
5713 TCGv EA, val;
5714 if (unlikely(!ctx->mem_idx)) {
5715 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5716 return;
5718 gen_set_access_type(ctx, ACCESS_CACHE);
5719 EA = tcg_temp_new();
5720 gen_addr_reg_index(ctx, EA);
5721 val = tcg_temp_new();
5722 gen_qemu_ld32u(ctx, val, EA);
5723 tcg_temp_free(val);
5724 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5725 tcg_temp_free(EA);
5726 #endif
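/* On real hardware dcread returns information from the addressed data cache
 * line; since the cache is not modelled here, the code above performs a
 * dummy word load (for the fault behaviour) and simply returns the effective
 * address in rD. */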
5729 /* icbt */
5730 static void gen_icbt_40x(DisasContext *ctx)
5732 /* interpreted as no-op */
5733 /* XXX: specification says this is treated as a load by the MMU
5734 * but does not generate any exception
5738 /* iccci */
5739 static void gen_iccci(DisasContext *ctx)
5741 #if defined(CONFIG_USER_ONLY)
5742 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5743 #else
5744 if (unlikely(!ctx->mem_idx)) {
5745 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5746 return;
5748 /* interpreted as no-op */
5749 #endif
5752 /* icread */
5753 static void gen_icread(DisasContext *ctx)
5755 #if defined(CONFIG_USER_ONLY)
5756 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5757 #else
5758 if (unlikely(!ctx->mem_idx)) {
5759 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5760 return;
5762 /* interpreted as no-op */
5763 #endif
5766 /* rfci (mem_idx only) */
5767 static void gen_rfci_40x(DisasContext *ctx)
5769 #if defined(CONFIG_USER_ONLY)
5770 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5771 #else
5772 if (unlikely(!ctx->mem_idx)) {
5773 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5774 return;
5776 /* Restore CPU state */
5777 gen_helper_40x_rfci();
5778 gen_sync_exception(ctx);
5779 #endif
5782 static void gen_rfci(DisasContext *ctx)
5784 #if defined(CONFIG_USER_ONLY)
5785 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5786 #else
5787 if (unlikely(!ctx->mem_idx)) {
5788 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5789 return;
5791 /* Restore CPU state */
5792 gen_helper_rfci();
5793 gen_sync_exception(ctx);
5794 #endif
5797 /* BookE specific */
5799 /* XXX: not implemented on 440 ? */
5800 static void gen_rfdi(DisasContext *ctx)
5802 #if defined(CONFIG_USER_ONLY)
5803 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5804 #else
5805 if (unlikely(!ctx->mem_idx)) {
5806 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5807 return;
5809 /* Restore CPU state */
5810 gen_helper_rfdi();
5811 gen_sync_exception(ctx);
5812 #endif
5815 /* XXX: not implemented on 440 ? */
5816 static void gen_rfmci(DisasContext *ctx)
5818 #if defined(CONFIG_USER_ONLY)
5819 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5820 #else
5821 if (unlikely(!ctx->mem_idx)) {
5822 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5823 return;
5825 /* Restore CPU state */
5826 gen_helper_rfmci();
5827 gen_sync_exception(ctx);
5828 #endif
5831 /* TLB management - PowerPC 405 implementation */
5833 /* tlbre */
5834 static void gen_tlbre_40x(DisasContext *ctx)
5836 #if defined(CONFIG_USER_ONLY)
5837 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5838 #else
5839 if (unlikely(!ctx->mem_idx)) {
5840 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5841 return;
5843 switch (rB(ctx->opcode)) {
5844 case 0:
5845 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5846 break;
5847 case 1:
5848 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5849 break;
5850 default:
5851 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5852 break;
5854 #endif
5857 /* tlbsx - tlbsx. */
5858 static void gen_tlbsx_40x(DisasContext *ctx)
5860 #if defined(CONFIG_USER_ONLY)
5861 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5862 #else
5863 TCGv t0;
5864 if (unlikely(!ctx->mem_idx)) {
5865 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5866 return;
5868 t0 = tcg_temp_new();
5869 gen_addr_reg_index(ctx, t0);
5870 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], t0);
5871 tcg_temp_free(t0);
5872 if (Rc(ctx->opcode)) {
5873 int l1 = gen_new_label();
5874 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
5875 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
5876 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
5877 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5878 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5879 gen_set_label(l1);
5881 #endif
5884 /* tlbwe */
5885 static void gen_tlbwe_40x(DisasContext *ctx)
5887 #if defined(CONFIG_USER_ONLY)
5888 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5889 #else
5890 if (unlikely(!ctx->mem_idx)) {
5891 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5892 return;
5894 switch (rB(ctx->opcode)) {
5895 case 0:
5896 gen_helper_4xx_tlbwe_hi(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5897 break;
5898 case 1:
5899 gen_helper_4xx_tlbwe_lo(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5900 break;
5901 default:
5902 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5903 break;
5905 #endif
5908 /* TLB management - PowerPC 440 implementation */
5910 /* tlbre */
5911 static void gen_tlbre_440(DisasContext *ctx)
5913 #if defined(CONFIG_USER_ONLY)
5914 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5915 #else
5916 if (unlikely(!ctx->mem_idx)) {
5917 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5918 return;
5920 switch (rB(ctx->opcode)) {
5921 case 0:
5922 case 1:
5923 case 2:
5925 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5926 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], t0, cpu_gpr[rA(ctx->opcode)]);
5927 tcg_temp_free_i32(t0);
5929 break;
5930 default:
5931 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5932 break;
5934 #endif
5937 /* tlbsx - tlbsx. */
5938 static void gen_tlbsx_440(DisasContext *ctx)
5940 #if defined(CONFIG_USER_ONLY)
5941 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5942 #else
5943 TCGv t0;
5944 if (unlikely(!ctx->mem_idx)) {
5945 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5946 return;
5948 t0 = tcg_temp_new();
5949 gen_addr_reg_index(ctx, t0);
5950 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], t0);
5951 tcg_temp_free(t0);
5952 if (Rc(ctx->opcode)) {
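/* Record form: same CR0 update as the 40x variant above. */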
5953 int l1 = gen_new_label();
5954 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
5955 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
5956 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
5957 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5958 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5959 gen_set_label(l1);
5961 #endif
5964 /* tlbwe */
5965 static void gen_tlbwe_440(DisasContext *ctx)
5967 #if defined(CONFIG_USER_ONLY)
5968 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5969 #else
5970 if (unlikely(!ctx->mem_idx)) {
5971 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5972 return;
5974 switch (rB(ctx->opcode)) {
5975 case 0:
5976 case 1:
5977 case 2:
5979 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5980 gen_helper_440_tlbwe(t0, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5981 tcg_temp_free_i32(t0);
5983 break;
5984 default:
5985 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5986 break;
5988 #endif
5991 /* TLB management - PowerPC BookE 2.06 implementation */
5993 /* tlbre */
5994 static void gen_tlbre_booke206(DisasContext *ctx)
5996 #if defined(CONFIG_USER_ONLY)
5997 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5998 #else
5999 if (unlikely(!ctx->mem_idx)) {
6000 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6001 return;
6004 gen_helper_booke206_tlbre();
6005 #endif
6008 /* tlbsx - tlbsx. */
6009 static void gen_tlbsx_booke206(DisasContext *ctx)
6011 #if defined(CONFIG_USER_ONLY)
6012 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6013 #else
6014 TCGv t0;
6015 if (unlikely(!ctx->mem_idx)) {
6016 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6017 return;
6020 if (rA(ctx->opcode)) {
6021 t0 = tcg_temp_new();
6022 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
6023 } else {
6024 t0 = tcg_const_tl(0);
6027 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
6028 gen_helper_booke206_tlbsx(t0);
6029 #endif
6032 /* tlbwe */
6033 static void gen_tlbwe_booke206(DisasContext *ctx)
6035 #if defined(CONFIG_USER_ONLY)
6036 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6037 #else
6038 if (unlikely(!ctx->mem_idx)) {
6039 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6040 return;
6042 gen_helper_booke206_tlbwe();
6043 #endif
6046 static void gen_tlbivax_booke206(DisasContext *ctx)
6048 #if defined(CONFIG_USER_ONLY)
6049 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6050 #else
6051 TCGv t0;
6052 if (unlikely(!ctx->mem_idx)) {
6053 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6054 return;
6057 t0 = tcg_temp_new();
6058 gen_addr_reg_index(ctx, t0);
6060 gen_helper_booke206_tlbivax(t0);
6061 #endif
6065 /* wrtee */
6066 static void gen_wrtee(DisasContext *ctx)
6068 #if defined(CONFIG_USER_ONLY)
6069 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6070 #else
6071 TCGv t0;
6072 if (unlikely(!ctx->mem_idx)) {
6073 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6074 return;
6076 t0 = tcg_temp_new();
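/* Replace MSR[EE] with the corresponding bit of rD, leaving the rest of MSR untouched. */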
6077 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
6078 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6079 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
6080 tcg_temp_free(t0);
6081 /* Stop translation to have a chance to raise an exception
6082 * if we just set msr_ee to 1 */
6084 gen_stop_exception(ctx);
6085 #endif
6088 /* wrteei */
6089 static void gen_wrteei(DisasContext *ctx)
6091 #if defined(CONFIG_USER_ONLY)
6092 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6093 #else
6094 if (unlikely(!ctx->mem_idx)) {
6095 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6096 return;
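/* The E field of the opcode gives the new value of MSR[EE]. */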
6098 if (ctx->opcode & 0x00008000) {
6099 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6100 /* Stop translation to have a chance to raise an exception */
6101 gen_stop_exception(ctx);
6102 } else {
6103 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6105 #endif
6108 /* PowerPC 440 specific instructions */
6110 /* dlmzb */
6111 static void gen_dlmzb(DisasContext *ctx)
6113 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6114 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
6115 cpu_gpr[rB(ctx->opcode)], t0);
6116 tcg_temp_free_i32(t0);
6119 /* mbar replaces eieio on 440 */
6120 static void gen_mbar(DisasContext *ctx)
6122 /* interpreted as no-op */
6125 /* msync replaces sync on 440 */
6126 static void gen_msync(DisasContext *ctx)
6128 /* interpreted as no-op */
6131 /* icbt */
6132 static void gen_icbt_440(DisasContext *ctx)
6134 /* interpreted as no-op */
6135 /* XXX: the specification says this is treated as a load by the MMU
6136 * but does not generate any exception */
6140 /*** Altivec vector extension ***/
6141 /* Altivec register moves */
6143 static inline TCGv_ptr gen_avr_ptr(int reg)
6145 TCGv_ptr r = tcg_temp_new_ptr();
6146 tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, avr[reg]));
6147 return r;
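/* Vector load/store handlers: the effective address is forced to 16-byte
 * alignment by masking its low bits, and the 128-bit AVR is transferred as
 * two 64-bit halves, swapped when running in little-endian mode. */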
6150 #define GEN_VR_LDX(name, opc2, opc3) \
6151 static void glue(gen_, name)(DisasContext *ctx) \
6153 TCGv EA; \
6154 if (unlikely(!ctx->altivec_enabled)) { \
6155 gen_exception(ctx, POWERPC_EXCP_VPU); \
6156 return; \
6158 gen_set_access_type(ctx, ACCESS_INT); \
6159 EA = tcg_temp_new(); \
6160 gen_addr_reg_index(ctx, EA); \
6161 tcg_gen_andi_tl(EA, EA, ~0xf); \
6162 if (ctx->le_mode) { \
6163 gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6164 tcg_gen_addi_tl(EA, EA, 8); \
6165 gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6166 } else { \
6167 gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6168 tcg_gen_addi_tl(EA, EA, 8); \
6169 gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6171 tcg_temp_free(EA); \
6174 #define GEN_VR_STX(name, opc2, opc3) \
6175 static void gen_st##name(DisasContext *ctx) \
6177 TCGv EA; \
6178 if (unlikely(!ctx->altivec_enabled)) { \
6179 gen_exception(ctx, POWERPC_EXCP_VPU); \
6180 return; \
6182 gen_set_access_type(ctx, ACCESS_INT); \
6183 EA = tcg_temp_new(); \
6184 gen_addr_reg_index(ctx, EA); \
6185 tcg_gen_andi_tl(EA, EA, ~0xf); \
6186 if (ctx->le_mode) { \
6187 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6188 tcg_gen_addi_tl(EA, EA, 8); \
6189 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6190 } else { \
6191 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6192 tcg_gen_addi_tl(EA, EA, 8); \
6193 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6195 tcg_temp_free(EA); \
6198 #define GEN_VR_LVE(name, opc2, opc3) \
6199 static void gen_lve##name(DisasContext *ctx) \
6201 TCGv EA; \
6202 TCGv_ptr rs; \
6203 if (unlikely(!ctx->altivec_enabled)) { \
6204 gen_exception(ctx, POWERPC_EXCP_VPU); \
6205 return; \
6207 gen_set_access_type(ctx, ACCESS_INT); \
6208 EA = tcg_temp_new(); \
6209 gen_addr_reg_index(ctx, EA); \
6210 rs = gen_avr_ptr(rS(ctx->opcode)); \
6211 gen_helper_lve##name (rs, EA); \
6212 tcg_temp_free(EA); \
6213 tcg_temp_free_ptr(rs); \
6216 #define GEN_VR_STVE(name, opc2, opc3) \
6217 static void gen_stve##name(DisasContext *ctx) \
6219 TCGv EA; \
6220 TCGv_ptr rs; \
6221 if (unlikely(!ctx->altivec_enabled)) { \
6222 gen_exception(ctx, POWERPC_EXCP_VPU); \
6223 return; \
6225 gen_set_access_type(ctx, ACCESS_INT); \
6226 EA = tcg_temp_new(); \
6227 gen_addr_reg_index(ctx, EA); \
6228 rs = gen_avr_ptr(rS(ctx->opcode)); \
6229 gen_helper_stve##name (rs, EA); \
6230 tcg_temp_free(EA); \
6231 tcg_temp_free_ptr(rs); \
6234 GEN_VR_LDX(lvx, 0x07, 0x03);
6235 /* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
6236 GEN_VR_LDX(lvxl, 0x07, 0x0B);
6238 GEN_VR_LVE(bx, 0x07, 0x00);
6239 GEN_VR_LVE(hx, 0x07, 0x01);
6240 GEN_VR_LVE(wx, 0x07, 0x02);
6242 GEN_VR_STX(svx, 0x07, 0x07);
6243 /* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
6244 GEN_VR_STX(svxl, 0x07, 0x0F);
6246 GEN_VR_STVE(bx, 0x07, 0x04);
6247 GEN_VR_STVE(hx, 0x07, 0x05);
6248 GEN_VR_STVE(wx, 0x07, 0x06);
6250 static void gen_lvsl(DisasContext *ctx)
6252 TCGv_ptr rd;
6253 TCGv EA;
6254 if (unlikely(!ctx->altivec_enabled)) {
6255 gen_exception(ctx, POWERPC_EXCP_VPU);
6256 return;
6258 EA = tcg_temp_new();
6259 gen_addr_reg_index(ctx, EA);
6260 rd = gen_avr_ptr(rD(ctx->opcode));
6261 gen_helper_lvsl(rd, EA);
6262 tcg_temp_free(EA);
6263 tcg_temp_free_ptr(rd);
6266 static void gen_lvsr(DisasContext *ctx)
6268 TCGv_ptr rd;
6269 TCGv EA;
6270 if (unlikely(!ctx->altivec_enabled)) {
6271 gen_exception(ctx, POWERPC_EXCP_VPU);
6272 return;
6274 EA = tcg_temp_new();
6275 gen_addr_reg_index(ctx, EA);
6276 rd = gen_avr_ptr(rD(ctx->opcode));
6277 gen_helper_lvsr(rd, EA);
6278 tcg_temp_free(EA);
6279 tcg_temp_free_ptr(rd);
6282 static void gen_mfvscr(DisasContext *ctx)
6284 TCGv_i32 t;
6285 if (unlikely(!ctx->altivec_enabled)) {
6286 gen_exception(ctx, POWERPC_EXCP_VPU);
6287 return;
6289 tcg_gen_movi_i64(cpu_avrh[rD(ctx->opcode)], 0);
6290 t = tcg_temp_new_i32();
6291 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, vscr));
6292 tcg_gen_extu_i32_i64(cpu_avrl[rD(ctx->opcode)], t);
6293 tcg_temp_free_i32(t);
6296 static void gen_mtvscr(DisasContext *ctx)
6298 TCGv_ptr p;
6299 if (unlikely(!ctx->altivec_enabled)) {
6300 gen_exception(ctx, POWERPC_EXCP_VPU);
6301 return;
6303 p = gen_avr_ptr(rD(ctx->opcode));
6304 gen_helper_mtvscr(p);
6305 tcg_temp_free_ptr(p);
6308 /* Logical operations */
6309 #define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
6310 static void glue(gen_, name)(DisasContext *ctx) \
6312 if (unlikely(!ctx->altivec_enabled)) { \
6313 gen_exception(ctx, POWERPC_EXCP_VPU); \
6314 return; \
6316 tcg_op(cpu_avrh[rD(ctx->opcode)], cpu_avrh[rA(ctx->opcode)], cpu_avrh[rB(ctx->opcode)]); \
6317 tcg_op(cpu_avrl[rD(ctx->opcode)], cpu_avrl[rA(ctx->opcode)], cpu_avrl[rB(ctx->opcode)]); \
6320 GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16);
6321 GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17);
6322 GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18);
6323 GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19);
6324 GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20);
6326 #define GEN_VXFORM(name, opc2, opc3) \
6327 static void glue(gen_, name)(DisasContext *ctx) \
6329 TCGv_ptr ra, rb, rd; \
6330 if (unlikely(!ctx->altivec_enabled)) { \
6331 gen_exception(ctx, POWERPC_EXCP_VPU); \
6332 return; \
6334 ra = gen_avr_ptr(rA(ctx->opcode)); \
6335 rb = gen_avr_ptr(rB(ctx->opcode)); \
6336 rd = gen_avr_ptr(rD(ctx->opcode)); \
6337 gen_helper_##name (rd, ra, rb); \
6338 tcg_temp_free_ptr(ra); \
6339 tcg_temp_free_ptr(rb); \
6340 tcg_temp_free_ptr(rd); \
6343 GEN_VXFORM(vaddubm, 0, 0);
6344 GEN_VXFORM(vadduhm, 0, 1);
6345 GEN_VXFORM(vadduwm, 0, 2);
6346 GEN_VXFORM(vsububm, 0, 16);
6347 GEN_VXFORM(vsubuhm, 0, 17);
6348 GEN_VXFORM(vsubuwm, 0, 18);
6349 GEN_VXFORM(vmaxub, 1, 0);
6350 GEN_VXFORM(vmaxuh, 1, 1);
6351 GEN_VXFORM(vmaxuw, 1, 2);
6352 GEN_VXFORM(vmaxsb, 1, 4);
6353 GEN_VXFORM(vmaxsh, 1, 5);
6354 GEN_VXFORM(vmaxsw, 1, 6);
6355 GEN_VXFORM(vminub, 1, 8);
6356 GEN_VXFORM(vminuh, 1, 9);
6357 GEN_VXFORM(vminuw, 1, 10);
6358 GEN_VXFORM(vminsb, 1, 12);
6359 GEN_VXFORM(vminsh, 1, 13);
6360 GEN_VXFORM(vminsw, 1, 14);
6361 GEN_VXFORM(vavgub, 1, 16);
6362 GEN_VXFORM(vavguh, 1, 17);
6363 GEN_VXFORM(vavguw, 1, 18);
6364 GEN_VXFORM(vavgsb, 1, 20);
6365 GEN_VXFORM(vavgsh, 1, 21);
6366 GEN_VXFORM(vavgsw, 1, 22);
6367 GEN_VXFORM(vmrghb, 6, 0);
6368 GEN_VXFORM(vmrghh, 6, 1);
6369 GEN_VXFORM(vmrghw, 6, 2);
6370 GEN_VXFORM(vmrglb, 6, 4);
6371 GEN_VXFORM(vmrglh, 6, 5);
6372 GEN_VXFORM(vmrglw, 6, 6);
6373 GEN_VXFORM(vmuloub, 4, 0);
6374 GEN_VXFORM(vmulouh, 4, 1);
6375 GEN_VXFORM(vmulosb, 4, 4);
6376 GEN_VXFORM(vmulosh, 4, 5);
6377 GEN_VXFORM(vmuleub, 4, 8);
6378 GEN_VXFORM(vmuleuh, 4, 9);
6379 GEN_VXFORM(vmulesb, 4, 12);
6380 GEN_VXFORM(vmulesh, 4, 13);
6381 GEN_VXFORM(vslb, 2, 4);
6382 GEN_VXFORM(vslh, 2, 5);
6383 GEN_VXFORM(vslw, 2, 6);
6384 GEN_VXFORM(vsrb, 2, 8);
6385 GEN_VXFORM(vsrh, 2, 9);
6386 GEN_VXFORM(vsrw, 2, 10);
6387 GEN_VXFORM(vsrab, 2, 12);
6388 GEN_VXFORM(vsrah, 2, 13);
6389 GEN_VXFORM(vsraw, 2, 14);
6390 GEN_VXFORM(vslo, 6, 16);
6391 GEN_VXFORM(vsro, 6, 17);
6392 GEN_VXFORM(vaddcuw, 0, 6);
6393 GEN_VXFORM(vsubcuw, 0, 22);
6394 GEN_VXFORM(vaddubs, 0, 8);
6395 GEN_VXFORM(vadduhs, 0, 9);
6396 GEN_VXFORM(vadduws, 0, 10);
6397 GEN_VXFORM(vaddsbs, 0, 12);
6398 GEN_VXFORM(vaddshs, 0, 13);
6399 GEN_VXFORM(vaddsws, 0, 14);
6400 GEN_VXFORM(vsububs, 0, 24);
6401 GEN_VXFORM(vsubuhs, 0, 25);
6402 GEN_VXFORM(vsubuws, 0, 26);
6403 GEN_VXFORM(vsubsbs, 0, 28);
6404 GEN_VXFORM(vsubshs, 0, 29);
6405 GEN_VXFORM(vsubsws, 0, 30);
6406 GEN_VXFORM(vrlb, 2, 0);
6407 GEN_VXFORM(vrlh, 2, 1);
6408 GEN_VXFORM(vrlw, 2, 2);
6409 GEN_VXFORM(vsl, 2, 7);
6410 GEN_VXFORM(vsr, 2, 11);
6411 GEN_VXFORM(vpkuhum, 7, 0);
6412 GEN_VXFORM(vpkuwum, 7, 1);
6413 GEN_VXFORM(vpkuhus, 7, 2);
6414 GEN_VXFORM(vpkuwus, 7, 3);
6415 GEN_VXFORM(vpkshus, 7, 4);
6416 GEN_VXFORM(vpkswus, 7, 5);
6417 GEN_VXFORM(vpkshss, 7, 6);
6418 GEN_VXFORM(vpkswss, 7, 7);
6419 GEN_VXFORM(vpkpx, 7, 12);
6420 GEN_VXFORM(vsum4ubs, 4, 24);
6421 GEN_VXFORM(vsum4sbs, 4, 28);
6422 GEN_VXFORM(vsum4shs, 4, 25);
6423 GEN_VXFORM(vsum2sws, 4, 26);
6424 GEN_VXFORM(vsumsws, 4, 30);
6425 GEN_VXFORM(vaddfp, 5, 0);
6426 GEN_VXFORM(vsubfp, 5, 1);
6427 GEN_VXFORM(vmaxfp, 5, 16);
6428 GEN_VXFORM(vminfp, 5, 17);
6430 #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
6431 static void glue(gen_, name)(DisasContext *ctx) \
6433 TCGv_ptr ra, rb, rd; \
6434 if (unlikely(!ctx->altivec_enabled)) { \
6435 gen_exception(ctx, POWERPC_EXCP_VPU); \
6436 return; \
6438 ra = gen_avr_ptr(rA(ctx->opcode)); \
6439 rb = gen_avr_ptr(rB(ctx->opcode)); \
6440 rd = gen_avr_ptr(rD(ctx->opcode)); \
6441 gen_helper_##opname (rd, ra, rb); \
6442 tcg_temp_free_ptr(ra); \
6443 tcg_temp_free_ptr(rb); \
6444 tcg_temp_free_ptr(rd); \
6447 #define GEN_VXRFORM(name, opc2, opc3) \
6448 GEN_VXRFORM1(name, name, #name, opc2, opc3) \
6449 GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
6451 GEN_VXRFORM(vcmpequb, 3, 0)
6452 GEN_VXRFORM(vcmpequh, 3, 1)
6453 GEN_VXRFORM(vcmpequw, 3, 2)
6454 GEN_VXRFORM(vcmpgtsb, 3, 12)
6455 GEN_VXRFORM(vcmpgtsh, 3, 13)
6456 GEN_VXRFORM(vcmpgtsw, 3, 14)
6457 GEN_VXRFORM(vcmpgtub, 3, 8)
6458 GEN_VXRFORM(vcmpgtuh, 3, 9)
6459 GEN_VXRFORM(vcmpgtuw, 3, 10)
6460 GEN_VXRFORM(vcmpeqfp, 3, 3)
6461 GEN_VXRFORM(vcmpgefp, 3, 7)
6462 GEN_VXRFORM(vcmpgtfp, 3, 11)
6463 GEN_VXRFORM(vcmpbfp, 3, 15)
6465 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
6466 static void glue(gen_, name)(DisasContext *ctx) \
6468 TCGv_ptr rd; \
6469 TCGv_i32 simm; \
6470 if (unlikely(!ctx->altivec_enabled)) { \
6471 gen_exception(ctx, POWERPC_EXCP_VPU); \
6472 return; \
6474 simm = tcg_const_i32(SIMM5(ctx->opcode)); \
6475 rd = gen_avr_ptr(rD(ctx->opcode)); \
6476 gen_helper_##name (rd, simm); \
6477 tcg_temp_free_i32(simm); \
6478 tcg_temp_free_ptr(rd); \
6481 GEN_VXFORM_SIMM(vspltisb, 6, 12);
6482 GEN_VXFORM_SIMM(vspltish, 6, 13);
6483 GEN_VXFORM_SIMM(vspltisw, 6, 14);
6485 #define GEN_VXFORM_NOA(name, opc2, opc3) \
6486 static void glue(gen_, name)(DisasContext *ctx) \
6488 TCGv_ptr rb, rd; \
6489 if (unlikely(!ctx->altivec_enabled)) { \
6490 gen_exception(ctx, POWERPC_EXCP_VPU); \
6491 return; \
6493 rb = gen_avr_ptr(rB(ctx->opcode)); \
6494 rd = gen_avr_ptr(rD(ctx->opcode)); \
6495 gen_helper_##name (rd, rb); \
6496 tcg_temp_free_ptr(rb); \
6497 tcg_temp_free_ptr(rd); \
6500 GEN_VXFORM_NOA(vupkhsb, 7, 8);
6501 GEN_VXFORM_NOA(vupkhsh, 7, 9);
6502 GEN_VXFORM_NOA(vupklsb, 7, 10);
6503 GEN_VXFORM_NOA(vupklsh, 7, 11);
6504 GEN_VXFORM_NOA(vupkhpx, 7, 13);
6505 GEN_VXFORM_NOA(vupklpx, 7, 15);
6506 GEN_VXFORM_NOA(vrefp, 5, 4);
6507 GEN_VXFORM_NOA(vrsqrtefp, 5, 5);
6508 GEN_VXFORM_NOA(vexptefp, 5, 6);
6509 GEN_VXFORM_NOA(vlogefp, 5, 7);
6510 GEN_VXFORM_NOA(vrfim, 5, 8);
6511 GEN_VXFORM_NOA(vrfin, 5, 9);
6512 GEN_VXFORM_NOA(vrfip, 5, 10);
6513 GEN_VXFORM_NOA(vrfiz, 5, 11);
6515 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
6516 static void glue(gen_, name)(DisasContext *ctx) \
6518 TCGv_ptr rd; \
6519 TCGv_i32 simm; \
6520 if (unlikely(!ctx->altivec_enabled)) { \
6521 gen_exception(ctx, POWERPC_EXCP_VPU); \
6522 return; \
6524 simm = tcg_const_i32(SIMM5(ctx->opcode)); \
6525 rd = gen_avr_ptr(rD(ctx->opcode)); \
6526 gen_helper_##name (rd, simm); \
6527 tcg_temp_free_i32(simm); \
6528 tcg_temp_free_ptr(rd); \
6531 #define GEN_VXFORM_UIMM(name, opc2, opc3) \
6532 static void glue(gen_, name)(DisasContext *ctx) \
6534 TCGv_ptr rb, rd; \
6535 TCGv_i32 uimm; \
6536 if (unlikely(!ctx->altivec_enabled)) { \
6537 gen_exception(ctx, POWERPC_EXCP_VPU); \
6538 return; \
6540 uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
6541 rb = gen_avr_ptr(rB(ctx->opcode)); \
6542 rd = gen_avr_ptr(rD(ctx->opcode)); \
6543 gen_helper_##name (rd, rb, uimm); \
6544 tcg_temp_free_i32(uimm); \
6545 tcg_temp_free_ptr(rb); \
6546 tcg_temp_free_ptr(rd); \
6549 GEN_VXFORM_UIMM(vspltb, 6, 8);
6550 GEN_VXFORM_UIMM(vsplth, 6, 9);
6551 GEN_VXFORM_UIMM(vspltw, 6, 10);
6552 GEN_VXFORM_UIMM(vcfux, 5, 12);
6553 GEN_VXFORM_UIMM(vcfsx, 5, 13);
6554 GEN_VXFORM_UIMM(vctuxs, 5, 14);
6555 GEN_VXFORM_UIMM(vctsxs, 5, 15);
6557 static void gen_vsldoi(DisasContext *ctx)
6559 TCGv_ptr ra, rb, rd;
6560 TCGv_i32 sh;
6561 if (unlikely(!ctx->altivec_enabled)) {
6562 gen_exception(ctx, POWERPC_EXCP_VPU);
6563 return;
6565 ra = gen_avr_ptr(rA(ctx->opcode));
6566 rb = gen_avr_ptr(rB(ctx->opcode));
6567 rd = gen_avr_ptr(rD(ctx->opcode));
6568 sh = tcg_const_i32(VSH(ctx->opcode));
6569 gen_helper_vsldoi (rd, ra, rb, sh);
6570 tcg_temp_free_ptr(ra);
6571 tcg_temp_free_ptr(rb);
6572 tcg_temp_free_ptr(rd);
6573 tcg_temp_free_i32(sh);
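/* VA-form pairs: the Rc bit of the opcode selects the second helper of each pair. */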
6576 #define GEN_VAFORM_PAIRED(name0, name1, opc2) \
6577 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
6579 TCGv_ptr ra, rb, rc, rd; \
6580 if (unlikely(!ctx->altivec_enabled)) { \
6581 gen_exception(ctx, POWERPC_EXCP_VPU); \
6582 return; \
6584 ra = gen_avr_ptr(rA(ctx->opcode)); \
6585 rb = gen_avr_ptr(rB(ctx->opcode)); \
6586 rc = gen_avr_ptr(rC(ctx->opcode)); \
6587 rd = gen_avr_ptr(rD(ctx->opcode)); \
6588 if (Rc(ctx->opcode)) { \
6589 gen_helper_##name1 (rd, ra, rb, rc); \
6590 } else { \
6591 gen_helper_##name0 (rd, ra, rb, rc); \
6593 tcg_temp_free_ptr(ra); \
6594 tcg_temp_free_ptr(rb); \
6595 tcg_temp_free_ptr(rc); \
6596 tcg_temp_free_ptr(rd); \
6599 GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16)
6601 static void gen_vmladduhm(DisasContext *ctx)
6603 TCGv_ptr ra, rb, rc, rd;
6604 if (unlikely(!ctx->altivec_enabled)) {
6605 gen_exception(ctx, POWERPC_EXCP_VPU);
6606 return;
6608 ra = gen_avr_ptr(rA(ctx->opcode));
6609 rb = gen_avr_ptr(rB(ctx->opcode));
6610 rc = gen_avr_ptr(rC(ctx->opcode));
6611 rd = gen_avr_ptr(rD(ctx->opcode));
6612 gen_helper_vmladduhm(rd, ra, rb, rc);
6613 tcg_temp_free_ptr(ra);
6614 tcg_temp_free_ptr(rb);
6615 tcg_temp_free_ptr(rc);
6616 tcg_temp_free_ptr(rd);
6619 GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18)
6620 GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19)
6621 GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20)
6622 GEN_VAFORM_PAIRED(vsel, vperm, 21)
6623 GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23)
6625 /*** SPE extension ***/
6626 /* Register moves */
6629 static inline void gen_evmra(DisasContext *ctx)
6632 if (unlikely(!ctx->spe_enabled)) {
6633 gen_exception(ctx, POWERPC_EXCP_APU);
6634 return;
6637 #if defined(TARGET_PPC64)
6638 /* rD := rA */
6639 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6641 /* spe_acc := rA */
6642 tcg_gen_st_i64(cpu_gpr[rA(ctx->opcode)],
6643 cpu_env,
6644 offsetof(CPUState, spe_acc));
6645 #else
6646 TCGv_i64 tmp = tcg_temp_new_i64();
6648 /* tmp := (rA_hi << 32) | rA_lo */
6649 tcg_gen_concat_i32_i64(tmp, cpu_gpr[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6651 /* spe_acc := tmp */
6652 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));
6653 tcg_temp_free_i64(tmp);
6655 /* rD := rA */
6656 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6657 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6658 #endif
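/* Helpers to move a 64-bit SPE value in and out of a GPR: on 32-bit targets
 * the low word lives in the GPR and the high word in the shadow GPRh. */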
6661 static inline void gen_load_gpr64(TCGv_i64 t, int reg)
6663 #if defined(TARGET_PPC64)
6664 tcg_gen_mov_i64(t, cpu_gpr[reg]);
6665 #else
6666 tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
6667 #endif
6670 static inline void gen_store_gpr64(int reg, TCGv_i64 t)
6672 #if defined(TARGET_PPC64)
6673 tcg_gen_mov_i64(cpu_gpr[reg], t);
6674 #else
6675 TCGv_i64 tmp = tcg_temp_new_i64();
6676 tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
6677 tcg_gen_shri_i64(tmp, t, 32);
6678 tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
6679 tcg_temp_free_i64(tmp);
6680 #endif
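/* Pair two SPE handlers under a single opcode: the Rc bit selects name1 over name0. */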
6683 #define GEN_SPE(name0, name1, opc2, opc3, inval, type) \
6684 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
6686 if (Rc(ctx->opcode)) \
6687 gen_##name1(ctx); \
6688 else \
6689 gen_##name0(ctx); \
6692 /* Handler for undefined SPE opcodes */
6693 static inline void gen_speundef(DisasContext *ctx)
6695 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6698 /* SPE logic */
6699 #if defined(TARGET_PPC64)
6700 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
6701 static inline void gen_##name(DisasContext *ctx) \
6703 if (unlikely(!ctx->spe_enabled)) { \
6704 gen_exception(ctx, POWERPC_EXCP_APU); \
6705 return; \
6707 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6708 cpu_gpr[rB(ctx->opcode)]); \
6710 #else
6711 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
6712 static inline void gen_##name(DisasContext *ctx) \
6714 if (unlikely(!ctx->spe_enabled)) { \
6715 gen_exception(ctx, POWERPC_EXCP_APU); \
6716 return; \
6718 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6719 cpu_gpr[rB(ctx->opcode)]); \
6720 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6721 cpu_gprh[rB(ctx->opcode)]); \
6723 #endif
6725 GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
6726 GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
6727 GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
6728 GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
6729 GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
6730 GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
6731 GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
6732 GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);
6734 /* SPE logic immediate */
6735 #if defined(TARGET_PPC64)
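/* On 64-bit targets both SPE halves share one GPR, so the shift/rotate by an
 * immediate is applied to each 32-bit half separately and the results are
 * recombined with tcg_gen_concat_i32_i64. */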
6736 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6737 static inline void gen_##name(DisasContext *ctx) \
6739 if (unlikely(!ctx->spe_enabled)) { \
6740 gen_exception(ctx, POWERPC_EXCP_APU); \
6741 return; \
6743 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6744 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6745 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6746 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6747 tcg_opi(t0, t0, rB(ctx->opcode)); \
6748 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6749 tcg_gen_trunc_i64_i32(t1, t2); \
6750 tcg_temp_free_i64(t2); \
6751 tcg_opi(t1, t1, rB(ctx->opcode)); \
6752 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6753 tcg_temp_free_i32(t0); \
6754 tcg_temp_free_i32(t1); \
6756 #else
6757 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6758 static inline void gen_##name(DisasContext *ctx) \
6760 if (unlikely(!ctx->spe_enabled)) { \
6761 gen_exception(ctx, POWERPC_EXCP_APU); \
6762 return; \
6764 tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6765 rB(ctx->opcode)); \
6766 tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6767 rB(ctx->opcode)); \
6769 #endif
6770 GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
6771 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
6772 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
6773 GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
6775 /* SPE arithmetic */
6776 #if defined(TARGET_PPC64)
6777 #define GEN_SPEOP_ARITH1(name, tcg_op) \
6778 static inline void gen_##name(DisasContext *ctx) \
6780 if (unlikely(!ctx->spe_enabled)) { \
6781 gen_exception(ctx, POWERPC_EXCP_APU); \
6782 return; \
6784 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6785 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6786 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6787 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6788 tcg_op(t0, t0); \
6789 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6790 tcg_gen_trunc_i64_i32(t1, t2); \
6791 tcg_temp_free_i64(t2); \
6792 tcg_op(t1, t1); \
6793 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6794 tcg_temp_free_i32(t0); \
6795 tcg_temp_free_i32(t1); \
6797 #else
6798 #define GEN_SPEOP_ARITH1(name, tcg_op) \
6799 static inline void gen_##name(DisasContext *ctx) \
6801 if (unlikely(!ctx->spe_enabled)) { \
6802 gen_exception(ctx, POWERPC_EXCP_APU); \
6803 return; \
6805 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); \
6806 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); \
6808 #endif
6810 static inline void gen_op_evabs(TCGv_i32 ret, TCGv_i32 arg1)
6812 int l1 = gen_new_label();
6813 int l2 = gen_new_label();
6815 tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
6816 tcg_gen_neg_i32(ret, arg1);
6817 tcg_gen_br(l2);
6818 gen_set_label(l1);
6819 tcg_gen_mov_i32(ret, arg1);
6820 gen_set_label(l2);
6822 GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
6823 GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
6824 GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
6825 GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
6826 static inline void gen_op_evrndw(TCGv_i32 ret, TCGv_i32 arg1)
6828 tcg_gen_addi_i32(ret, arg1, 0x8000);
6829 tcg_gen_ext16u_i32(ret, ret);
6831 GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
6832 GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
6833 GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
6835 #if defined(TARGET_PPC64)
6836 #define GEN_SPEOP_ARITH2(name, tcg_op) \
6837 static inline void gen_##name(DisasContext *ctx) \
6839 if (unlikely(!ctx->spe_enabled)) { \
6840 gen_exception(ctx, POWERPC_EXCP_APU); \
6841 return; \
6843 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6844 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6845 TCGv_i32 t2 = tcg_temp_local_new_i32(); \
6846 TCGv_i64 t3 = tcg_temp_local_new_i64(); \
6847 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6848 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]); \
6849 tcg_op(t0, t0, t2); \
6850 tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32); \
6851 tcg_gen_trunc_i64_i32(t1, t3); \
6852 tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32); \
6853 tcg_gen_trunc_i64_i32(t2, t3); \
6854 tcg_temp_free_i64(t3); \
6855 tcg_op(t1, t1, t2); \
6856 tcg_temp_free_i32(t2); \
6857 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6858 tcg_temp_free_i32(t0); \
6859 tcg_temp_free_i32(t1); \
6861 #else
6862 #define GEN_SPEOP_ARITH2(name, tcg_op) \
6863 static inline void gen_##name(DisasContext *ctx) \
6865 if (unlikely(!ctx->spe_enabled)) { \
6866 gen_exception(ctx, POWERPC_EXCP_APU); \
6867 return; \
6869 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6870 cpu_gpr[rB(ctx->opcode)]); \
6871 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6872 cpu_gprh[rB(ctx->opcode)]); \
6874 #endif
6876 static inline void gen_op_evsrwu(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6878 TCGv_i32 t0;
6879 int l1, l2;
6881 l1 = gen_new_label();
6882 l2 = gen_new_label();
6883 t0 = tcg_temp_local_new_i32();
6884 /* No error here: 6 bits are used */
6885 tcg_gen_andi_i32(t0, arg2, 0x3F);
6886 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6887 tcg_gen_shr_i32(ret, arg1, t0);
6888 tcg_gen_br(l2);
6889 gen_set_label(l1);
6890 tcg_gen_movi_i32(ret, 0);
6891 gen_set_label(l2);
6892 tcg_temp_free_i32(t0);
6894 GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
6895 static inline void gen_op_evsrws(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6897 TCGv_i32 t0;
6898 int l1, l2;
6900 l1 = gen_new_label();
6901 l2 = gen_new_label();
6902 t0 = tcg_temp_local_new_i32();
6903 /* No error here: 6 bits are used */
6904 tcg_gen_andi_i32(t0, arg2, 0x3F);
6905 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6906 tcg_gen_sar_i32(ret, arg1, t0);
6907 tcg_gen_br(l2);
6908 gen_set_label(l1);
6909 tcg_gen_movi_i32(ret, 0);
6910 gen_set_label(l2);
6911 tcg_temp_free_i32(t0);
6913 GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
6914 static inline void gen_op_evslw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6916 TCGv_i32 t0;
6917 int l1, l2;
6919 l1 = gen_new_label();
6920 l2 = gen_new_label();
6921 t0 = tcg_temp_local_new_i32();
6922 /* No error here: 6 bits are used */
6923 tcg_gen_andi_i32(t0, arg2, 0x3F);
6924 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6925 tcg_gen_shl_i32(ret, arg1, t0);
6926 tcg_gen_br(l2);
6927 gen_set_label(l1);
6928 tcg_gen_movi_i32(ret, 0);
6929 gen_set_label(l2);
6930 tcg_temp_free_i32(t0);
6932 GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
6933 static inline void gen_op_evrlw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6935 TCGv_i32 t0 = tcg_temp_new_i32();
6936 tcg_gen_andi_i32(t0, arg2, 0x1F);
6937 tcg_gen_rotl_i32(ret, arg1, t0);
6938 tcg_temp_free_i32(t0);
6940 GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
6941 static inline void gen_evmergehi(DisasContext *ctx)
6943 if (unlikely(!ctx->spe_enabled)) {
6944 gen_exception(ctx, POWERPC_EXCP_APU);
6945 return;
6947 #if defined(TARGET_PPC64)
6948 TCGv t0 = tcg_temp_new();
6949 TCGv t1 = tcg_temp_new();
6950 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
6951 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
6952 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6953 tcg_temp_free(t0);
6954 tcg_temp_free(t1);
6955 #else
6956 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
6957 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6958 #endif
6960 GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
6961 static inline void gen_op_evsubf(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6963 tcg_gen_sub_i32(ret, arg2, arg1);
6965 GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);
6967 /* SPE arithmetic immediate */
6968 #if defined(TARGET_PPC64)
6969 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
6970 static inline void gen_##name(DisasContext *ctx) \
6972 if (unlikely(!ctx->spe_enabled)) { \
6973 gen_exception(ctx, POWERPC_EXCP_APU); \
6974 return; \
6976 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6977 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6978 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6979 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
6980 tcg_op(t0, t0, rA(ctx->opcode)); \
6981 tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
6982 tcg_gen_trunc_i64_i32(t1, t2); \
6983 tcg_temp_free_i64(t2); \
6984 tcg_op(t1, t1, rA(ctx->opcode)); \
6985 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6986 tcg_temp_free_i32(t0); \
6987 tcg_temp_free_i32(t1); \
6989 #else
6990 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
6991 static inline void gen_##name(DisasContext *ctx) \
6993 if (unlikely(!ctx->spe_enabled)) { \
6994 gen_exception(ctx, POWERPC_EXCP_APU); \
6995 return; \
6997 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
6998 rA(ctx->opcode)); \
6999 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)], \
7000 rA(ctx->opcode)); \
7002 #endif
7003 GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
7004 GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);
7006 /* SPE comparison */
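/* Each compare sets four bits in the target CR field: one for the high-word
 * result, one for the low-word result, plus their OR and AND. */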
7007 #if defined(TARGET_PPC64)
7008 #define GEN_SPEOP_COMP(name, tcg_cond) \
7009 static inline void gen_##name(DisasContext *ctx) \
7011 if (unlikely(!ctx->spe_enabled)) { \
7012 gen_exception(ctx, POWERPC_EXCP_APU); \
7013 return; \
7015 int l1 = gen_new_label(); \
7016 int l2 = gen_new_label(); \
7017 int l3 = gen_new_label(); \
7018 int l4 = gen_new_label(); \
7019 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7020 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7021 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7022 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7023 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7024 tcg_gen_brcond_i32(tcg_cond, t0, t1, l1); \
7025 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \
7026 tcg_gen_br(l2); \
7027 gen_set_label(l1); \
7028 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
7029 CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
7030 gen_set_label(l2); \
7031 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
7032 tcg_gen_trunc_i64_i32(t0, t2); \
7033 tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
7034 tcg_gen_trunc_i64_i32(t1, t2); \
7035 tcg_temp_free_i64(t2); \
7036 tcg_gen_brcond_i32(tcg_cond, t0, t1, l3); \
7037 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7038 ~(CRF_CH | CRF_CH_AND_CL)); \
7039 tcg_gen_br(l4); \
7040 gen_set_label(l3); \
7041 tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7042 CRF_CH | CRF_CH_OR_CL); \
7043 gen_set_label(l4); \
7044 tcg_temp_free_i32(t0); \
7045 tcg_temp_free_i32(t1); \
7047 #else
7048 #define GEN_SPEOP_COMP(name, tcg_cond) \
7049 static inline void gen_##name(DisasContext *ctx) \
7051 if (unlikely(!ctx->spe_enabled)) { \
7052 gen_exception(ctx, POWERPC_EXCP_APU); \
7053 return; \
7055 int l1 = gen_new_label(); \
7056 int l2 = gen_new_label(); \
7057 int l3 = gen_new_label(); \
7058 int l4 = gen_new_label(); \
7060 tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)], \
7061 cpu_gpr[rB(ctx->opcode)], l1); \
7062 tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0); \
7063 tcg_gen_br(l2); \
7064 gen_set_label(l1); \
7065 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
7066 CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
7067 gen_set_label(l2); \
7068 tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)], \
7069 cpu_gprh[rB(ctx->opcode)], l3); \
7070 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7071 ~(CRF_CH | CRF_CH_AND_CL)); \
7072 tcg_gen_br(l4); \
7073 gen_set_label(l3); \
7074 tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7075 CRF_CH | CRF_CH_OR_CL); \
7076 gen_set_label(l4); \
7078 #endif
7079 GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
7080 GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
7081 GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
7082 GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
7083 GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
7085 /* SPE misc */
7086 static inline void gen_brinc(DisasContext *ctx)
7088 /* Note: brinc is usable even if SPE is disabled */
7089 gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
7090 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7092 static inline void gen_evmergelo(DisasContext *ctx)
7094 if (unlikely(!ctx->spe_enabled)) {
7095 gen_exception(ctx, POWERPC_EXCP_APU);
7096 return;
7098 #if defined(TARGET_PPC64)
7099 TCGv t0 = tcg_temp_new();
7100 TCGv t1 = tcg_temp_new();
7101 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
7102 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
7103 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7104 tcg_temp_free(t0);
7105 tcg_temp_free(t1);
7106 #else
7107 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7108 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7109 #endif
7111 static inline void gen_evmergehilo(DisasContext *ctx)
7113 if (unlikely(!ctx->spe_enabled)) {
7114 gen_exception(ctx, POWERPC_EXCP_APU);
7115 return;
7117 #if defined(TARGET_PPC64)
7118 TCGv t0 = tcg_temp_new();
7119 TCGv t1 = tcg_temp_new();
7120 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
7121 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7122 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7123 tcg_temp_free(t0);
7124 tcg_temp_free(t1);
7125 #else
7126 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7127 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7128 #endif
7130 static inline void gen_evmergelohi(DisasContext *ctx)
7132 if (unlikely(!ctx->spe_enabled)) {
7133 gen_exception(ctx, POWERPC_EXCP_APU);
7134 return;
7136 #if defined(TARGET_PPC64)
7137 TCGv t0 = tcg_temp_new();
7138 TCGv t1 = tcg_temp_new();
7139 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
7140 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
7141 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7142 tcg_temp_free(t0);
7143 tcg_temp_free(t1);
7144 #else
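/* The source and destination halves can alias when rD == rA, so copy rA
 * through a temporary first. */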
7145 if (rD(ctx->opcode) == rA(ctx->opcode)) {
7146 TCGv_i32 tmp = tcg_temp_new_i32();
7147 tcg_gen_mov_i32(tmp, cpu_gpr[rA(ctx->opcode)]);
7148 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7149 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], tmp);
7150 tcg_temp_free_i32(tmp);
7151 } else {
7152 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7153 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7155 #endif
7157 static inline void gen_evsplati(DisasContext *ctx)
7159 uint64_t imm = ((int32_t)(rA(ctx->opcode) << 27)) >> 27;
7161 #if defined(TARGET_PPC64)
7162 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
7163 #else
7164 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
7165 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
7166 #endif
7168 static inline void gen_evsplatfi(DisasContext *ctx)
7170 uint64_t imm = rA(ctx->opcode) << 27;
7172 #if defined(TARGET_PPC64)
7173 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
7174 #else
7175 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
7176 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
7177 #endif
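/* evsel: for each half of the result, pick the corresponding half of rA when
 * the matching bit of the selected CR field is set, otherwise the half of rB. */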
7180 static inline void gen_evsel(DisasContext *ctx)
7182 int l1 = gen_new_label();
7183 int l2 = gen_new_label();
7184 int l3 = gen_new_label();
7185 int l4 = gen_new_label();
7186 TCGv_i32 t0 = tcg_temp_local_new_i32();
7187 #if defined(TARGET_PPC64)
7188 TCGv t1 = tcg_temp_local_new();
7189 TCGv t2 = tcg_temp_local_new();
7190 #endif
7191 tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
7192 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
7193 #if defined(TARGET_PPC64)
7194 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7195 #else
7196 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7197 #endif
7198 tcg_gen_br(l2);
7199 gen_set_label(l1);
7200 #if defined(TARGET_PPC64)
7201 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7202 #else
7203 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7204 #endif
7205 gen_set_label(l2);
7206 tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
7207 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
7208 #if defined(TARGET_PPC64)
7209 tcg_gen_ext32u_tl(t2, cpu_gpr[rA(ctx->opcode)]);
7210 #else
7211 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7212 #endif
7213 tcg_gen_br(l4);
7214 gen_set_label(l3);
7215 #if defined(TARGET_PPC64)
7216 tcg_gen_ext32u_tl(t2, cpu_gpr[rB(ctx->opcode)]);
7217 #else
7218 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7219 #endif
7220 gen_set_label(l4);
7221 tcg_temp_free_i32(t0);
7222 #if defined(TARGET_PPC64)
7223 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
7224 tcg_temp_free(t1);
7225 tcg_temp_free(t2);
7226 #endif
7229 static void gen_evsel0(DisasContext *ctx)
7231 gen_evsel(ctx);
7234 static void gen_evsel1(DisasContext *ctx)
7236 gen_evsel(ctx);
7239 static void gen_evsel2(DisasContext *ctx)
7241 gen_evsel(ctx);
7244 static void gen_evsel3(DisasContext *ctx)
7246 gen_evsel(ctx);
7249 /* Multiply */
7251 static inline void gen_evmwumi(DisasContext *ctx)
7253 TCGv_i64 t0, t1;
7255 if (unlikely(!ctx->spe_enabled)) {
7256 gen_exception(ctx, POWERPC_EXCP_APU);
7257 return;
7260 t0 = tcg_temp_new_i64();
7261 t1 = tcg_temp_new_i64();
7263 /* t0 := rA; t1 := rB */
7264 #if defined(TARGET_PPC64)
7265 tcg_gen_ext32u_tl(t0, cpu_gpr[rA(ctx->opcode)]);
7266 tcg_gen_ext32u_tl(t1, cpu_gpr[rB(ctx->opcode)]);
7267 #else
7268 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
7269 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
7270 #endif
7272 tcg_gen_mul_i64(t0, t0, t1); /* t0 := rA * rB */
7274 gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */
7276 tcg_temp_free_i64(t0);
7277 tcg_temp_free_i64(t1);
7280 static inline void gen_evmwumia(DisasContext *ctx)
7282 TCGv_i64 tmp;
7284 if (unlikely(!ctx->spe_enabled)) {
7285 gen_exception(ctx, POWERPC_EXCP_APU);
7286 return;
7289 gen_evmwumi(ctx); /* rD := rA * rB */
7291 tmp = tcg_temp_new_i64();
7293 /* acc := rD */
7294 gen_load_gpr64(tmp, rD(ctx->opcode));
7295 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));
7296 tcg_temp_free_i64(tmp);
7299 static inline void gen_evmwumiaa(DisasContext *ctx)
7301 TCGv_i64 acc;
7302 TCGv_i64 tmp;
7304 if (unlikely(!ctx->spe_enabled)) {
7305 gen_exception(ctx, POWERPC_EXCP_APU);
7306 return;
7309 gen_evmwumi(ctx); /* rD := rA * rB */
7311 acc = tcg_temp_new_i64();
7312 tmp = tcg_temp_new_i64();
7314 /* tmp := rD */
7315 gen_load_gpr64(tmp, rD(ctx->opcode));
7317 /* Load acc */
7318 tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUState, spe_acc));
7320 /* acc := tmp + acc */
7321 tcg_gen_add_i64(acc, acc, tmp);
7323 /* Store acc */
7324 tcg_gen_st_i64(acc, cpu_env, offsetof(CPUState, spe_acc));
7326 /* rD := acc */
7327 gen_store_gpr64(rD(ctx->opcode), acc);
7329 tcg_temp_free_i64(acc);
7330 tcg_temp_free_i64(tmp);
7333 static inline void gen_evmwsmi(DisasContext *ctx)
7335 TCGv_i64 t0, t1;
7337 if (unlikely(!ctx->spe_enabled)) {
7338 gen_exception(ctx, POWERPC_EXCP_APU);
7339 return;
7342 t0 = tcg_temp_new_i64();
7343 t1 = tcg_temp_new_i64();
7345 /* t0 := rA; t1 := rB */
7346 #if defined(TARGET_PPC64)
7347 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
7348 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
7349 #else
7350 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
7351 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
7352 #endif
7354 tcg_gen_mul_i64(t0, t0, t1); /* t0 := rA * rB */
7356 gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */
7358 tcg_temp_free_i64(t0);
7359 tcg_temp_free_i64(t1);
7362 static inline void gen_evmwsmia(DisasContext *ctx)
7364 TCGv_i64 tmp;
7366 gen_evmwsmi(ctx); /* rD := rA * rB */
7368 tmp = tcg_temp_new_i64();
7370 /* acc := rD */
7371 gen_load_gpr64(tmp, rD(ctx->opcode));
7372 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));
7374 tcg_temp_free_i64(tmp);
7377 static inline void gen_evmwsmiaa(DisasContext *ctx)
7379 TCGv_i64 acc;
7380 TCGv_i64 tmp;
7382 gen_evmwsmi(ctx); /* rD := rA * rB */
7384 acc = tcg_temp_new_i64();
7385 tmp = tcg_temp_new_i64();
7387 /* tmp := rD */
7388 gen_load_gpr64(tmp, rD(ctx->opcode));
7390 /* Load acc */
7391 tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUState, spe_acc));
7393 /* acc := tmp + acc */
7394 tcg_gen_add_i64(acc, acc, tmp);
7396 /* Store acc */
7397 tcg_gen_st_i64(acc, cpu_env, offsetof(CPUState, spe_acc));
7399 /* rD := acc */
7400 gen_store_gpr64(rD(ctx->opcode), acc);
7402 tcg_temp_free_i64(acc);
7403 tcg_temp_free_i64(tmp);
7406 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE); ////
7407 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE);
7408 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE); ////
7409 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE);
7410 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE); ////
7411 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE); ////
7412 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE); ////
7413 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE); //
7414 GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, PPC_SPE);
7415 GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE); ////
7416 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE); ////
7417 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE); ////
7418 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE); ////
7419 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE);
7420 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE);
7421 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE);
7422 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE); ////
7423 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE); ////
7424 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE); ////
7425 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE);
7426 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE); ////
7427 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE);
7428 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE); //
7429 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE);
7430 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE); ////
7431 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE); ////
7432 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); ////
7433 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); ////
7434 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); ////
7436 /* SPE loads and stores */
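/* EA = (rA|0) + (UIMM << sh): the 5-bit immediate comes from the rB field of
 * the opcode and is scaled by the access size. */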
7437 static inline void gen_addr_spe_imm_index(DisasContext *ctx, TCGv EA, int sh)
7439 target_ulong uimm = rB(ctx->opcode);
7441 if (rA(ctx->opcode) == 0) {
7442 tcg_gen_movi_tl(EA, uimm << sh);
7443 } else {
7444 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh);
7445 #if defined(TARGET_PPC64)
7446 if (!ctx->sf_mode) {
7447 tcg_gen_ext32u_tl(EA, EA);
7449 #endif
7453 static inline void gen_op_evldd(DisasContext *ctx, TCGv addr)
7455 #if defined(TARGET_PPC64)
7456 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7457 #else
7458 TCGv_i64 t0 = tcg_temp_new_i64();
7459 gen_qemu_ld64(ctx, t0, addr);
7460 tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0);
7461 tcg_gen_shri_i64(t0, t0, 32);
7462 tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0);
7463 tcg_temp_free_i64(t0);
7464 #endif
7467 static inline void gen_op_evldw(DisasContext *ctx, TCGv addr)
7469 #if defined(TARGET_PPC64)
7470 TCGv t0 = tcg_temp_new();
7471 gen_qemu_ld32u(ctx, t0, addr);
7472 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7473 gen_addr_add(ctx, addr, addr, 4);
7474 gen_qemu_ld32u(ctx, t0, addr);
7475 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7476 tcg_temp_free(t0);
7477 #else
7478 gen_qemu_ld32u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7479 gen_addr_add(ctx, addr, addr, 4);
7480 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7481 #endif
7484 static inline void gen_op_evldh(DisasContext *ctx, TCGv addr)
7486 TCGv t0 = tcg_temp_new();
7487 #if defined(TARGET_PPC64)
7488 gen_qemu_ld16u(ctx, t0, addr);
7489 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7490 gen_addr_add(ctx, addr, addr, 2);
7491 gen_qemu_ld16u(ctx, t0, addr);
7492 tcg_gen_shli_tl(t0, t0, 32);
7493 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7494 gen_addr_add(ctx, addr, addr, 2);
7495 gen_qemu_ld16u(ctx, t0, addr);
7496 tcg_gen_shli_tl(t0, t0, 16);
7497 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7498 gen_addr_add(ctx, addr, addr, 2);
7499 gen_qemu_ld16u(ctx, t0, addr);
7500 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7501 #else
7502 gen_qemu_ld16u(ctx, t0, addr);
7503 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7504 gen_addr_add(ctx, addr, addr, 2);
7505 gen_qemu_ld16u(ctx, t0, addr);
7506 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
7507 gen_addr_add(ctx, addr, addr, 2);
7508 gen_qemu_ld16u(ctx, t0, addr);
7509 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7510 gen_addr_add(ctx, addr, addr, 2);
7511 gen_qemu_ld16u(ctx, t0, addr);
7512 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7513 #endif
7514 tcg_temp_free(t0);
7517 static inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr)
7519 TCGv t0 = tcg_temp_new();
7520 gen_qemu_ld16u(ctx, t0, addr);
7521 #if defined(TARGET_PPC64)
7522 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7523 tcg_gen_shli_tl(t0, t0, 16);
7524 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7525 #else
7526 tcg_gen_shli_tl(t0, t0, 16);
7527 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7528 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7529 #endif
7530 tcg_temp_free(t0);
7533 static inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr)
7535 TCGv t0 = tcg_temp_new();
7536 gen_qemu_ld16u(ctx, t0, addr);
7537 #if defined(TARGET_PPC64)
7538 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7539 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7540 #else
7541 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7542 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7543 #endif
7544 tcg_temp_free(t0);
7547 static inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr)
7549 TCGv t0 = tcg_temp_new();
7550 gen_qemu_ld16s(ctx, t0, addr);
7551 #if defined(TARGET_PPC64)
7552 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7553 tcg_gen_ext32u_tl(t0, t0);
7554 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7555 #else
7556 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7557 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7558 #endif
7559 tcg_temp_free(t0);
7562 static inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr)
7564 TCGv t0 = tcg_temp_new();
7565 #if defined(TARGET_PPC64)
7566 gen_qemu_ld16u(ctx, t0, addr);
7567 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7568 gen_addr_add(ctx, addr, addr, 2);
7569 gen_qemu_ld16u(ctx, t0, addr);
7570 tcg_gen_shli_tl(t0, t0, 16);
7571 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7572 #else
7573 gen_qemu_ld16u(ctx, t0, addr);
7574 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7575 gen_addr_add(ctx, addr, addr, 2);
7576 gen_qemu_ld16u(ctx, t0, addr);
7577 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7578 #endif
7579 tcg_temp_free(t0);
7582 static inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr)
7584 #if defined(TARGET_PPC64)
7585 TCGv t0 = tcg_temp_new();
7586 gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7587 gen_addr_add(ctx, addr, addr, 2);
7588 gen_qemu_ld16u(ctx, t0, addr);
7589 tcg_gen_shli_tl(t0, t0, 32);
7590 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7591 tcg_temp_free(t0);
7592 #else
7593 gen_qemu_ld16u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7594 gen_addr_add(ctx, addr, addr, 2);
7595 gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7596 #endif
7599 static inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr)
7601 #if defined(TARGET_PPC64)
7602 TCGv t0 = tcg_temp_new();
7603 gen_qemu_ld16s(ctx, t0, addr);
7604 tcg_gen_ext32u_tl(cpu_gpr[rD(ctx->opcode)], t0);
7605 gen_addr_add(ctx, addr, addr, 2);
7606 gen_qemu_ld16s(ctx, t0, addr);
7607 tcg_gen_shli_tl(t0, t0, 32);
7608 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7609 tcg_temp_free(t0);
7610 #else
7611 gen_qemu_ld16s(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7612 gen_addr_add(ctx, addr, addr, 2);
7613 gen_qemu_ld16s(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7614 #endif
7617 static inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr)
7619 TCGv t0 = tcg_temp_new();
7620 gen_qemu_ld32u(ctx, t0, addr);
7621 #if defined(TARGET_PPC64)
7622 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7623 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7624 #else
7625 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7626 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7627 #endif
7628 tcg_temp_free(t0);
7631 static inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr)
7633 TCGv t0 = tcg_temp_new();
7634 #if defined(TARGET_PPC64)
7635 gen_qemu_ld16u(ctx, t0, addr);
7636 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7637 tcg_gen_shli_tl(t0, t0, 32);
7638 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7639 gen_addr_add(ctx, addr, addr, 2);
7640 gen_qemu_ld16u(ctx, t0, addr);
7641 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7642 tcg_gen_shli_tl(t0, t0, 16);
7643 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7644 #else
7645 gen_qemu_ld16u(ctx, t0, addr);
7646 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7647 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
7648 gen_addr_add(ctx, addr, addr, 2);
7649 gen_qemu_ld16u(ctx, t0, addr);
7650 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7651 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7652 #endif
7653 tcg_temp_free(t0);
7656 static inline void gen_op_evstdd(DisasContext *ctx, TCGv addr)
7658 #if defined(TARGET_PPC64)
7659 gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7660 #else
7661 TCGv_i64 t0 = tcg_temp_new_i64();
7662 tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)], cpu_gprh[rS(ctx->opcode)]);
7663 gen_qemu_st64(ctx, t0, addr);
7664 tcg_temp_free_i64(t0);
7665 #endif
7668 static inline void gen_op_evstdw(DisasContext *ctx, TCGv addr)
7670 #if defined(TARGET_PPC64)
7671 TCGv t0 = tcg_temp_new();
7672 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7673 gen_qemu_st32(ctx, t0, addr);
7674 tcg_temp_free(t0);
7675 #else
7676 gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7677 #endif
7678 gen_addr_add(ctx, addr, addr, 4);
7679 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7682 static inline void gen_op_evstdh(DisasContext *ctx, TCGv addr)
7684 TCGv t0 = tcg_temp_new();
7685 #if defined(TARGET_PPC64)
7686 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
7687 #else
7688 tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
7689 #endif
7690 gen_qemu_st16(ctx, t0, addr);
7691 gen_addr_add(ctx, addr, addr, 2);
7692 #if defined(TARGET_PPC64)
7693 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7694 gen_qemu_st16(ctx, t0, addr);
7695 #else
7696 gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7697 #endif
7698 gen_addr_add(ctx, addr, addr, 2);
7699 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
7700 gen_qemu_st16(ctx, t0, addr);
7701 tcg_temp_free(t0);
7702 gen_addr_add(ctx, addr, addr, 2);
7703 gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7706 static inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr)
7708 TCGv t0 = tcg_temp_new();
7709 #if defined(TARGET_PPC64)
7710 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
7711 #else
7712 tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
7713 #endif
7714 gen_qemu_st16(ctx, t0, addr);
7715 gen_addr_add(ctx, addr, addr, 2);
7716 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
7717 gen_qemu_st16(ctx, t0, addr);
7718 tcg_temp_free(t0);
7721 static inline void gen_op_evstwho(DisasContext *ctx, TCGv addr)
7723 #if defined(TARGET_PPC64)
7724 TCGv t0 = tcg_temp_new();
7725 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7726 gen_qemu_st16(ctx, t0, addr);
7727 tcg_temp_free(t0);
7728 #else
7729 gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7730 #endif
7731 gen_addr_add(ctx, addr, addr, 2);
7732 gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7735 static inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr)
7737 #if defined(TARGET_PPC64)
7738 TCGv t0 = tcg_temp_new();
7739 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7740 gen_qemu_st32(ctx, t0, addr);
7741 tcg_temp_free(t0);
7742 #else
7743 gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7744 #endif
7747 static inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr)
7749 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
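/* Instantiate the SPE load/store handlers: the Rc bit of the opcode selects
 * the immediate-offset addressing form over the register-indexed one. */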
7752 #define GEN_SPEOP_LDST(name, opc2, sh) \
7753 static void glue(gen_, name)(DisasContext *ctx) \
7755 TCGv t0; \
7756 if (unlikely(!ctx->spe_enabled)) { \
7757 gen_exception(ctx, POWERPC_EXCP_APU); \
7758 return; \
7760 gen_set_access_type(ctx, ACCESS_INT); \
7761 t0 = tcg_temp_new(); \
7762 if (Rc(ctx->opcode)) { \
7763 gen_addr_spe_imm_index(ctx, t0, sh); \
7764 } else { \
7765 gen_addr_reg_index(ctx, t0); \
7767 gen_op_##name(ctx, t0); \
7768 tcg_temp_free(t0); \
7771 GEN_SPEOP_LDST(evldd, 0x00, 3);
7772 GEN_SPEOP_LDST(evldw, 0x01, 3);
7773 GEN_SPEOP_LDST(evldh, 0x02, 3);
7774 GEN_SPEOP_LDST(evlhhesplat, 0x04, 1);
7775 GEN_SPEOP_LDST(evlhhousplat, 0x06, 1);
7776 GEN_SPEOP_LDST(evlhhossplat, 0x07, 1);
7777 GEN_SPEOP_LDST(evlwhe, 0x08, 2);
7778 GEN_SPEOP_LDST(evlwhou, 0x0A, 2);
7779 GEN_SPEOP_LDST(evlwhos, 0x0B, 2);
7780 GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2);
7781 GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2);
7783 GEN_SPEOP_LDST(evstdd, 0x10, 3);
7784 GEN_SPEOP_LDST(evstdw, 0x11, 3);
7785 GEN_SPEOP_LDST(evstdh, 0x12, 3);
7786 GEN_SPEOP_LDST(evstwhe, 0x18, 2);
7787 GEN_SPEOP_LDST(evstwho, 0x1A, 2);
7788 GEN_SPEOP_LDST(evstwwe, 0x1C, 2);
7789 GEN_SPEOP_LDST(evstwwo, 0x1E, 2);
7791 /* Multiply and add - TODO */
7792 #if 0
7793 GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0x00000000, PPC_SPE);
7794 GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0x00000000, PPC_SPE);
7795 GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, PPC_SPE);
7796 GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0x00000000, PPC_SPE);
7797 GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, PPC_SPE);
7798 GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0x00000000, PPC_SPE);
7799 GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0x00000000, PPC_SPE);
7800 GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0x00000000, PPC_SPE);
7801 GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, PPC_SPE);
7802 GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0x00000000, PPC_SPE);
7803 GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, PPC_SPE);
7804 GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0x00000000, PPC_SPE);
7806 GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0x00000000, PPC_SPE);
7807 GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, PPC_SPE);
7808 GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, PPC_SPE);
7809 GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0x00000000, PPC_SPE);
7810 GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0x00000000, PPC_SPE);
7811 GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0x00000000, PPC_SPE);
7812 GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0x00000000, PPC_SPE);
7813 GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, PPC_SPE);
7814 GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, PPC_SPE);
7815 GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0x00000000, PPC_SPE);
7816 GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0x00000000, PPC_SPE);
7817 GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0x00000000, PPC_SPE);
7819 GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, PPC_SPE);
7820 GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, PPC_SPE);
7821 GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, PPC_SPE);
7822 GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, PPC_SPE);
7823 GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, PPC_SPE);
7825 GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, PPC_SPE);
7826 GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0x00000000, PPC_SPE);
7827 GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, PPC_SPE);
7828 GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0x00000000, PPC_SPE);
7829 GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, PPC_SPE);
7830 GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0x00000000, PPC_SPE);
7831 GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, PPC_SPE);
7832 GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0x00000000, PPC_SPE);
7833 GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, PPC_SPE);
7834 GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0x00000000, PPC_SPE);
7835 GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, PPC_SPE);
7836 GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0x00000000, PPC_SPE);
7838 GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, PPC_SPE);
7839 GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, PPC_SPE);
7840 GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0x00000000, PPC_SPE);
7841 GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0x00000000, PPC_SPE);
7843 GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, PPC_SPE);
7844 GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0x00000000, PPC_SPE);
7845 GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, PPC_SPE);
7846 GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0x00000000, PPC_SPE);
7847 GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, PPC_SPE);
7848 GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0x00000000, PPC_SPE);
7849 GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, PPC_SPE);
7850 GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0x00000000, PPC_SPE);
7851 GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, PPC_SPE);
7852 GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0x00000000, PPC_SPE);
7853 GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, PPC_SPE);
7854 GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0x00000000, PPC_SPE);
7856 GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, PPC_SPE);
7857 GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, PPC_SPE);
7858 GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0x00000000, PPC_SPE);
7859 GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, PPC_SPE);
7860 GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
7861 #endif
7863 /*** SPE floating-point extension ***/
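/* The GEN_SPEFPUOP_* wrappers below are named <kind>_<result width>_<operand
 * width>. On 64-bit targets a 64-bit SPE value fits in a single GPR, so the
 * 32-bit forms mask and merge only the low word of rD; on 32-bit targets the
 * upper halves live in cpu_gprh[] and gen_load_gpr64()/gen_store_gpr64()
 * assemble or split the 64-bit value around the helper call. */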
7864 #if defined(TARGET_PPC64)
7865 #define GEN_SPEFPUOP_CONV_32_32(name) \
7866 static inline void gen_##name(DisasContext *ctx) \
7867 { \
7868 TCGv_i32 t0; \
7869 TCGv t1; \
7870 t0 = tcg_temp_new_i32(); \
7871 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7872 gen_helper_##name(t0, t0); \
7873 t1 = tcg_temp_new(); \
7874 tcg_gen_extu_i32_tl(t1, t0); \
7875 tcg_temp_free_i32(t0); \
7876 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7877 0xFFFFFFFF00000000ULL); \
7878 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
7879 tcg_temp_free(t1); \
7880 }
7881 #define GEN_SPEFPUOP_CONV_32_64(name) \
7882 static inline void gen_##name(DisasContext *ctx) \
7883 { \
7884 TCGv_i32 t0; \
7885 TCGv t1; \
7886 t0 = tcg_temp_new_i32(); \
7887 gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]); \
7888 t1 = tcg_temp_new(); \
7889 tcg_gen_extu_i32_tl(t1, t0); \
7890 tcg_temp_free_i32(t0); \
7891 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7892 0xFFFFFFFF00000000ULL); \
7893 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
7894 tcg_temp_free(t1); \
7895 }
7896 #define GEN_SPEFPUOP_CONV_64_32(name) \
7897 static inline void gen_##name(DisasContext *ctx) \
7898 { \
7899 TCGv_i32 t0 = tcg_temp_new_i32(); \
7900 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7901 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0); \
7902 tcg_temp_free_i32(t0); \
7903 }
7904 #define GEN_SPEFPUOP_CONV_64_64(name) \
7905 static inline void gen_##name(DisasContext *ctx) \
7906 { \
7907 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7908 }
7909 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
7910 static inline void gen_##name(DisasContext *ctx) \
7911 { \
7912 TCGv_i32 t0, t1; \
7913 TCGv_i64 t2; \
7914 if (unlikely(!ctx->spe_enabled)) { \
7915 gen_exception(ctx, POWERPC_EXCP_APU); \
7916 return; \
7917 } \
7918 t0 = tcg_temp_new_i32(); \
7919 t1 = tcg_temp_new_i32(); \
7920 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7921 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7922 gen_helper_##name(t0, t0, t1); \
7923 tcg_temp_free_i32(t1); \
7924 t2 = tcg_temp_new(); \
7925 tcg_gen_extu_i32_tl(t2, t0); \
7926 tcg_temp_free_i32(t0); \
7927 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7928 0xFFFFFFFF00000000ULL); \
7929 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t2); \
7930 tcg_temp_free(t2); \
7931 }
7932 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
7933 static inline void gen_##name(DisasContext *ctx) \
7934 { \
7935 if (unlikely(!ctx->spe_enabled)) { \
7936 gen_exception(ctx, POWERPC_EXCP_APU); \
7937 return; \
7938 } \
7939 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7940 cpu_gpr[rB(ctx->opcode)]); \
7941 }
7942 #define GEN_SPEFPUOP_COMP_32(name) \
7943 static inline void gen_##name(DisasContext *ctx) \
7944 { \
7945 TCGv_i32 t0, t1; \
7946 if (unlikely(!ctx->spe_enabled)) { \
7947 gen_exception(ctx, POWERPC_EXCP_APU); \
7948 return; \
7949 } \
7950 t0 = tcg_temp_new_i32(); \
7951 t1 = tcg_temp_new_i32(); \
7952 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7953 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7954 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1); \
7955 tcg_temp_free_i32(t0); \
7956 tcg_temp_free_i32(t1); \
7957 }
7958 #define GEN_SPEFPUOP_COMP_64(name) \
7959 static inline void gen_##name(DisasContext *ctx) \
7960 { \
7961 if (unlikely(!ctx->spe_enabled)) { \
7962 gen_exception(ctx, POWERPC_EXCP_APU); \
7963 return; \
7964 } \
7965 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \
7966 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7967 }
7968 #else
7969 #define GEN_SPEFPUOP_CONV_32_32(name) \
7970 static inline void gen_##name(DisasContext *ctx) \
7971 { \
7972 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7973 }
7974 #define GEN_SPEFPUOP_CONV_32_64(name) \
7975 static inline void gen_##name(DisasContext *ctx) \
7976 { \
7977 TCGv_i64 t0 = tcg_temp_new_i64(); \
7978 gen_load_gpr64(t0, rB(ctx->opcode)); \
7979 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0); \
7980 tcg_temp_free_i64(t0); \
7981 }
7982 #define GEN_SPEFPUOP_CONV_64_32(name) \
7983 static inline void gen_##name(DisasContext *ctx) \
7984 { \
7985 TCGv_i64 t0 = tcg_temp_new_i64(); \
7986 gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]); \
7987 gen_store_gpr64(rD(ctx->opcode), t0); \
7988 tcg_temp_free_i64(t0); \
7989 }
7990 #define GEN_SPEFPUOP_CONV_64_64(name) \
7991 static inline void gen_##name(DisasContext *ctx) \
7992 { \
7993 TCGv_i64 t0 = tcg_temp_new_i64(); \
7994 gen_load_gpr64(t0, rB(ctx->opcode)); \
7995 gen_helper_##name(t0, t0); \
7996 gen_store_gpr64(rD(ctx->opcode), t0); \
7997 tcg_temp_free_i64(t0); \
7998 }
7999 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
8000 static inline void gen_##name(DisasContext *ctx) \
8001 { \
8002 if (unlikely(!ctx->spe_enabled)) { \
8003 gen_exception(ctx, POWERPC_EXCP_APU); \
8004 return; \
8005 } \
8006 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], \
8007 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
8008 }
8009 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
8010 static inline void gen_##name(DisasContext *ctx) \
8011 { \
8012 TCGv_i64 t0, t1; \
8013 if (unlikely(!ctx->spe_enabled)) { \
8014 gen_exception(ctx, POWERPC_EXCP_APU); \
8015 return; \
8016 } \
8017 t0 = tcg_temp_new_i64(); \
8018 t1 = tcg_temp_new_i64(); \
8019 gen_load_gpr64(t0, rA(ctx->opcode)); \
8020 gen_load_gpr64(t1, rB(ctx->opcode)); \
8021 gen_helper_##name(t0, t0, t1); \
8022 gen_store_gpr64(rD(ctx->opcode), t0); \
8023 tcg_temp_free_i64(t0); \
8024 tcg_temp_free_i64(t1); \
8025 }
8026 #define GEN_SPEFPUOP_COMP_32(name) \
8027 static inline void gen_##name(DisasContext *ctx) \
8028 { \
8029 if (unlikely(!ctx->spe_enabled)) { \
8030 gen_exception(ctx, POWERPC_EXCP_APU); \
8031 return; \
8032 } \
8033 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \
8034 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
8035 }
8036 #define GEN_SPEFPUOP_COMP_64(name) \
8037 static inline void gen_##name(DisasContext *ctx) \
8038 { \
8039 TCGv_i64 t0, t1; \
8040 if (unlikely(!ctx->spe_enabled)) { \
8041 gen_exception(ctx, POWERPC_EXCP_APU); \
8042 return; \
8043 } \
8044 t0 = tcg_temp_new_i64(); \
8045 t1 = tcg_temp_new_i64(); \
8046 gen_load_gpr64(t0, rA(ctx->opcode)); \
8047 gen_load_gpr64(t1, rB(ctx->opcode)); \
8048 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1); \
8049 tcg_temp_free_i64(t0); \
8050 tcg_temp_free_i64(t1); \
8051 }
8052 #endif
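/* The wrappers are instantiated below for the three SPE floating-point
 * groups: vector single precision (evfs*), scalar single precision (efs*)
 * and scalar double precision (efd*), each with arithmetic, sign-bit,
 * conversion and comparison operations. */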
8054 /* Single precision floating-point vector operations */
8055 /* Arithmetic */
8056 GEN_SPEFPUOP_ARITH2_64_64(evfsadd);
8057 GEN_SPEFPUOP_ARITH2_64_64(evfssub);
8058 GEN_SPEFPUOP_ARITH2_64_64(evfsmul);
8059 GEN_SPEFPUOP_ARITH2_64_64(evfsdiv);
8060 static inline void gen_evfsabs(DisasContext *ctx)
8061 {
8062 if (unlikely(!ctx->spe_enabled)) {
8063 gen_exception(ctx, POWERPC_EXCP_APU);
8064 return;
8065 }
8066 #if defined(TARGET_PPC64)
8067 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000080000000LL);
8068 #else
8069 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x80000000);
8070 tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
8071 #endif
8072 }
8073 static inline void gen_evfsnabs(DisasContext *ctx)
8074 {
8075 if (unlikely(!ctx->spe_enabled)) {
8076 gen_exception(ctx, POWERPC_EXCP_APU);
8077 return;
8078 }
8079 #if defined(TARGET_PPC64)
8080 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
8081 #else
8082 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8083 tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8084 #endif
8085 }
8086 static inline void gen_evfsneg(DisasContext *ctx)
8087 {
8088 if (unlikely(!ctx->spe_enabled)) {
8089 gen_exception(ctx, POWERPC_EXCP_APU);
8090 return;
8091 }
8092 #if defined(TARGET_PPC64)
8093 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
8094 #else
8095 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8096 tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8097 #endif
8098 }
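/* evfsabs, evfsnabs and evfsneg are pure sign-bit manipulations (and/or/xor
 * of bit 31 in each 32-bit element), so they are generated inline rather
 * than through a floating-point helper. */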
8100 /* Conversion */
8101 GEN_SPEFPUOP_CONV_64_64(evfscfui);
8102 GEN_SPEFPUOP_CONV_64_64(evfscfsi);
8103 GEN_SPEFPUOP_CONV_64_64(evfscfuf);
8104 GEN_SPEFPUOP_CONV_64_64(evfscfsf);
8105 GEN_SPEFPUOP_CONV_64_64(evfsctui);
8106 GEN_SPEFPUOP_CONV_64_64(evfsctsi);
8107 GEN_SPEFPUOP_CONV_64_64(evfsctuf);
8108 GEN_SPEFPUOP_CONV_64_64(evfsctsf);
8109 GEN_SPEFPUOP_CONV_64_64(evfsctuiz);
8110 GEN_SPEFPUOP_CONV_64_64(evfsctsiz);
8112 /* Comparison */
8113 GEN_SPEFPUOP_COMP_64(evfscmpgt);
8114 GEN_SPEFPUOP_COMP_64(evfscmplt);
8115 GEN_SPEFPUOP_COMP_64(evfscmpeq);
8116 GEN_SPEFPUOP_COMP_64(evfststgt);
8117 GEN_SPEFPUOP_COMP_64(evfststlt);
8118 GEN_SPEFPUOP_COMP_64(evfststeq);
8120 /* Opcode definitions */
8121 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPE_SINGLE); //
8122 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPE_SINGLE); //
8123 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPE_SINGLE); //
8124 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPE_SINGLE); //
8125 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8126 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8127 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8128 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8129 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8130 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8131 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8132 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8133 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8134 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8136 /* Single precision floating-point operations */
8137 /* Arithmetic */
8138 GEN_SPEFPUOP_ARITH2_32_32(efsadd);
8139 GEN_SPEFPUOP_ARITH2_32_32(efssub);
8140 GEN_SPEFPUOP_ARITH2_32_32(efsmul);
8141 GEN_SPEFPUOP_ARITH2_32_32(efsdiv);
8142 static inline void gen_efsabs(DisasContext *ctx)
8143 {
8144 if (unlikely(!ctx->spe_enabled)) {
8145 gen_exception(ctx, POWERPC_EXCP_APU);
8146 return;
8147 }
8148 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], (target_long)~0x80000000LL);
8149 }
8150 static inline void gen_efsnabs(DisasContext *ctx)
8151 {
8152 if (unlikely(!ctx->spe_enabled)) {
8153 gen_exception(ctx, POWERPC_EXCP_APU);
8154 return;
8155 }
8156 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8157 }
8158 static inline void gen_efsneg(DisasContext *ctx)
8159 {
8160 if (unlikely(!ctx->spe_enabled)) {
8161 gen_exception(ctx, POWERPC_EXCP_APU);
8162 return;
8163 }
8164 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8165 }
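/* Note the (target_long) cast in gen_efsabs: on a 64-bit target it keeps the
 * mask sign-extended so the upper half of the GPR is left untouched, while
 * on a 32-bit target it simply truncates to 0x7FFFFFFF. */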
8167 /* Conversion */
8168 GEN_SPEFPUOP_CONV_32_32(efscfui);
8169 GEN_SPEFPUOP_CONV_32_32(efscfsi);
8170 GEN_SPEFPUOP_CONV_32_32(efscfuf);
8171 GEN_SPEFPUOP_CONV_32_32(efscfsf);
8172 GEN_SPEFPUOP_CONV_32_32(efsctui);
8173 GEN_SPEFPUOP_CONV_32_32(efsctsi);
8174 GEN_SPEFPUOP_CONV_32_32(efsctuf);
8175 GEN_SPEFPUOP_CONV_32_32(efsctsf);
8176 GEN_SPEFPUOP_CONV_32_32(efsctuiz);
8177 GEN_SPEFPUOP_CONV_32_32(efsctsiz);
8178 GEN_SPEFPUOP_CONV_32_64(efscfd);
8180 /* Comparison */
8181 GEN_SPEFPUOP_COMP_32(efscmpgt);
8182 GEN_SPEFPUOP_COMP_32(efscmplt);
8183 GEN_SPEFPUOP_COMP_32(efscmpeq);
8184 GEN_SPEFPUOP_COMP_32(efststgt);
8185 GEN_SPEFPUOP_COMP_32(efststlt);
8186 GEN_SPEFPUOP_COMP_32(efststeq);
8188 /* Opcode definitions */
8189 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPE_SINGLE); //
8190 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPE_SINGLE); //
8191 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPE_SINGLE); //
8192 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPE_SINGLE); //
8193 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8194 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8195 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8196 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8197 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8198 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8199 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8200 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8201 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8202 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8204 /* Double precision floating-point operations */
8205 /* Arithmetic */
8206 GEN_SPEFPUOP_ARITH2_64_64(efdadd);
8207 GEN_SPEFPUOP_ARITH2_64_64(efdsub);
8208 GEN_SPEFPUOP_ARITH2_64_64(efdmul);
8209 GEN_SPEFPUOP_ARITH2_64_64(efddiv);
8210 static inline void gen_efdabs(DisasContext *ctx)
8211 {
8212 if (unlikely(!ctx->spe_enabled)) {
8213 gen_exception(ctx, POWERPC_EXCP_APU);
8214 return;
8215 }
8216 #if defined(TARGET_PPC64)
8217 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000000000000LL);
8218 #else
8219 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8220 tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
8221 #endif
8222 }
8223 static inline void gen_efdnabs(DisasContext *ctx)
8224 {
8225 if (unlikely(!ctx->spe_enabled)) {
8226 gen_exception(ctx, POWERPC_EXCP_APU);
8227 return;
8228 }
8229 #if defined(TARGET_PPC64)
8230 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
8231 #else
8232 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8233 tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8234 #endif
8235 }
8236 static inline void gen_efdneg(DisasContext *ctx)
8237 {
8238 if (unlikely(!ctx->spe_enabled)) {
8239 gen_exception(ctx, POWERPC_EXCP_APU);
8240 return;
8241 }
8242 #if defined(TARGET_PPC64)
8243 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
8244 #else
8245 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8246 tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8247 #endif
8248 }
8250 /* Conversion */
8251 GEN_SPEFPUOP_CONV_64_32(efdcfui);
8252 GEN_SPEFPUOP_CONV_64_32(efdcfsi);
8253 GEN_SPEFPUOP_CONV_64_32(efdcfuf);
8254 GEN_SPEFPUOP_CONV_64_32(efdcfsf);
8255 GEN_SPEFPUOP_CONV_32_64(efdctui);
8256 GEN_SPEFPUOP_CONV_32_64(efdctsi);
8257 GEN_SPEFPUOP_CONV_32_64(efdctuf);
8258 GEN_SPEFPUOP_CONV_32_64(efdctsf);
8259 GEN_SPEFPUOP_CONV_32_64(efdctuiz);
8260 GEN_SPEFPUOP_CONV_32_64(efdctsiz);
8261 GEN_SPEFPUOP_CONV_64_32(efdcfs);
8262 GEN_SPEFPUOP_CONV_64_64(efdcfuid);
8263 GEN_SPEFPUOP_CONV_64_64(efdcfsid);
8264 GEN_SPEFPUOP_CONV_64_64(efdctuidz);
8265 GEN_SPEFPUOP_CONV_64_64(efdctsidz);
8267 /* Comparison */
8268 GEN_SPEFPUOP_COMP_64(efdcmpgt);
8269 GEN_SPEFPUOP_COMP_64(efdcmplt);
8270 GEN_SPEFPUOP_COMP_64(efdcmpeq);
8271 GEN_SPEFPUOP_COMP_64(efdtstgt);
8272 GEN_SPEFPUOP_COMP_64(efdtstlt);
8273 GEN_SPEFPUOP_COMP_64(efdtsteq);
8275 /* Opcode definitions */
8276 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPE_DOUBLE); //
8277 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8278 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPE_DOUBLE); //
8279 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPE_DOUBLE); //
8280 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPE_DOUBLE); //
8281 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8282 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
8283 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
8284 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8285 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8286 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8287 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8288 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8289 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8290 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
8291 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
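/* Opcode table. Each GEN_HANDLER(name, opc1, opc2, opc3, inval, type) entry
 * registers gen_<name> under primary opcode opc1 and extended opcodes
 * opc2/opc3; inval marks the encoding bits the decoder expects to be zero,
 * and type is the PPC_* feature flag the CPU must advertise for the
 * instruction to be available. */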
8293 static opcode_t opcodes[] = {
8294 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
8295 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
8296 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8297 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER),
8298 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8299 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
8300 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8301 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8302 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8303 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8304 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
8305 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
8306 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
8307 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
8308 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8309 #if defined(TARGET_PPC64)
8310 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
8311 #endif
8312 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
8313 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
8314 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8315 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8316 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8317 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
8318 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
8319 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
8320 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8321 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8322 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8323 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8324 GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB),
8325 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
8326 #if defined(TARGET_PPC64)
8327 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
8328 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
8329 #endif
8330 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8331 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8332 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8333 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
8334 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
8335 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
8336 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
8337 #if defined(TARGET_PPC64)
8338 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
8339 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
8340 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
8341 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
8342 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
8343 #endif
8344 GEN_HANDLER(frsqrtes, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES),
8345 GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8346 GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8347 GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT),
8348 GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT),
8349 GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT),
8350 GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT),
8351 GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT),
8352 GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT),
8353 GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT),
8354 GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x00010000, PPC_FLOAT),
8355 GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT),
8356 #if defined(TARGET_PPC64)
8357 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
8358 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
8359 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
8360 #endif
8361 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8362 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8363 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
8364 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
8365 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
8366 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
8367 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO),
8368 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
8369 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
8370 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
8371 #if defined(TARGET_PPC64)
8372 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
8373 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
8374 #endif
8375 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
8376 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
8377 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8378 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8379 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
8380 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
8381 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
8382 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
8383 #if defined(TARGET_PPC64)
8384 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
8385 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
8386 #endif
8387 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW),
8388 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
8389 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8390 #if defined(TARGET_PPC64)
8391 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
8392 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
8393 #endif
8394 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
8395 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
8396 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
8397 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
8398 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
8399 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
8400 #if defined(TARGET_PPC64)
8401 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
8402 #endif
8403 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC),
8404 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC),
8405 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
8406 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
8407 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
8408 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE),
8409 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE),
8410 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03E00001, PPC_CACHE_DCBZ),
8411 GEN_HANDLER2(dcbz_970, "dcbz", 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZT),
8412 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
8413 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC),
8414 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
8415 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
8416 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
8417 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
8418 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
8419 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
8420 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
8421 #if defined(TARGET_PPC64)
8422 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
8423 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
8424 PPC_SEGMENT_64B),
8425 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
8426 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
8427 PPC_SEGMENT_64B),
8428 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
8429 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
8430 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
8431 #endif
8432 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
8433 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x03FF0001, PPC_MEM_TLBIE),
8434 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE),
8435 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
8436 #if defined(TARGET_PPC64)
8437 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI),
8438 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
8439 #endif
8440 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
8441 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
8442 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
8443 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
8444 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
8445 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
8446 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
8447 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
8448 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
8449 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
8450 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
8451 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8452 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
8453 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
8454 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
8455 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
8456 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
8457 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
8458 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
8459 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8460 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
8461 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
8462 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
8463 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
8464 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
8465 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
8466 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
8467 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
8468 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
8469 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
8470 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
8471 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
8472 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
8473 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
8474 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
8475 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
8476 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
8477 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
8478 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
8479 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
8480 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
8481 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
8482 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
8483 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
8484 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
8485 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
8486 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
8487 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
8488 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
8489 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8490 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8491 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
8492 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
8493 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8494 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8495 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
8496 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
8497 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
8498 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
8499 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
8500 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
8501 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
8502 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
8503 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
8504 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
8505 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
8506 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
8507 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
8508 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
8509 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
8510 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
8511 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
8512 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
8513 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
8514 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
8515 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
8516 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
8517 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
8518 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
8519 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
8520 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
8521 PPC_NONE, PPC2_BOOKE206),
8522 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
8523 PPC_NONE, PPC2_BOOKE206),
8524 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
8525 PPC_NONE, PPC2_BOOKE206),
8526 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
8527 PPC_NONE, PPC2_BOOKE206),
8528 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
8529 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
8530 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
8531 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
8532 PPC_BOOKE, PPC2_BOOKE206),
8533 GEN_HANDLER_E(msync, 0x1F, 0x16, 0x12, 0x03FFF801,
8534 PPC_BOOKE, PPC2_BOOKE206),
8535 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
8536 PPC_BOOKE, PPC2_BOOKE206),
8537 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
8538 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
8539 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
8540 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
8541 GEN_HANDLER(vsldoi, 0x04, 0x16, 0xFF, 0x00000400, PPC_ALTIVEC),
8542 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
8543 GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE),
8544 GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE),
8545 GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE),
8546 GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE),
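/* From here on the table entries are produced by redefining the GEN_* macros
 * used earlier in this file so that they expand to GEN_HANDLER lines; every
 * generator defined above therefore gets a matching table entry. */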
8548 #undef GEN_INT_ARITH_ADD
8549 #undef GEN_INT_ARITH_ADD_CONST
8550 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
8551 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
8552 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
8553 add_ca, compute_ca, compute_ov) \
8554 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
8555 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
8556 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
8557 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
8558 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
8559 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
8560 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
8561 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
8562 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
8563 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
8564 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
8566 #undef GEN_INT_ARITH_DIVW
8567 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
8568 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
8569 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
8570 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
8571 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
8572 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
8574 #if defined(TARGET_PPC64)
8575 #undef GEN_INT_ARITH_DIVD
8576 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
8577 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8578 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
8579 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
8580 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
8581 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
8583 #undef GEN_INT_ARITH_MUL_HELPER
8584 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
8585 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8586 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
8587 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
8588 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
8589 #endif
8591 #undef GEN_INT_ARITH_SUBF
8592 #undef GEN_INT_ARITH_SUBF_CONST
8593 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
8594 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
8595 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
8596 add_ca, compute_ca, compute_ov) \
8597 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
8598 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
8599 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
8600 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
8601 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
8602 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
8603 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
8604 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
8605 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
8606 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
8607 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
8609 #undef GEN_LOGICAL1
8610 #undef GEN_LOGICAL2
8611 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
8612 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
8613 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
8614 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
8615 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
8616 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
8617 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
8618 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
8619 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
8620 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
8621 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
8622 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
8623 #if defined(TARGET_PPC64)
8624 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
8625 #endif
8627 #if defined(TARGET_PPC64)
8628 #undef GEN_PPC64_R2
8629 #undef GEN_PPC64_R4
8630 #define GEN_PPC64_R2(name, opc1, opc2) \
8631 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8632 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8633 PPC_64B)
8634 #define GEN_PPC64_R4(name, opc1, opc2) \
8635 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8636 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
8637 PPC_64B), \
8638 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8639 PPC_64B), \
8640 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
8641 PPC_64B)
8642 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
8643 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
8644 GEN_PPC64_R4(rldic, 0x1E, 0x04),
8645 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
8646 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
8647 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
8648 #endif
8650 #undef _GEN_FLOAT_ACB
8651 #undef GEN_FLOAT_ACB
8652 #undef _GEN_FLOAT_AB
8653 #undef GEN_FLOAT_AB
8654 #undef _GEN_FLOAT_AC
8655 #undef GEN_FLOAT_AC
8656 #undef GEN_FLOAT_B
8657 #undef GEN_FLOAT_BS
8658 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
8659 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type)
8660 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
8661 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type), \
8662 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type)
8663 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
8664 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
8665 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
8666 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
8667 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
8668 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
8669 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
8670 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
8671 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
8672 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
8673 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
8674 GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type)
8675 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
8676 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type)
8678 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT),
8679 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT),
8680 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT),
8681 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT),
8682 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES),
8683 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE),
8684 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL),
8685 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT),
8686 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT),
8687 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT),
8688 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT),
8689 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT),
8690 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT),
8691 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT),
8692 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT),
8693 #if defined(TARGET_PPC64)
8694 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B),
8695 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B),
8696 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B),
8697 #endif
8698 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT),
8699 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT),
8700 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT),
8701 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT),
8702 GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT),
8703 GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT),
8704 GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT),
8706 #undef GEN_LD
8707 #undef GEN_LDU
8708 #undef GEN_LDUX
8709 #undef GEN_LDX
8710 #undef GEN_LDS
8711 #define GEN_LD(name, ldop, opc, type) \
8712 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8713 #define GEN_LDU(name, ldop, opc, type) \
8714 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8715 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
8716 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8717 #define GEN_LDX(name, ldop, opc2, opc3, type) \
8718 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8719 #define GEN_LDS(name, ldop, op, type) \
8720 GEN_LD(name, ldop, op | 0x20, type) \
8721 GEN_LDU(name, ldop, op | 0x21, type) \
8722 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
8723 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
8725 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
8726 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
8727 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
8728 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
8729 #if defined(TARGET_PPC64)
8730 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
8731 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
8732 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B)
8733 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B)
8734 #endif
8735 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
8736 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
8738 #undef GEN_ST
8739 #undef GEN_STU
8740 #undef GEN_STUX
8741 #undef GEN_STX
8742 #undef GEN_STS
8743 #define GEN_ST(name, stop, opc, type) \
8744 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8745 #define GEN_STU(name, stop, opc, type) \
8746 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
8747 #define GEN_STUX(name, stop, opc2, opc3, type) \
8748 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8749 #define GEN_STX(name, stop, opc2, opc3, type) \
8750 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8751 #define GEN_STS(name, stop, op, type) \
8752 GEN_ST(name, stop, op | 0x20, type) \
8753 GEN_STU(name, stop, op | 0x21, type) \
8754 GEN_STUX(name, stop, 0x17, op | 0x01, type) \
8755 GEN_STX(name, stop, 0x17, op | 0x00, type)
8757 GEN_STS(stb, st8, 0x06, PPC_INTEGER)
8758 GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
8759 GEN_STS(stw, st32, 0x04, PPC_INTEGER)
8760 #if defined(TARGET_PPC64)
8761 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B)
8762 GEN_STX(std, st64, 0x15, 0x04, PPC_64B)
8763 #endif
8764 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
8765 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
8767 #undef GEN_LDF
8768 #undef GEN_LDUF
8769 #undef GEN_LDUXF
8770 #undef GEN_LDXF
8771 #undef GEN_LDFS
8772 #define GEN_LDF(name, ldop, opc, type) \
8773 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8774 #define GEN_LDUF(name, ldop, opc, type) \
8775 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8776 #define GEN_LDUXF(name, ldop, opc, type) \
8777 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
8778 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
8779 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8780 #define GEN_LDFS(name, ldop, op, type) \
8781 GEN_LDF(name, ldop, op | 0x20, type) \
8782 GEN_LDUF(name, ldop, op | 0x21, type) \
8783 GEN_LDUXF(name, ldop, op | 0x01, type) \
8784 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
8786 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT)
8787 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT)
8789 #undef GEN_STF
8790 #undef GEN_STUF
8791 #undef GEN_STUXF
8792 #undef GEN_STXF
8793 #undef GEN_STFS
8794 #define GEN_STF(name, stop, opc, type) \
8795 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8796 #define GEN_STUF(name, stop, opc, type) \
8797 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8798 #define GEN_STUXF(name, stop, opc, type) \
8799 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
8800 #define GEN_STXF(name, stop, opc2, opc3, type) \
8801 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8802 #define GEN_STFS(name, stop, op, type) \
8803 GEN_STF(name, stop, op | 0x20, type) \
8804 GEN_STUF(name, stop, op | 0x21, type) \
8805 GEN_STUXF(name, stop, op | 0x01, type) \
8806 GEN_STXF(name, stop, 0x17, op | 0x00, type)
8808 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT)
8809 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT)
8810 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX)
8812 #undef GEN_CRLOGIC
8813 #define GEN_CRLOGIC(name, tcg_op, opc) \
8814 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
8815 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
8816 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
8817 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
8818 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
8819 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
8820 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
8821 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
8822 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
8824 #undef GEN_MAC_HANDLER
8825 #define GEN_MAC_HANDLER(name, opc2, opc3) \
8826 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
8827 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
8828 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
8829 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
8830 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
8831 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
8832 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
8833 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
8834 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
8835 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
8836 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
8837 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
8838 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
8839 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
8840 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
8841 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
8842 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
8843 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
8844 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
8845 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
8846 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
8847 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
8848 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
8849 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
8850 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
8851 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
8852 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
8853 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
8854 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
8855 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
8856 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
8857 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
8858 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
8859 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
8860 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
8861 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
8862 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
8863 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
8864 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
8865 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
8866 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
8867 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
8868 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
8870 #undef GEN_VR_LDX
8871 #undef GEN_VR_STX
8872 #undef GEN_VR_LVE
8873 #undef GEN_VR_STVE
8874 #define GEN_VR_LDX(name, opc2, opc3) \
8875 GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8876 #define GEN_VR_STX(name, opc2, opc3) \
8877 GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8878 #define GEN_VR_LVE(name, opc2, opc3) \
8879 GEN_HANDLER(lve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8880 #define GEN_VR_STVE(name, opc2, opc3) \
8881 GEN_HANDLER(stve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8882 GEN_VR_LDX(lvx, 0x07, 0x03),
8883 GEN_VR_LDX(lvxl, 0x07, 0x0B),
8884 GEN_VR_LVE(bx, 0x07, 0x00),
8885 GEN_VR_LVE(hx, 0x07, 0x01),
8886 GEN_VR_LVE(wx, 0x07, 0x02),
8887 GEN_VR_STX(svx, 0x07, 0x07),
8888 GEN_VR_STX(svxl, 0x07, 0x0F),
8889 GEN_VR_STVE(bx, 0x07, 0x04),
8890 GEN_VR_STVE(hx, 0x07, 0x05),
8891 GEN_VR_STVE(wx, 0x07, 0x06),
8893 #undef GEN_VX_LOGICAL
8894 #define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
8895 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
8896 GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16),
8897 GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17),
8898 GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18),
8899 GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19),
8900 GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20),
8902 #undef GEN_VXFORM
8903 #define GEN_VXFORM(name, opc2, opc3) \
8904 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
8905 GEN_VXFORM(vaddubm, 0, 0),
8906 GEN_VXFORM(vadduhm, 0, 1),
8907 GEN_VXFORM(vadduwm, 0, 2),
8908 GEN_VXFORM(vsububm, 0, 16),
8909 GEN_VXFORM(vsubuhm, 0, 17),
8910 GEN_VXFORM(vsubuwm, 0, 18),
8911 GEN_VXFORM(vmaxub, 1, 0),
8912 GEN_VXFORM(vmaxuh, 1, 1),
8913 GEN_VXFORM(vmaxuw, 1, 2),
8914 GEN_VXFORM(vmaxsb, 1, 4),
8915 GEN_VXFORM(vmaxsh, 1, 5),
8916 GEN_VXFORM(vmaxsw, 1, 6),
8917 GEN_VXFORM(vminub, 1, 8),
8918 GEN_VXFORM(vminuh, 1, 9),
8919 GEN_VXFORM(vminuw, 1, 10),
8920 GEN_VXFORM(vminsb, 1, 12),
8921 GEN_VXFORM(vminsh, 1, 13),
8922 GEN_VXFORM(vminsw, 1, 14),
8923 GEN_VXFORM(vavgub, 1, 16),
8924 GEN_VXFORM(vavguh, 1, 17),
8925 GEN_VXFORM(vavguw, 1, 18),
8926 GEN_VXFORM(vavgsb, 1, 20),
8927 GEN_VXFORM(vavgsh, 1, 21),
8928 GEN_VXFORM(vavgsw, 1, 22),
8929 GEN_VXFORM(vmrghb, 6, 0),
8930 GEN_VXFORM(vmrghh, 6, 1),
8931 GEN_VXFORM(vmrghw, 6, 2),
8932 GEN_VXFORM(vmrglb, 6, 4),
8933 GEN_VXFORM(vmrglh, 6, 5),
8934 GEN_VXFORM(vmrglw, 6, 6),
8935 GEN_VXFORM(vmuloub, 4, 0),
8936 GEN_VXFORM(vmulouh, 4, 1),
8937 GEN_VXFORM(vmulosb, 4, 4),
8938 GEN_VXFORM(vmulosh, 4, 5),
8939 GEN_VXFORM(vmuleub, 4, 8),
8940 GEN_VXFORM(vmuleuh, 4, 9),
8941 GEN_VXFORM(vmulesb, 4, 12),
8942 GEN_VXFORM(vmulesh, 4, 13),
8943 GEN_VXFORM(vslb, 2, 4),
8944 GEN_VXFORM(vslh, 2, 5),
8945 GEN_VXFORM(vslw, 2, 6),
8946 GEN_VXFORM(vsrb, 2, 8),
8947 GEN_VXFORM(vsrh, 2, 9),
8948 GEN_VXFORM(vsrw, 2, 10),
8949 GEN_VXFORM(vsrab, 2, 12),
8950 GEN_VXFORM(vsrah, 2, 13),
8951 GEN_VXFORM(vsraw, 2, 14),
8952 GEN_VXFORM(vslo, 6, 16),
8953 GEN_VXFORM(vsro, 6, 17),
8954 GEN_VXFORM(vaddcuw, 0, 6),
8955 GEN_VXFORM(vsubcuw, 0, 22),
8956 GEN_VXFORM(vaddubs, 0, 8),
8957 GEN_VXFORM(vadduhs, 0, 9),
8958 GEN_VXFORM(vadduws, 0, 10),
8959 GEN_VXFORM(vaddsbs, 0, 12),
8960 GEN_VXFORM(vaddshs, 0, 13),
8961 GEN_VXFORM(vaddsws, 0, 14),
8962 GEN_VXFORM(vsububs, 0, 24),
8963 GEN_VXFORM(vsubuhs, 0, 25),
8964 GEN_VXFORM(vsubuws, 0, 26),
8965 GEN_VXFORM(vsubsbs, 0, 28),
8966 GEN_VXFORM(vsubshs, 0, 29),
8967 GEN_VXFORM(vsubsws, 0, 30),
8968 GEN_VXFORM(vrlb, 2, 0),
8969 GEN_VXFORM(vrlh, 2, 1),
8970 GEN_VXFORM(vrlw, 2, 2),
8971 GEN_VXFORM(vsl, 2, 7),
8972 GEN_VXFORM(vsr, 2, 11),
8973 GEN_VXFORM(vpkuhum, 7, 0),
8974 GEN_VXFORM(vpkuwum, 7, 1),
8975 GEN_VXFORM(vpkuhus, 7, 2),
8976 GEN_VXFORM(vpkuwus, 7, 3),
8977 GEN_VXFORM(vpkshus, 7, 4),
8978 GEN_VXFORM(vpkswus, 7, 5),
8979 GEN_VXFORM(vpkshss, 7, 6),
8980 GEN_VXFORM(vpkswss, 7, 7),
8981 GEN_VXFORM(vpkpx, 7, 12),
8982 GEN_VXFORM(vsum4ubs, 4, 24),
8983 GEN_VXFORM(vsum4sbs, 4, 28),
8984 GEN_VXFORM(vsum4shs, 4, 25),
8985 GEN_VXFORM(vsum2sws, 4, 26),
8986 GEN_VXFORM(vsumsws, 4, 30),
8987 GEN_VXFORM(vaddfp, 5, 0),
8988 GEN_VXFORM(vsubfp, 5, 1),
8989 GEN_VXFORM(vmaxfp, 5, 16),
8990 GEN_VXFORM(vminfp, 5, 17),
8992 #undef GEN_VXRFORM1
8993 #undef GEN_VXRFORM
8994 #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
8995 GEN_HANDLER2(name, str, 0x4, opc2, opc3, 0x00000000, PPC_ALTIVEC),
8996 #define GEN_VXRFORM(name, opc2, opc3) \
8997 GEN_VXRFORM1(name, name, #name, opc2, opc3) \
8998 GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
8999 GEN_VXRFORM(vcmpequb, 3, 0)
9000 GEN_VXRFORM(vcmpequh, 3, 1)
9001 GEN_VXRFORM(vcmpequw, 3, 2)
9002 GEN_VXRFORM(vcmpgtsb, 3, 12)
9003 GEN_VXRFORM(vcmpgtsh, 3, 13)
9004 GEN_VXRFORM(vcmpgtsw, 3, 14)
9005 GEN_VXRFORM(vcmpgtub, 3, 8)
9006 GEN_VXRFORM(vcmpgtuh, 3, 9)
9007 GEN_VXRFORM(vcmpgtuw, 3, 10)
9008 GEN_VXRFORM(vcmpeqfp, 3, 3)
9009 GEN_VXRFORM(vcmpgefp, 3, 7)
9010 GEN_VXRFORM(vcmpgtfp, 3, 11)
9011 GEN_VXRFORM(vcmpbfp, 3, 15)
9013 #undef GEN_VXFORM_SIMM
9014 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
9015 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9016 GEN_VXFORM_SIMM(vspltisb, 6, 12),
9017 GEN_VXFORM_SIMM(vspltish, 6, 13),
9018 GEN_VXFORM_SIMM(vspltisw, 6, 14),
9020 #undef GEN_VXFORM_NOA
9021 #define GEN_VXFORM_NOA(name, opc2, opc3) \
9022 GEN_HANDLER(name, 0x04, opc2, opc3, 0x001f0000, PPC_ALTIVEC)
9023 GEN_VXFORM_NOA(vupkhsb, 7, 8),
9024 GEN_VXFORM_NOA(vupkhsh, 7, 9),
9025 GEN_VXFORM_NOA(vupklsb, 7, 10),
9026 GEN_VXFORM_NOA(vupklsh, 7, 11),
9027 GEN_VXFORM_NOA(vupkhpx, 7, 13),
9028 GEN_VXFORM_NOA(vupklpx, 7, 15),
9029 GEN_VXFORM_NOA(vrefp, 5, 4),
9030 GEN_VXFORM_NOA(vrsqrtefp, 5, 5),
9031 GEN_VXFORM_NOA(vexptefp, 5, 6),
9032 GEN_VXFORM_NOA(vlogefp, 5, 7),
9033 GEN_VXFORM_NOA(vrfim, 5, 8),
9034 GEN_VXFORM_NOA(vrfin, 5, 9),
9035 GEN_VXFORM_NOA(vrfip, 5, 10),
9036 GEN_VXFORM_NOA(vrfiz, 5, 11),
9038 #undef GEN_VXFORM_UIMM
9039 #define GEN_VXFORM_UIMM(name, opc2, opc3) \
9040 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9041 GEN_VXFORM_UIMM(vspltb, 6, 8),
9042 GEN_VXFORM_UIMM(vsplth, 6, 9),
9043 GEN_VXFORM_UIMM(vspltw, 6, 10),
9044 GEN_VXFORM_UIMM(vcfux, 5, 12),
9045 GEN_VXFORM_UIMM(vcfsx, 5, 13),
9046 GEN_VXFORM_UIMM(vctuxs, 5, 14),
9047 GEN_VXFORM_UIMM(vctsxs, 5, 15),
9049 #undef GEN_VAFORM_PAIRED
9050 #define GEN_VAFORM_PAIRED(name0, name1, opc2) \
9051 GEN_HANDLER(name0##_##name1, 0x04, opc2, 0xFF, 0x00000000, PPC_ALTIVEC)
9052 GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16),
9053 GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18),
9054 GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19),
9055 GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20),
9056 GEN_VAFORM_PAIRED(vsel, vperm, 21),
9057 GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23),
9059 #undef GEN_SPE
9060 #define GEN_SPE(name0, name1, opc2, opc3, inval, type) \
9061 GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type)
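/* GEN_SPE likewise packs two SPE mnemonics into one entry under major opcode
 * 0x04; "speundef" stands in for the undefined half of a pair, and inval
 * marks the encoding bits that must be zero. */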
9062 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE),
9063 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE),
9064 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE),
9065 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE),
9066 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE),
9067 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE),
9068 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE),
9069 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE),
9070 GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, PPC_SPE),
9071 GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE),
9072 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE),
9073 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE),
9074 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE),
9075 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE),
9076 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE),
9077 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE),
9078 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE),
9079 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE),
9080 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE),
9081 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE),
9082 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE),
9083 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE),
9084 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE),
9085 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE),
9086 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE),
9087 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE),
9088 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE),
9089 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE),
9090 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE),
9092 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPE_SINGLE),
9093 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPE_SINGLE),
9094 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPE_SINGLE),
9095 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPE_SINGLE),
9096 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPE_SINGLE),
9097 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPE_SINGLE),
9098 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPE_SINGLE),
9099 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPE_SINGLE),
9100 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPE_SINGLE),
9101 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPE_SINGLE),
9102 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPE_SINGLE),
9103 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPE_SINGLE),
9104 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPE_SINGLE),
9105 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPE_SINGLE),
9107 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPE_SINGLE),
9108 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPE_SINGLE),
9109 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPE_SINGLE),
9110 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPE_SINGLE),
9111 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPE_SINGLE),
9112 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPE_SINGLE),
9113 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPE_SINGLE),
9114 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPE_SINGLE),
9115 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPE_SINGLE),
9116 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPE_SINGLE),
9117 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPE_SINGLE),
9118 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPE_SINGLE),
9119 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPE_SINGLE),
9120 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPE_SINGLE),
9122 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPE_DOUBLE),
9123 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
9124 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPE_DOUBLE),
9125 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPE_DOUBLE),
9126 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPE_DOUBLE),
9127 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
9128 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
9129 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
9130 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
9131 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
9132 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
9133 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
9134 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
9135 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
9136 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
9137 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
9139 #undef GEN_SPEOP_LDST
9140 #define GEN_SPEOP_LDST(name, opc2, sh) \
9141 GEN_HANDLER(name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE)
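/* Table-building variant of GEN_SPEOP_LDST: the third argument (presumably
 * the log2 access size consumed by the code-generation variant defined
 * earlier) is ignored here; the load/store forms are distinguished by opc2
 * only, all under opc3 0x0C. */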
9142 GEN_SPEOP_LDST(evldd, 0x00, 3),
9143 GEN_SPEOP_LDST(evldw, 0x01, 3),
9144 GEN_SPEOP_LDST(evldh, 0x02, 3),
9145 GEN_SPEOP_LDST(evlhhesplat, 0x04, 1),
9146 GEN_SPEOP_LDST(evlhhousplat, 0x06, 1),
9147 GEN_SPEOP_LDST(evlhhossplat, 0x07, 1),
9148 GEN_SPEOP_LDST(evlwhe, 0x08, 2),
9149 GEN_SPEOP_LDST(evlwhou, 0x0A, 2),
9150 GEN_SPEOP_LDST(evlwhos, 0x0B, 2),
9151 GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2),
9152 GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2),
9154 GEN_SPEOP_LDST(evstdd, 0x10, 3),
9155 GEN_SPEOP_LDST(evstdw, 0x11, 3),
9156 GEN_SPEOP_LDST(evstdh, 0x12, 3),
9157 GEN_SPEOP_LDST(evstwhe, 0x18, 2),
9158 GEN_SPEOP_LDST(evstwho, 0x1A, 2),
9159 GEN_SPEOP_LDST(evstwwe, 0x1C, 2),
9160 GEN_SPEOP_LDST(evstwwo, 0x1E, 2),
9161 };
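/* End of the statically built opcodes[] table. translate_init.c, included
 * below, provides the per-CPU-model definitions and SPR setup that consume
 * this table when the runtime opcode tables are created, and helper_regs.h
 * brings in the hreg_* MSR/hflags helpers. */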
9163 #include "translate_init.c"
9164 #include "helper_regs.h"
9166 /*****************************************************************************/
9167 /* Misc PowerPC helpers */
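/* cpu_dump_state pretty-prints the architected state (NIP, MSR, GPRs, CR,
 * FPRs and a selection of SPRs) to the given stream; it backs the monitor's
 * "info registers" command and the -d cpu logging. */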
9168 void cpu_dump_state (CPUState *env, FILE *f, fprintf_function cpu_fprintf,
9169 int flags)
9170 {
9171 #define RGPL 4
9172 #define RFPL 4
9174 int i;
9176 cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
9177 TARGET_FMT_lx " XER " TARGET_FMT_lx "\n",
9178 env->nip, env->lr, env->ctr, env->xer);
9179 cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
9180 TARGET_FMT_lx " idx %d\n", env->msr, env->spr[SPR_HID0],
9181 env->hflags, env->mmu_idx);
9182 #if !defined(NO_TIMER_DUMP)
9183 cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
9184 #if !defined(CONFIG_USER_ONLY)
9185 " DECR %08" PRIu32
9186 #endif
9187 "\n",
9188 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
9189 #if !defined(CONFIG_USER_ONLY)
9190 , cpu_ppc_load_decr(env)
9191 #endif
9192 );
9193 #endif
9194 for (i = 0; i < 32; i++) {
9195 if ((i & (RGPL - 1)) == 0)
9196 cpu_fprintf(f, "GPR%02d", i);
9197 cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
9198 if ((i & (RGPL - 1)) == (RGPL - 1))
9199 cpu_fprintf(f, "\n");
9200 }
9201 cpu_fprintf(f, "CR ");
9202 for (i = 0; i < 8; i++)
9203 cpu_fprintf(f, "%01x", env->crf[i]);
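/* The hex digits above are the raw CR fields; the loop below re-prints each
 * field symbolically, mapping bits 3..0 of crf[i] to LT, GT, EQ and
 * SO/overflow markers. */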
9204 cpu_fprintf(f, " [");
9205 for (i = 0; i < 8; i++) {
9206 char a = '-';
9207 if (env->crf[i] & 0x08)
9208 a = 'L';
9209 else if (env->crf[i] & 0x04)
9210 a = 'G';
9211 else if (env->crf[i] & 0x02)
9212 a = 'E';
9213 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
9214 }
9215 cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
9216 env->reserve_addr);
9217 for (i = 0; i < 32; i++) {
9218 if ((i & (RFPL - 1)) == 0)
9219 cpu_fprintf(f, "FPR%02d", i);
9220 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
9221 if ((i & (RFPL - 1)) == (RFPL - 1))
9222 cpu_fprintf(f, "\n");
9223 }
9224 cpu_fprintf(f, "FPSCR %08x\n", env->fpscr);
9225 #if !defined(CONFIG_USER_ONLY)
9226 cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx
9227 " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n",
9228 env->spr[SPR_SRR0], env->spr[SPR_SRR1],
9229 env->spr[SPR_PVR], env->spr[SPR_VRSAVE]);
9231 cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx
9232 " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n",
9233 env->spr[SPR_SPRG0], env->spr[SPR_SPRG1],
9234 env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]);
9236 cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx
9237 " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n",
9238 env->spr[SPR_SPRG4], env->spr[SPR_SPRG5],
9239 env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]);
9241 if (env->excp_model == POWERPC_EXCP_BOOKE) {
9242 cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx
9243 " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n",
9244 env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1],
9245 env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]);
9247 cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx
9248 " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n",
9249 env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR],
9250 env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]);
9252 cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx
9253 " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n",
9254 env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR],
9255 env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]);
9257 cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx
9258 " EPR " TARGET_FMT_lx "\n",
9259 env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8],
9260 env->spr[SPR_BOOKE_EPR]);
9262 /* FSL-specific */
9263 cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx
9264 " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n",
9265 env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1],
9266 env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]);
9268 /*
9269 * IVORs are left out as they are large and do not change often --
9270 * they can be read with "p $ivor0", "p $ivor1", etc.
9271 */
9272 }
9274 switch (env->mmu_model) {
9275 case POWERPC_MMU_32B:
9276 case POWERPC_MMU_601:
9277 case POWERPC_MMU_SOFT_6xx:
9278 case POWERPC_MMU_SOFT_74xx:
9279 #if defined(TARGET_PPC64)
9280 case POWERPC_MMU_620:
9281 case POWERPC_MMU_64B:
9282 #endif
9283 cpu_fprintf(f, " SDR1 " TARGET_FMT_lx "\n", env->spr[SPR_SDR1]);
9284 break;
9285 case POWERPC_MMU_BOOKE206:
9286 cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx
9287 " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n",
9288 env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1],
9289 env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]);
9291 cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx
9292 " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n",
9293 env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6],
9294 env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]);
9296 cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx
9297 " TLB1CFG " TARGET_FMT_lx "\n",
9298 env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG],
9299 env->spr[SPR_BOOKE_TLB1CFG]);
9300 break;
9301 default:
9302 break;
9303 }
9304 #endif
9306 #undef RGPL
9307 #undef RFPL
9308 }
9310 void cpu_dump_statistics (CPUState *env, FILE *f, fprintf_function cpu_fprintf,
9311 int flags)
9312 {
9313 #if defined(DO_PPC_STATISTICS)
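/* Walk the same three-level opcode table the translator uses (opc1, then
 * opc2/opc3 for indirect entries) and print the execution count recorded for
 * every handler that was actually hit. */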
9314 opc_handler_t **t1, **t2, **t3, *handler;
9315 int op1, op2, op3;
9317 t1 = env->opcodes;
9318 for (op1 = 0; op1 < 64; op1++) {
9319 handler = t1[op1];
9320 if (is_indirect_opcode(handler)) {
9321 t2 = ind_table(handler);
9322 for (op2 = 0; op2 < 32; op2++) {
9323 handler = t2[op2];
9324 if (is_indirect_opcode(handler)) {
9325 t3 = ind_table(handler);
9326 for (op3 = 0; op3 < 32; op3++) {
9327 handler = t3[op3];
9328 if (handler->count == 0)
9329 continue;
9330 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
9331 "%016" PRIx64 " %" PRId64 "\n",
9332 op1, op2, op3, op1, (op3 << 5) | op2,
9333 handler->oname,
9334 handler->count, handler->count);
9335 }
9336 } else {
9337 if (handler->count == 0)
9338 continue;
9339 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
9340 "%016" PRIx64 " %" PRId64 "\n",
9341 op1, op2, op1, op2, handler->oname,
9342 handler->count, handler->count);
9343 }
9344 }
9345 } else {
9346 if (handler->count == 0)
9347 continue;
9348 cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
9349 " %" PRId64 "\n",
9350 op1, op1, handler->oname,
9351 handler->count, handler->count);
9352 }
9353 }
9354 #endif
9355 }
9357 /*****************************************************************************/
9358 static inline void gen_intermediate_code_internal(CPUState *env,
9359 TranslationBlock *tb,
9360 int search_pc)
9361 {
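/* Core translation loop: decode guest instructions starting at tb->pc and
 * emit TCG ops until an exception is raised, a page boundary is reached, or
 * the op buffer / instruction budget runs out. With search_pc set it also
 * records nip and icount for every op so a host PC inside the TB can be
 * mapped back to a guest nip (see restore_state_to_opc below). */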
9362 DisasContext ctx, *ctxp = &ctx;
9363 opc_handler_t **table, *handler;
9364 target_ulong pc_start;
9365 uint16_t *gen_opc_end;
9366 CPUBreakpoint *bp;
9367 int j, lj = -1;
9368 int num_insns;
9369 int max_insns;
9371 pc_start = tb->pc;
9372 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
9373 ctx.nip = pc_start;
9374 ctx.tb = tb;
9375 ctx.exception = POWERPC_EXCP_NONE;
9376 ctx.spr_cb = env->spr_cb;
9377 ctx.mem_idx = env->mmu_idx;
9378 ctx.access_type = -1;
9379 ctx.le_mode = env->hflags & (1 << MSR_LE) ? 1 : 0;
9380 #if defined(TARGET_PPC64)
9381 ctx.sf_mode = msr_sf;
9382 #endif
9383 ctx.fpu_enabled = msr_fp;
9384 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
9385 ctx.spe_enabled = msr_spe;
9386 else
9387 ctx.spe_enabled = 0;
9388 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
9389 ctx.altivec_enabled = msr_vr;
9390 else
9391 ctx.altivec_enabled = 0;
9392 if ((env->flags & POWERPC_FLAG_SE) && msr_se)
9393 ctx.singlestep_enabled = CPU_SINGLE_STEP;
9394 else
9395 ctx.singlestep_enabled = 0;
9396 if ((env->flags & POWERPC_FLAG_BE) && msr_be)
9397 ctx.singlestep_enabled |= CPU_BRANCH_STEP;
9398 if (unlikely(env->singlestep_enabled))
9399 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
9400 #if defined (DO_SINGLE_STEP) && 0
9401 /* Single step trace mode */
9402 msr_se = 1;
9403 #endif
9404 num_insns = 0;
9405 max_insns = tb->cflags & CF_COUNT_MASK;
9406 if (max_insns == 0)
9407 max_insns = CF_COUNT_MASK;
9409 gen_icount_start();
9410 /* Set env in case of segfault during code fetch */
9411 while (ctx.exception == POWERPC_EXCP_NONE && gen_opc_ptr < gen_opc_end) {
9412 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
9413 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
9414 if (bp->pc == ctx.nip) {
9415 gen_debug_exception(ctxp);
9416 break;
9417 }
9418 }
9419 }
9420 if (unlikely(search_pc)) {
9421 j = gen_opc_ptr - gen_opc_buf;
9422 if (lj < j) {
9423 lj++;
9424 while (lj < j)
9425 gen_opc_instr_start[lj++] = 0;
9426 }
9427 gen_opc_pc[lj] = ctx.nip;
9428 gen_opc_instr_start[lj] = 1;
9429 gen_opc_icount[lj] = num_insns;
9430 }
9431 LOG_DISAS("----------------\n");
9432 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
9433 ctx.nip, ctx.mem_idx, (int)msr_ir);
9434 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
9435 gen_io_start();
9436 if (unlikely(ctx.le_mode)) {
9437 ctx.opcode = bswap32(ldl_code(ctx.nip));
9438 } else {
9439 ctx.opcode = ldl_code(ctx.nip);
9440 }
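/* ldl_code fetches the instruction in the target's default big-endian byte
 * order; when MSR[LE] is set the word is byte-swapped above so that the
 * opc1/opc2/opc3 extractors always see the architected bit layout. */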
9441 LOG_DISAS("translate opcode %08x (%02x %02x %02x) (%s)\n",
9442 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
9443 opc3(ctx.opcode), ctx.le_mode ? "little" : "big");
9444 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
9445 tcg_gen_debug_insn_start(ctx.nip);
9446 ctx.nip += 4;
9447 table = env->opcodes;
9448 num_insns++;
9449 handler = table[opc1(ctx.opcode)];
9450 if (is_indirect_opcode(handler)) {
9451 table = ind_table(handler);
9452 handler = table[opc2(ctx.opcode)];
9453 if (is_indirect_opcode(handler)) {
9454 table = ind_table(handler);
9455 handler = table[opc3(ctx.opcode)];
9456 }
9457 }
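/* Three-level decode: the primary opcode selects either a direct handler or
 * an indirect table, which is then indexed by the two extended-opcode
 * fields; unassigned slots carry gen_invalid, which the validity check just
 * below catches. */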
9458 /* Is opcode *REALLY* valid ? */
9459 if (unlikely(handler->handler == &gen_invalid)) {
9460 if (qemu_log_enabled()) {
9461 qemu_log("invalid/unsupported opcode: "
9462 "%02x - %02x - %02x (%08x) " TARGET_FMT_lx " %d\n",
9463 opc1(ctx.opcode), opc2(ctx.opcode),
9464 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
9465 }
9466 } else {
9467 if (unlikely((ctx.opcode & handler->inval) != 0)) {
9468 if (qemu_log_enabled()) {
9469 qemu_log("invalid bits: %08x for opcode: "
9470 "%02x - %02x - %02x (%08x) " TARGET_FMT_lx "\n",
9471 ctx.opcode & handler->inval, opc1(ctx.opcode),
9472 opc2(ctx.opcode), opc3(ctx.opcode),
9473 ctx.opcode, ctx.nip - 4);
9474 }
9475 gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL);
9476 break;
9477 }
9478 }
9479 (*(handler->handler))(&ctx);
9480 #if defined(DO_PPC_STATISTICS)
9481 handler->count++;
9482 #endif
9483 /* Check trace mode exceptions */
9484 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
9485 (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
9486 ctx.exception != POWERPC_SYSCALL &&
9487 ctx.exception != POWERPC_EXCP_TRAP &&
9488 ctx.exception != POWERPC_EXCP_BRANCH)) {
9489 gen_exception(ctxp, POWERPC_EXCP_TRACE);
9490 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
9491 (env->singlestep_enabled) ||
9492 singlestep ||
9493 num_insns >= max_insns)) {
9494 /* if we reach a page boundary or are single stepping, stop
9495 * generation
9496 */
9497 break;
9498 }
9499 }
9500 if (tb->cflags & CF_LAST_IO)
9501 gen_io_end();
9502 if (ctx.exception == POWERPC_EXCP_NONE) {
9503 gen_goto_tb(&ctx, 0, ctx.nip);
9504 } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
9505 if (unlikely(env->singlestep_enabled)) {
9506 gen_debug_exception(ctxp);
9507 }
9508 /* Generate the return instruction */
9509 tcg_gen_exit_tb(0);
9510 }
9511 gen_icount_end(tb, num_insns);
9512 *gen_opc_ptr = INDEX_op_end;
9513 if (unlikely(search_pc)) {
9514 j = gen_opc_ptr - gen_opc_buf;
9515 lj++;
9516 while (lj <= j)
9517 gen_opc_instr_start[lj++] = 0;
9518 } else {
9519 tb->size = ctx.nip - pc_start;
9520 tb->icount = num_insns;
9521 }
9522 #if defined(DEBUG_DISAS)
9523 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
9524 int flags;
9525 flags = env->bfd_mach;
9526 flags |= ctx.le_mode << 16;
9527 qemu_log("IN: %s\n", lookup_symbol(pc_start));
9528 log_target_disas(pc_start, ctx.nip - pc_start, flags);
9529 qemu_log("\n");
9530 }
9531 #endif
9532 }
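/* Two public entry points wrap the routine above: plain translation, and the
 * search_pc variant used when QEMU must retranslate a TB to recover the
 * guest nip at a faulting instruction. */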
9534 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
9535 {
9536 gen_intermediate_code_internal(env, tb, 0);
9537 }
9539 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
9540 {
9541 gen_intermediate_code_internal(env, tb, 1);
9542 }
9544 void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
9545 {
9546 env->nip = gen_opc_pc[pc_pos];