[qemu.git] / target-ppc / translate.c
1 /*
2 * PowerPC emulation for qemu: main translation routines.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 * Copyright (C) 2011 Freescale Semiconductor, Inc.
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
26 #include "cpu.h"
27 #include "exec-all.h"
28 #include "disas.h"
29 #include "tcg-op.h"
30 #include "qemu-common.h"
31 #include "host-utils.h"
33 #include "helper.h"
34 #define GEN_HELPER 1
35 #include "helper.h"
37 #define CPU_SINGLE_STEP 0x1
38 #define CPU_BRANCH_STEP 0x2
39 #define GDBSTUB_SINGLE_STEP 0x4
41 /* Include definitions for instruction classes and implementation flags */
42 //#define PPC_DEBUG_DISAS
43 //#define DO_PPC_STATISTICS
45 #ifdef PPC_DEBUG_DISAS
46 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
47 #else
48 # define LOG_DISAS(...) do { } while (0)
49 #endif
50 /*****************************************************************************/
51 /* Code translation helpers */
53 /* global register indexes */
54 static TCGv_ptr cpu_env;
55 static char cpu_reg_names[10*3 + 22*4 /* GPR */
56 #if !defined(TARGET_PPC64)
57 + 10*4 + 22*5 /* SPE GPRh */
58 #endif
59 + 10*4 + 22*5 /* FPR */
60 + 2*(10*6 + 22*7) /* AVRh, AVRl */
61 + 8*5 /* CRF */];
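/* The buffer is sized from the NUL-terminated name lengths: "r0".."r9" take
 * 3 bytes each and "r10".."r31" take 4, and the same pattern repeats for the
 * longer "rNH", "fpN", "avrNH"/"avrNL" and "crfN" names registered below. */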
62 static TCGv cpu_gpr[32];
63 #if !defined(TARGET_PPC64)
64 static TCGv cpu_gprh[32];
65 #endif
66 static TCGv_i64 cpu_fpr[32];
67 static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
68 static TCGv_i32 cpu_crf[8];
69 static TCGv cpu_nip;
70 static TCGv cpu_msr;
71 static TCGv cpu_ctr;
72 static TCGv cpu_lr;
73 static TCGv cpu_xer;
74 static TCGv cpu_reserve;
75 static TCGv_i32 cpu_fpscr;
76 static TCGv_i32 cpu_access_type;
78 #include "gen-icount.h"
80 void ppc_translate_init(void)
82 int i;
83 char* p;
84 size_t cpu_reg_names_size;
85 static int done_init = 0;
87 if (done_init)
88 return;
90 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
92 p = cpu_reg_names;
93 cpu_reg_names_size = sizeof(cpu_reg_names);
95 for (i = 0; i < 8; i++) {
96 snprintf(p, cpu_reg_names_size, "crf%d", i);
97 cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
98 offsetof(CPUState, crf[i]), p);
99 p += 5;
100 cpu_reg_names_size -= 5;
103 for (i = 0; i < 32; i++) {
104 snprintf(p, cpu_reg_names_size, "r%d", i);
105 cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
106 offsetof(CPUState, gpr[i]), p);
107 p += (i < 10) ? 3 : 4;
108 cpu_reg_names_size -= (i < 10) ? 3 : 4;
109 #if !defined(TARGET_PPC64)
110 snprintf(p, cpu_reg_names_size, "r%dH", i);
111 cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
112 offsetof(CPUState, gprh[i]), p);
113 p += (i < 10) ? 4 : 5;
114 cpu_reg_names_size -= (i < 10) ? 4 : 5;
115 #endif
117 snprintf(p, cpu_reg_names_size, "fp%d", i);
118 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
119 offsetof(CPUState, fpr[i]), p);
120 p += (i < 10) ? 4 : 5;
121 cpu_reg_names_size -= (i < 10) ? 4 : 5;
123 snprintf(p, cpu_reg_names_size, "avr%dH", i);
124 #ifdef HOST_WORDS_BIGENDIAN
125 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
126 offsetof(CPUState, avr[i].u64[0]), p);
127 #else
128 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
129 offsetof(CPUState, avr[i].u64[1]), p);
130 #endif
131 p += (i < 10) ? 6 : 7;
132 cpu_reg_names_size -= (i < 10) ? 6 : 7;
134 snprintf(p, cpu_reg_names_size, "avr%dL", i);
135 #ifdef HOST_WORDS_BIGENDIAN
136 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
137 offsetof(CPUState, avr[i].u64[1]), p);
138 #else
139 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
140 offsetof(CPUState, avr[i].u64[0]), p);
141 #endif
142 p += (i < 10) ? 6 : 7;
143 cpu_reg_names_size -= (i < 10) ? 6 : 7;
146 cpu_nip = tcg_global_mem_new(TCG_AREG0,
147 offsetof(CPUState, nip), "nip");
149 cpu_msr = tcg_global_mem_new(TCG_AREG0,
150 offsetof(CPUState, msr), "msr");
152 cpu_ctr = tcg_global_mem_new(TCG_AREG0,
153 offsetof(CPUState, ctr), "ctr");
155 cpu_lr = tcg_global_mem_new(TCG_AREG0,
156 offsetof(CPUState, lr), "lr");
158 cpu_xer = tcg_global_mem_new(TCG_AREG0,
159 offsetof(CPUState, xer), "xer");
161 cpu_reserve = tcg_global_mem_new(TCG_AREG0,
162 offsetof(CPUState, reserve_addr),
163 "reserve_addr");
165 cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
166 offsetof(CPUState, fpscr), "fpscr");
168 cpu_access_type = tcg_global_mem_new_i32(TCG_AREG0,
169 offsetof(CPUState, access_type), "access_type");
171 /* register helpers */
172 #define GEN_HELPER 2
173 #include "helper.h"
175 done_init = 1;
178 /* internal defines */
179 typedef struct DisasContext {
180 struct TranslationBlock *tb;
181 target_ulong nip;
182 uint32_t opcode;
183 uint32_t exception;
184 /* Routine used to access memory */
185 int mem_idx;
186 int access_type;
187 /* Translation flags */
188 int le_mode;
189 #if defined(TARGET_PPC64)
190 int sf_mode;
191 #endif
192 int fpu_enabled;
193 int altivec_enabled;
194 int spe_enabled;
195 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
196 int singlestep_enabled;
197 } DisasContext;
199 struct opc_handler_t {
200 /* invalid bits */
201 uint32_t inval;
202 /* instruction type */
203 uint64_t type;
204 /* extended instruction type */
205 uint64_t type2;
206 /* handler */
207 void (*handler)(DisasContext *ctx);
208 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
209 const char *oname;
210 #endif
211 #if defined(DO_PPC_STATISTICS)
212 uint64_t count;
213 #endif
216 static inline void gen_reset_fpstatus(void)
218 gen_helper_reset_fpstatus();
221 static inline void gen_compute_fprf(TCGv_i64 arg, int set_fprf, int set_rc)
223 TCGv_i32 t0 = tcg_temp_new_i32();
225 if (set_fprf != 0) {
226 /* This case might be optimized later */
227 tcg_gen_movi_i32(t0, 1);
228 gen_helper_compute_fprf(t0, arg, t0);
229 if (unlikely(set_rc)) {
230 tcg_gen_mov_i32(cpu_crf[1], t0);
232 gen_helper_float_check_status();
233 } else if (unlikely(set_rc)) {
234 /* We always need to compute fpcc */
235 tcg_gen_movi_i32(t0, 0);
236 gen_helper_compute_fprf(t0, arg, t0);
237 tcg_gen_mov_i32(cpu_crf[1], t0);
240 tcg_temp_free_i32(t0);
243 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
245 if (ctx->access_type != access_type) {
246 tcg_gen_movi_i32(cpu_access_type, access_type);
247 ctx->access_type = access_type;
251 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
253 #if defined(TARGET_PPC64)
254 if (ctx->sf_mode)
255 tcg_gen_movi_tl(cpu_nip, nip);
256 else
257 #endif
258 tcg_gen_movi_tl(cpu_nip, (uint32_t)nip);
261 static inline void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
263 TCGv_i32 t0, t1;
264 if (ctx->exception == POWERPC_EXCP_NONE) {
265 gen_update_nip(ctx, ctx->nip);
267 t0 = tcg_const_i32(excp);
268 t1 = tcg_const_i32(error);
269 gen_helper_raise_exception_err(t0, t1);
270 tcg_temp_free_i32(t0);
271 tcg_temp_free_i32(t1);
272 ctx->exception = (excp);
275 static inline void gen_exception(DisasContext *ctx, uint32_t excp)
277 TCGv_i32 t0;
278 if (ctx->exception == POWERPC_EXCP_NONE) {
279 gen_update_nip(ctx, ctx->nip);
281 t0 = tcg_const_i32(excp);
282 gen_helper_raise_exception(t0);
283 tcg_temp_free_i32(t0);
284 ctx->exception = (excp);
287 static inline void gen_debug_exception(DisasContext *ctx)
289 TCGv_i32 t0;
291 if (ctx->exception != POWERPC_EXCP_BRANCH)
292 gen_update_nip(ctx, ctx->nip);
293 t0 = tcg_const_i32(EXCP_DEBUG);
294 gen_helper_raise_exception(t0);
295 tcg_temp_free_i32(t0);
298 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
300 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_INVAL | error);
303 /* Stop translation */
304 static inline void gen_stop_exception(DisasContext *ctx)
306 gen_update_nip(ctx, ctx->nip);
307 ctx->exception = POWERPC_EXCP_STOP;
310 /* No need to update nip here, as execution flow will change */
311 static inline void gen_sync_exception(DisasContext *ctx)
313 ctx->exception = POWERPC_EXCP_SYNC;
316 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
317 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
319 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \
320 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
322 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
323 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
325 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \
326 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
328 typedef struct opcode_t {
329 unsigned char opc1, opc2, opc3;
330 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
331 unsigned char pad[5];
332 #else
333 unsigned char pad[1];
334 #endif
335 opc_handler_t handler;
336 const char *oname;
337 } opcode_t;
339 /*****************************************************************************/
340 /*** Instruction decoding ***/
341 #define EXTRACT_HELPER(name, shift, nb) \
342 static inline uint32_t name(uint32_t opcode) \
344 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
347 #define EXTRACT_SHELPER(name, shift, nb) \
348 static inline int32_t name(uint32_t opcode) \
350 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
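/* Each helper pulls an <nb>-bit field out of the 32-bit opcode, <shift> bits
 * above the least significant bit.  E.g. rD below is (opcode >> 21) & 0x1F,
 * i.e. bits 6..10 in the big-endian bit numbering of the PowerPC manuals. */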
353 /* Opcode part 1 */
354 EXTRACT_HELPER(opc1, 26, 6);
355 /* Opcode part 2 */
356 EXTRACT_HELPER(opc2, 1, 5);
357 /* Opcode part 3 */
358 EXTRACT_HELPER(opc3, 6, 5);
359 /* Update Cr0 flags */
360 EXTRACT_HELPER(Rc, 0, 1);
361 /* Destination */
362 EXTRACT_HELPER(rD, 21, 5);
363 /* Source */
364 EXTRACT_HELPER(rS, 21, 5);
365 /* First operand */
366 EXTRACT_HELPER(rA, 16, 5);
367 /* Second operand */
368 EXTRACT_HELPER(rB, 11, 5);
369 /* Third operand */
370 EXTRACT_HELPER(rC, 6, 5);
371 /*** Get CRn ***/
372 EXTRACT_HELPER(crfD, 23, 3);
373 EXTRACT_HELPER(crfS, 18, 3);
374 EXTRACT_HELPER(crbD, 21, 5);
375 EXTRACT_HELPER(crbA, 16, 5);
376 EXTRACT_HELPER(crbB, 11, 5);
377 /* SPR / TBL */
378 EXTRACT_HELPER(_SPR, 11, 10);
379 static inline uint32_t SPR(uint32_t opcode)
381 uint32_t sprn = _SPR(opcode);
383 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
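/* The 10-bit SPR number is encoded in the instruction with its two 5-bit
 * halves swapped, so this helper swaps them back.  E.g. mtlr (mtspr 8)
 * carries the field value 0x100 = (8 & 0x1F) << 5, which SPR() turns back
 * into the architected number 8. */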
385 /*** Get constants ***/
386 EXTRACT_HELPER(IMM, 12, 8);
387 /* 16 bits signed immediate value */
388 EXTRACT_SHELPER(SIMM, 0, 16);
389 /* 16 bits unsigned immediate value */
390 EXTRACT_HELPER(UIMM, 0, 16);
391 /* 5 bits signed immediate value */
392 EXTRACT_HELPER(SIMM5, 16, 5);
393 /* 5 bits unsigned immediate value */
394 EXTRACT_HELPER(UIMM5, 16, 5);
395 /* Byte count */
396 EXTRACT_HELPER(NB, 11, 5);
397 /* Shift count */
398 EXTRACT_HELPER(SH, 11, 5);
399 /* Vector shift count */
400 EXTRACT_HELPER(VSH, 6, 4);
401 /* Mask start */
402 EXTRACT_HELPER(MB, 6, 5);
403 /* Mask end */
404 EXTRACT_HELPER(ME, 1, 5);
405 /* Trap operand */
406 EXTRACT_HELPER(TO, 21, 5);
408 EXTRACT_HELPER(CRM, 12, 8);
409 EXTRACT_HELPER(FM, 17, 8);
410 EXTRACT_HELPER(SR, 16, 4);
411 EXTRACT_HELPER(FPIMM, 12, 4);
413 /*** Jump target decoding ***/
414 /* Displacement */
415 EXTRACT_SHELPER(d, 0, 16);
416 /* Immediate address */
417 static inline target_ulong LI(uint32_t opcode)
419 return (opcode >> 0) & 0x03FFFFFC;
422 static inline uint32_t BD(uint32_t opcode)
424 return (opcode >> 0) & 0xFFFC;
427 EXTRACT_HELPER(BO, 21, 5);
428 EXTRACT_HELPER(BI, 16, 5);
429 /* Absolute/relative address */
430 EXTRACT_HELPER(AA, 1, 1);
431 /* Link */
432 EXTRACT_HELPER(LK, 0, 1);
434 /* Create a mask between <start> and <end> bits */
435 static inline target_ulong MASK(uint32_t start, uint32_t end)
437 target_ulong ret;
439 #if defined(TARGET_PPC64)
440 if (likely(start == 0)) {
441 ret = UINT64_MAX << (63 - end);
442 } else if (likely(end == 63)) {
443 ret = UINT64_MAX >> start;
445 #else
446 if (likely(start == 0)) {
447 ret = UINT32_MAX << (31 - end);
448 } else if (likely(end == 31)) {
449 ret = UINT32_MAX >> start;
451 #endif
452 else {
453 ret = (((target_ulong)(-1ULL)) >> (start)) ^
454 (((target_ulong)(-1ULL) >> (end)) >> 1);
455 if (unlikely(start > end))
456 return ~ret;
459 return ret;
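/* MASK() follows the big-endian bit numbering of the PowerPC manuals (bit 0
 * is the most significant bit).  E.g. on a 32-bit target MASK(0, 31) is
 * 0xFFFFFFFF, MASK(24, 31) is 0x000000FF, and a wrapping mask with
 * start > end, such as MASK(30, 1), is the complement of MASK(2, 29). */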
462 /*****************************************************************************/
463 /* PowerPC instructions table */
465 #if defined(DO_PPC_STATISTICS)
466 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
468 .opc1 = op1, \
469 .opc2 = op2, \
470 .opc3 = op3, \
471 .pad = { 0, }, \
472 .handler = { \
473 .inval = invl, \
474 .type = _typ, \
475 .type2 = _typ2, \
476 .handler = &gen_##name, \
477 .oname = stringify(name), \
478 }, \
479 .oname = stringify(name), \
481 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
483 .opc1 = op1, \
484 .opc2 = op2, \
485 .opc3 = op3, \
486 .pad = { 0, }, \
487 .handler = { \
488 .inval = invl, \
489 .type = _typ, \
490 .type2 = _typ2, \
491 .handler = &gen_##name, \
492 .oname = onam, \
493 }, \
494 .oname = onam, \
496 #else
497 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
499 .opc1 = op1, \
500 .opc2 = op2, \
501 .opc3 = op3, \
502 .pad = { 0, }, \
503 .handler = { \
504 .inval = invl, \
505 .type = _typ, \
506 .type2 = _typ2, \
507 .handler = &gen_##name, \
508 }, \
509 .oname = stringify(name), \
511 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
513 .opc1 = op1, \
514 .opc2 = op2, \
515 .opc3 = op3, \
516 .pad = { 0, }, \
517 .handler = { \
518 .inval = invl, \
519 .type = _typ, \
520 .type2 = _typ2, \
521 .handler = &gen_##name, \
522 }, \
523 .oname = onam, \
525 #endif
527 /* SPR load/store helpers */
528 static inline void gen_load_spr(TCGv t, int reg)
530 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
533 static inline void gen_store_spr(int reg, TCGv t)
535 tcg_gen_st_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
538 /* Invalid instruction */
539 static void gen_invalid(DisasContext *ctx)
541 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
544 static opc_handler_t invalid_handler = {
545 .inval = 0xFFFFFFFF,
546 .type = PPC_NONE,
547 .type2 = PPC_NONE,
548 .handler = gen_invalid,
551 /*** Integer comparison ***/
553 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
555 int l1, l2, l3;
557 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_xer);
558 tcg_gen_shri_i32(cpu_crf[crf], cpu_crf[crf], XER_SO);
559 tcg_gen_andi_i32(cpu_crf[crf], cpu_crf[crf], 1);
561 l1 = gen_new_label();
562 l2 = gen_new_label();
563 l3 = gen_new_label();
564 if (s) {
565 tcg_gen_brcond_tl(TCG_COND_LT, arg0, arg1, l1);
566 tcg_gen_brcond_tl(TCG_COND_GT, arg0, arg1, l2);
567 } else {
568 tcg_gen_brcond_tl(TCG_COND_LTU, arg0, arg1, l1);
569 tcg_gen_brcond_tl(TCG_COND_GTU, arg0, arg1, l2);
571 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_EQ);
572 tcg_gen_br(l3);
573 gen_set_label(l1);
574 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_LT);
575 tcg_gen_br(l3);
576 gen_set_label(l2);
577 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_GT);
578 gen_set_label(l3);
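/* The target CR field ends up holding exactly one of the LT/GT/EQ bits plus
 * a copy of XER[SO], as cmp/cmpl require.  E.g. a signed compare of 5 with 7
 * takes the l1 path above and sets only LT (and SO if it was already set). */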
581 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
583 TCGv t0 = tcg_const_local_tl(arg1);
584 gen_op_cmp(arg0, t0, s, crf);
585 tcg_temp_free(t0);
588 #if defined(TARGET_PPC64)
589 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
591 TCGv t0, t1;
592 t0 = tcg_temp_local_new();
593 t1 = tcg_temp_local_new();
594 if (s) {
595 tcg_gen_ext32s_tl(t0, arg0);
596 tcg_gen_ext32s_tl(t1, arg1);
597 } else {
598 tcg_gen_ext32u_tl(t0, arg0);
599 tcg_gen_ext32u_tl(t1, arg1);
601 gen_op_cmp(t0, t1, s, crf);
602 tcg_temp_free(t1);
603 tcg_temp_free(t0);
606 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
608 TCGv t0 = tcg_const_local_tl(arg1);
609 gen_op_cmp32(arg0, t0, s, crf);
610 tcg_temp_free(t0);
612 #endif
614 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
616 #if defined(TARGET_PPC64)
617 if (!(ctx->sf_mode))
618 gen_op_cmpi32(reg, 0, 1, 0);
619 else
620 #endif
621 gen_op_cmpi(reg, 0, 1, 0);
624 /* cmp */
625 static void gen_cmp(DisasContext *ctx)
627 #if defined(TARGET_PPC64)
628 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
629 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
630 1, crfD(ctx->opcode));
631 else
632 #endif
633 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
634 1, crfD(ctx->opcode));
637 /* cmpi */
638 static void gen_cmpi(DisasContext *ctx)
640 #if defined(TARGET_PPC64)
641 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
642 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
643 1, crfD(ctx->opcode));
644 else
645 #endif
646 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
647 1, crfD(ctx->opcode));
650 /* cmpl */
651 static void gen_cmpl(DisasContext *ctx)
653 #if defined(TARGET_PPC64)
654 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
655 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
656 0, crfD(ctx->opcode));
657 else
658 #endif
659 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
660 0, crfD(ctx->opcode));
663 /* cmpli */
664 static void gen_cmpli(DisasContext *ctx)
666 #if defined(TARGET_PPC64)
667 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
668 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
669 0, crfD(ctx->opcode));
670 else
671 #endif
672 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
673 0, crfD(ctx->opcode));
676 /* isel (PowerPC 2.03 specification) */
677 static void gen_isel(DisasContext *ctx)
679 int l1, l2;
680 uint32_t bi = rC(ctx->opcode);
681 uint32_t mask;
682 TCGv_i32 t0;
684 l1 = gen_new_label();
685 l2 = gen_new_label();
687 mask = 1 << (3 - (bi & 0x03));
688 t0 = tcg_temp_new_i32();
689 tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
690 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
691 if (rA(ctx->opcode) == 0)
692 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
693 else
694 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
695 tcg_gen_br(l2);
696 gen_set_label(l1);
697 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
698 gen_set_label(l2);
699 tcg_temp_free_i32(t0);
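/* isel rD,rA,rB,bi picks between two registers on a single CR bit:
 * rD = CR[bi] ? (rA ? GPR[rA] : 0) : GPR[rB].  The rA == 0 special case is
 * the usual "register 0 reads as zero" convention of the (rA|0) forms. */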
702 /*** Integer arithmetic ***/
704 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
705 TCGv arg1, TCGv arg2, int sub)
707 int l1;
708 TCGv t0;
710 l1 = gen_new_label();
711 /* Start with XER OV disabled, the most likely case */
712 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
713 t0 = tcg_temp_local_new();
714 tcg_gen_xor_tl(t0, arg0, arg1);
715 #if defined(TARGET_PPC64)
716 if (!ctx->sf_mode)
717 tcg_gen_ext32s_tl(t0, t0);
718 #endif
719 if (sub)
720 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
721 else
722 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
723 tcg_gen_xor_tl(t0, arg1, arg2);
724 #if defined(TARGET_PPC64)
725 if (!ctx->sf_mode)
726 tcg_gen_ext32s_tl(t0, t0);
727 #endif
728 if (sub)
729 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
730 else
731 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
732 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
733 gen_set_label(l1);
734 tcg_temp_free(t0);
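/* Signed overflow is derived from the operand and result signs alone: for an
 * addition it occurred iff the operands have the same sign and the result's
 * sign differs (e.g. 0x7FFFFFFF + 1 in 32-bit mode); for a subtraction
 * (arg0 = arg2 - arg1) iff the operands differ in sign and the result has
 * the sign of arg1.  In 32-bit mode on a 64-bit target the values are first
 * sign-extended from 32 bits so the same test applies. */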
737 static inline void gen_op_arith_compute_ca(DisasContext *ctx, TCGv arg1,
738 TCGv arg2, int sub)
740 int l1 = gen_new_label();
742 #if defined(TARGET_PPC64)
743 if (!(ctx->sf_mode)) {
744 TCGv t0, t1;
745 t0 = tcg_temp_new();
746 t1 = tcg_temp_new();
748 tcg_gen_ext32u_tl(t0, arg1);
749 tcg_gen_ext32u_tl(t1, arg2);
750 if (sub) {
751 tcg_gen_brcond_tl(TCG_COND_GTU, t0, t1, l1);
752 } else {
753 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
755 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
756 gen_set_label(l1);
757 tcg_temp_free(t0);
758 tcg_temp_free(t1);
759 } else
760 #endif
762 if (sub) {
763 tcg_gen_brcond_tl(TCG_COND_GTU, arg1, arg2, l1);
764 } else {
765 tcg_gen_brcond_tl(TCG_COND_GEU, arg1, arg2, l1);
767 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
768 gen_set_label(l1);
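/* XER[CA] is the carry out of the low 32 or 64 bits: for an addition it is
 * set when the result wrapped below the operand passed as arg2
 * (result <u operand), for a subtraction when no borrow occurred
 * (result <=u minuend). */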
772 /* Common add function */
773 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
774 TCGv arg2, int add_ca, int compute_ca,
775 int compute_ov)
777 TCGv t0, t1;
779 if ((!compute_ca && !compute_ov) ||
780 (!TCGV_EQUAL(ret,arg1) && !TCGV_EQUAL(ret, arg2))) {
781 t0 = ret;
782 } else {
783 t0 = tcg_temp_local_new();
786 if (add_ca) {
787 t1 = tcg_temp_local_new();
788 tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
789 tcg_gen_shri_tl(t1, t1, XER_CA);
790 } else {
791 TCGV_UNUSED(t1);
794 if (compute_ca && compute_ov) {
795 /* Start with XER CA and OV disabled, the most likely case */
796 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
797 } else if (compute_ca) {
798 /* Start with XER CA disabled, the most likely case */
799 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
800 } else if (compute_ov) {
801 /* Start with XER OV disabled, the most likely case */
802 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
805 tcg_gen_add_tl(t0, arg1, arg2);
807 if (compute_ca) {
808 gen_op_arith_compute_ca(ctx, t0, arg1, 0);
810 if (add_ca) {
811 tcg_gen_add_tl(t0, t0, t1);
812 gen_op_arith_compute_ca(ctx, t0, t1, 0);
813 tcg_temp_free(t1);
815 if (compute_ov) {
816 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
819 if (unlikely(Rc(ctx->opcode) != 0))
820 gen_set_Rc0(ctx, t0);
822 if (!TCGV_EQUAL(t0, ret)) {
823 tcg_gen_mov_tl(ret, t0);
824 tcg_temp_free(t0);
827 /* Add functions with two operands */
828 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
829 static void glue(gen_, name)(DisasContext *ctx) \
831 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
832 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
833 add_ca, compute_ca, compute_ov); \
835 /* Add functions with one operand and one immediate */
836 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
837 add_ca, compute_ca, compute_ov) \
838 static void glue(gen_, name)(DisasContext *ctx) \
840 TCGv t0 = tcg_const_local_tl(const_val); \
841 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
842 cpu_gpr[rA(ctx->opcode)], t0, \
843 add_ca, compute_ca, compute_ov); \
844 tcg_temp_free(t0); \
847 /* add add. addo addo. */
848 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
849 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
850 /* addc addc. addco addco. */
851 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
852 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
853 /* adde adde. addeo addeo. */
854 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
855 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
856 /* addme addme. addmeo addmeo. */
857 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
858 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
859 /* addze addze. addzeo addzeo.*/
860 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
861 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
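/* addme and addze are just the generic carrying add with a constant second
 * operand: addme rD,rA computes rA + CA + (-1) and addze rD,rA computes
 * rA + CA + 0, with the 'o' forms also updating OV/SO. */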
862 /* addi */
863 static void gen_addi(DisasContext *ctx)
865 target_long simm = SIMM(ctx->opcode);
867 if (rA(ctx->opcode) == 0) {
868 /* li case */
869 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
870 } else {
871 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm);
874 /* addic addic.*/
875 static inline void gen_op_addic(DisasContext *ctx, TCGv ret, TCGv arg1,
876 int compute_Rc0)
878 target_long simm = SIMM(ctx->opcode);
880 /* Start with XER CA disabled, the most likely case */
881 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
883 if (likely(simm != 0)) {
884 TCGv t0 = tcg_temp_local_new();
885 tcg_gen_addi_tl(t0, arg1, simm);
886 gen_op_arith_compute_ca(ctx, t0, arg1, 0);
887 tcg_gen_mov_tl(ret, t0);
888 tcg_temp_free(t0);
889 } else {
890 tcg_gen_mov_tl(ret, arg1);
892 if (compute_Rc0) {
893 gen_set_Rc0(ctx, ret);
897 static void gen_addic(DisasContext *ctx)
899 gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
902 static void gen_addic_(DisasContext *ctx)
904 gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
907 /* addis */
908 static void gen_addis(DisasContext *ctx)
910 target_long simm = SIMM(ctx->opcode);
912 if (rA(ctx->opcode) == 0) {
913 /* lis case */
914 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
915 } else {
916 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm << 16);
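/* With rA == 0, addi and addis degenerate into the li and lis mnemonics, so
 * only an immediate move (shifted left by 16 for lis) is generated, never an
 * add. */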
920 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
921 TCGv arg2, int sign, int compute_ov)
923 int l1 = gen_new_label();
924 int l2 = gen_new_label();
925 TCGv_i32 t0 = tcg_temp_local_new_i32();
926 TCGv_i32 t1 = tcg_temp_local_new_i32();
928 tcg_gen_trunc_tl_i32(t0, arg1);
929 tcg_gen_trunc_tl_i32(t1, arg2);
930 tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1);
931 if (sign) {
932 int l3 = gen_new_label();
933 tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3);
934 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1);
935 gen_set_label(l3);
936 tcg_gen_div_i32(t0, t0, t1);
937 } else {
938 tcg_gen_divu_i32(t0, t0, t1);
940 if (compute_ov) {
941 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
943 tcg_gen_br(l2);
944 gen_set_label(l1);
945 if (sign) {
946 tcg_gen_sari_i32(t0, t0, 31);
947 } else {
948 tcg_gen_movi_i32(t0, 0);
950 if (compute_ov) {
951 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
953 gen_set_label(l2);
954 tcg_gen_extu_i32_tl(ret, t0);
955 tcg_temp_free_i32(t0);
956 tcg_temp_free_i32(t1);
957 if (unlikely(Rc(ctx->opcode) != 0))
958 gen_set_Rc0(ctx, ret);
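/* The cases the ISA leaves undefined - division by zero, and INT32_MIN / -1
 * for the signed form - branch to l1: the quotient is forced to copies of
 * the dividend's sign bit (signed) or to 0 (unsigned), and divwo/divwuo set
 * OV and SO instead of trapping. */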
960 /* Div functions */
961 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
962 static void glue(gen_, name)(DisasContext *ctx) \
964 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
965 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
966 sign, compute_ov); \
968 /* divwu divwu. divwuo divwuo. */
969 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
970 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
971 /* divw divw. divwo divwo. */
972 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
973 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
974 #if defined(TARGET_PPC64)
975 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
976 TCGv arg2, int sign, int compute_ov)
978 int l1 = gen_new_label();
979 int l2 = gen_new_label();
981 tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1);
982 if (sign) {
983 int l3 = gen_new_label();
984 tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3);
985 tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1);
986 gen_set_label(l3);
987 tcg_gen_div_i64(ret, arg1, arg2);
988 } else {
989 tcg_gen_divu_i64(ret, arg1, arg2);
991 if (compute_ov) {
992 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
994 tcg_gen_br(l2);
995 gen_set_label(l1);
996 if (sign) {
997 tcg_gen_sari_i64(ret, arg1, 63);
998 } else {
999 tcg_gen_movi_i64(ret, 0);
1001 if (compute_ov) {
1002 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1004 gen_set_label(l2);
1005 if (unlikely(Rc(ctx->opcode) != 0))
1006 gen_set_Rc0(ctx, ret);
1008 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
1009 static void glue(gen_, name)(DisasContext *ctx) \
1011 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
1012 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1013 sign, compute_ov); \
1015 /* divdu divdu. divduo divduo. */
1016 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1017 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1018 /* divd divd. divdo divdo. */
1019 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1020 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1021 #endif
1023 /* mulhw mulhw. */
1024 static void gen_mulhw(DisasContext *ctx)
1026 TCGv_i64 t0, t1;
1028 t0 = tcg_temp_new_i64();
1029 t1 = tcg_temp_new_i64();
1030 #if defined(TARGET_PPC64)
1031 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
1032 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
1033 tcg_gen_mul_i64(t0, t0, t1);
1034 tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
1035 #else
1036 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1037 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1038 tcg_gen_mul_i64(t0, t0, t1);
1039 tcg_gen_shri_i64(t0, t0, 32);
1040 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1041 #endif
1042 tcg_temp_free_i64(t0);
1043 tcg_temp_free_i64(t1);
1044 if (unlikely(Rc(ctx->opcode) != 0))
1045 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1048 /* mulhwu mulhwu. */
1049 static void gen_mulhwu(DisasContext *ctx)
1051 TCGv_i64 t0, t1;
1053 t0 = tcg_temp_new_i64();
1054 t1 = tcg_temp_new_i64();
1055 #if defined(TARGET_PPC64)
1056 tcg_gen_ext32u_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1057 tcg_gen_ext32u_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1058 tcg_gen_mul_i64(t0, t0, t1);
1059 tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
1060 #else
1061 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1062 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1063 tcg_gen_mul_i64(t0, t0, t1);
1064 tcg_gen_shri_i64(t0, t0, 32);
1065 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1066 #endif
1067 tcg_temp_free_i64(t0);
1068 tcg_temp_free_i64(t1);
1069 if (unlikely(Rc(ctx->opcode) != 0))
1070 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1073 /* mullw mullw. */
1074 static void gen_mullw(DisasContext *ctx)
1076 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1077 cpu_gpr[rB(ctx->opcode)]);
1078 tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]);
1079 if (unlikely(Rc(ctx->opcode) != 0))
1080 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1083 /* mullwo mullwo. */
1084 static void gen_mullwo(DisasContext *ctx)
1086 int l1;
1087 TCGv_i64 t0, t1;
1089 t0 = tcg_temp_new_i64();
1090 t1 = tcg_temp_new_i64();
1091 l1 = gen_new_label();
1092 /* Start with XER OV disabled, the most likely case */
1093 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1094 #if defined(TARGET_PPC64)
1095 tcg_gen_ext32s_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1096 tcg_gen_ext32s_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1097 #else
1098 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
1099 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
1100 #endif
1101 tcg_gen_mul_i64(t0, t0, t1);
1102 #if defined(TARGET_PPC64)
1103 tcg_gen_ext32s_i64(cpu_gpr[rD(ctx->opcode)], t0);
1104 tcg_gen_brcond_i64(TCG_COND_EQ, t0, cpu_gpr[rD(ctx->opcode)], l1);
1105 #else
1106 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
1107 tcg_gen_ext32s_i64(t1, t0);
1108 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
1109 #endif
1110 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1111 gen_set_label(l1);
1112 tcg_temp_free_i64(t0);
1113 tcg_temp_free_i64(t1);
1114 if (unlikely(Rc(ctx->opcode) != 0))
1115 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
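/* mullwo forms the full 64-bit product and flags overflow whenever that
 * product does not survive sign extension from 32 bits, e.g.
 * 0x10000 * 0x10000 sets OV/SO while 0x7FFF * 2 does not. */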
1118 /* mulli */
1119 static void gen_mulli(DisasContext *ctx)
1121 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1122 SIMM(ctx->opcode));
1124 #if defined(TARGET_PPC64)
1125 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
1126 static void glue(gen_, name)(DisasContext *ctx) \
1128 gen_helper_##name (cpu_gpr[rD(ctx->opcode)], \
1129 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
1130 if (unlikely(Rc(ctx->opcode) != 0)) \
1131 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
1133 /* mulhdu mulhdu. */
1134 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00);
1135 /* mulhd mulhd. */
1136 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02);
1138 /* mulld mulld. */
1139 static void gen_mulld(DisasContext *ctx)
1141 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1142 cpu_gpr[rB(ctx->opcode)]);
1143 if (unlikely(Rc(ctx->opcode) != 0))
1144 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1146 /* mulldo mulldo. */
1147 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17);
1148 #endif
1150 /* neg neg. nego nego. */
1151 static inline void gen_op_arith_neg(DisasContext *ctx, TCGv ret, TCGv arg1,
1152 int ov_check)
1154 int l1 = gen_new_label();
1155 int l2 = gen_new_label();
1156 TCGv t0 = tcg_temp_local_new();
1157 #if defined(TARGET_PPC64)
1158 if (ctx->sf_mode) {
1159 tcg_gen_mov_tl(t0, arg1);
1160 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT64_MIN, l1);
1161 } else
1162 #endif
1164 tcg_gen_ext32s_tl(t0, arg1);
1165 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT32_MIN, l1);
1167 tcg_gen_neg_tl(ret, arg1);
1168 if (ov_check) {
1169 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1171 tcg_gen_br(l2);
1172 gen_set_label(l1);
1173 tcg_gen_mov_tl(ret, t0);
1174 if (ov_check) {
1175 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1177 gen_set_label(l2);
1178 tcg_temp_free(t0);
1179 if (unlikely(Rc(ctx->opcode) != 0))
1180 gen_set_Rc0(ctx, ret);
1183 static void gen_neg(DisasContext *ctx)
1185 gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
1188 static void gen_nego(DisasContext *ctx)
1190 gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
1193 /* Common subf function */
1194 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1195 TCGv arg2, int add_ca, int compute_ca,
1196 int compute_ov)
1198 TCGv t0, t1;
1200 if ((!compute_ca && !compute_ov) ||
1201 (!TCGV_EQUAL(ret, arg1) && !TCGV_EQUAL(ret, arg2))) {
1202 t0 = ret;
1203 } else {
1204 t0 = tcg_temp_local_new();
1207 if (add_ca) {
1208 t1 = tcg_temp_local_new();
1209 tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
1210 tcg_gen_shri_tl(t1, t1, XER_CA);
1211 } else {
1212 TCGV_UNUSED(t1);
1215 if (compute_ca && compute_ov) {
1216 /* Start with XER CA and OV disabled, the most likely case */
1217 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
1218 } else if (compute_ca) {
1219 /* Start with XER CA disabled, the most likely case */
1220 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1221 } else if (compute_ov) {
1222 /* Start with XER OV disabled, the most likely case */
1223 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
1226 if (add_ca) {
1227 tcg_gen_not_tl(t0, arg1);
1228 tcg_gen_add_tl(t0, t0, arg2);
1229 gen_op_arith_compute_ca(ctx, t0, arg2, 0);
1230 tcg_gen_add_tl(t0, t0, t1);
1231 gen_op_arith_compute_ca(ctx, t0, t1, 0);
1232 tcg_temp_free(t1);
1233 } else {
1234 tcg_gen_sub_tl(t0, arg2, arg1);
1235 if (compute_ca) {
1236 gen_op_arith_compute_ca(ctx, t0, arg2, 1);
1239 if (compute_ov) {
1240 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1243 if (unlikely(Rc(ctx->opcode) != 0))
1244 gen_set_Rc0(ctx, t0);
1246 if (!TCGV_EQUAL(t0, ret)) {
1247 tcg_gen_mov_tl(ret, t0);
1248 tcg_temp_free(t0);
1251 /* Sub functions with two operands */
1252 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
1253 static void glue(gen_, name)(DisasContext *ctx) \
1255 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1256 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1257 add_ca, compute_ca, compute_ov); \
1259 /* Sub functions with one operand and one immediate */
1260 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
1261 add_ca, compute_ca, compute_ov) \
1262 static void glue(gen_, name)(DisasContext *ctx) \
1264 TCGv t0 = tcg_const_local_tl(const_val); \
1265 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1266 cpu_gpr[rA(ctx->opcode)], t0, \
1267 add_ca, compute_ca, compute_ov); \
1268 tcg_temp_free(t0); \
1270 /* subf subf. subfo subfo. */
1271 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
1272 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
1273 /* subfc subfc. subfco subfco. */
1274 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
1275 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
1276 /* subfe subfe. subfeo subfeo. */
1277 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
1278 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
1279 /* subfme subfme. subfmeo subfmeo. */
1280 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
1281 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
1282 /* subfze subfze. subfzeo subfzeo.*/
1283 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
1284 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
1286 /* subfic */
1287 static void gen_subfic(DisasContext *ctx)
1289 /* Start with XER CA disabled, the most likely case */
1290 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1291 TCGv t0 = tcg_temp_local_new();
1292 TCGv t1 = tcg_const_local_tl(SIMM(ctx->opcode));
1293 tcg_gen_sub_tl(t0, t1, cpu_gpr[rA(ctx->opcode)]);
1294 gen_op_arith_compute_ca(ctx, t0, t1, 1);
1295 tcg_temp_free(t1);
1296 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
1297 tcg_temp_free(t0);
1300 /*** Integer logical ***/
1301 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
1302 static void glue(gen_, name)(DisasContext *ctx) \
1304 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
1305 cpu_gpr[rB(ctx->opcode)]); \
1306 if (unlikely(Rc(ctx->opcode) != 0)) \
1307 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1310 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
1311 static void glue(gen_, name)(DisasContext *ctx) \
1313 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
1314 if (unlikely(Rc(ctx->opcode) != 0)) \
1315 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1318 /* and & and. */
1319 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
1320 /* andc & andc. */
1321 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
1323 /* andi. */
1324 static void gen_andi_(DisasContext *ctx)
1326 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
1327 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1330 /* andis. */
1331 static void gen_andis_(DisasContext *ctx)
1333 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
1334 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1337 /* cntlzw */
1338 static void gen_cntlzw(DisasContext *ctx)
1340 gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1341 if (unlikely(Rc(ctx->opcode) != 0))
1342 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1344 /* eqv & eqv. */
1345 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
1346 /* extsb & extsb. */
1347 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
1348 /* extsh & extsh. */
1349 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
1350 /* nand & nand. */
1351 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
1352 /* nor & nor. */
1353 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
1355 /* or & or. */
1356 static void gen_or(DisasContext *ctx)
1358 int rs, ra, rb;
1360 rs = rS(ctx->opcode);
1361 ra = rA(ctx->opcode);
1362 rb = rB(ctx->opcode);
1363 /* Optimisation for mr. ri case */
1364 if (rs != ra || rs != rb) {
1365 if (rs != rb)
1366 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
1367 else
1368 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
1369 if (unlikely(Rc(ctx->opcode) != 0))
1370 gen_set_Rc0(ctx, cpu_gpr[ra]);
1371 } else if (unlikely(Rc(ctx->opcode) != 0)) {
1372 gen_set_Rc0(ctx, cpu_gpr[rs]);
1373 #if defined(TARGET_PPC64)
1374 } else {
1375 int prio = 0;
1377 switch (rs) {
1378 case 1:
1379 /* Set process priority to low */
1380 prio = 2;
1381 break;
1382 case 6:
1383 /* Set process priority to medium-low */
1384 prio = 3;
1385 break;
1386 case 2:
1387 /* Set process priority to normal */
1388 prio = 4;
1389 break;
1390 #if !defined(CONFIG_USER_ONLY)
1391 case 31:
1392 if (ctx->mem_idx > 0) {
1393 /* Set process priority to very low */
1394 prio = 1;
1396 break;
1397 case 5:
1398 if (ctx->mem_idx > 0) {
1399 /* Set process priority to medium-high */
1400 prio = 5;
1402 break;
1403 case 3:
1404 if (ctx->mem_idx > 0) {
1405 /* Set process priority to high */
1406 prio = 6;
1408 break;
1409 case 7:
1410 if (ctx->mem_idx > 1) {
1411 /* Set process priority to very high */
1412 prio = 7;
1414 break;
1415 #endif
1416 default:
1417 /* nop */
1418 break;
1420 if (prio) {
1421 TCGv t0 = tcg_temp_new();
1422 gen_load_spr(t0, SPR_PPR);
1423 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
1424 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
1425 gen_store_spr(SPR_PPR, t0);
1426 tcg_temp_free(t0);
1428 #endif
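/* On 64-bit CPUs "or rx,rx,rx" with rS == rA == rB is not a plain move but a
 * thread-priority hint: e.g. or 1,1,1 requests low, or 2,2,2 normal and
 * or 31,31,31 very low priority, and the hint is folded into SPR_PPR above.
 * Unrecognised (or insufficiently privileged) register numbers are treated
 * as a nop. */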
1431 /* orc & orc. */
1432 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
1434 /* xor & xor. */
1435 static void gen_xor(DisasContext *ctx)
1437 /* Optimisation for "set to zero" case */
1438 if (rS(ctx->opcode) != rB(ctx->opcode))
1439 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1440 else
1441 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1442 if (unlikely(Rc(ctx->opcode) != 0))
1443 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1446 /* ori */
1447 static void gen_ori(DisasContext *ctx)
1449 target_ulong uimm = UIMM(ctx->opcode);
1451 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1452 /* NOP */
1453 /* XXX: should handle special NOPs for POWER series */
1454 return;
1456 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1459 /* oris */
1460 static void gen_oris(DisasContext *ctx)
1462 target_ulong uimm = UIMM(ctx->opcode);
1464 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1465 /* NOP */
1466 return;
1468 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1471 /* xori */
1472 static void gen_xori(DisasContext *ctx)
1474 target_ulong uimm = UIMM(ctx->opcode);
1476 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1477 /* NOP */
1478 return;
1480 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1483 /* xoris */
1484 static void gen_xoris(DisasContext *ctx)
1486 target_ulong uimm = UIMM(ctx->opcode);
1488 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1489 /* NOP */
1490 return;
1492 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1495 /* popcntb : PowerPC 2.03 specification */
1496 static void gen_popcntb(DisasContext *ctx)
1498 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1501 static void gen_popcntw(DisasContext *ctx)
1503 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1506 #if defined(TARGET_PPC64)
1507 /* popcntd: PowerPC 2.06 specification */
1508 static void gen_popcntd(DisasContext *ctx)
1510 gen_helper_popcntd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1512 #endif
1514 #if defined(TARGET_PPC64)
1515 /* extsw & extsw. */
1516 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
1518 /* cntlzd */
1519 static void gen_cntlzd(DisasContext *ctx)
1521 gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1522 if (unlikely(Rc(ctx->opcode) != 0))
1523 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1525 #endif
1527 /*** Integer rotate ***/
1529 /* rlwimi & rlwimi. */
1530 static void gen_rlwimi(DisasContext *ctx)
1532 uint32_t mb, me, sh;
1534 mb = MB(ctx->opcode);
1535 me = ME(ctx->opcode);
1536 sh = SH(ctx->opcode);
1537 if (likely(sh == 0 && mb == 0 && me == 31)) {
1538 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1539 } else {
1540 target_ulong mask;
1541 TCGv t1;
1542 TCGv t0 = tcg_temp_new();
1543 #if defined(TARGET_PPC64)
1544 TCGv_i32 t2 = tcg_temp_new_i32();
1545 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
1546 tcg_gen_rotli_i32(t2, t2, sh);
1547 tcg_gen_extu_i32_i64(t0, t2);
1548 tcg_temp_free_i32(t2);
1549 #else
1550 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1551 #endif
1552 #if defined(TARGET_PPC64)
1553 mb += 32;
1554 me += 32;
1555 #endif
1556 mask = MASK(mb, me);
1557 t1 = tcg_temp_new();
1558 tcg_gen_andi_tl(t0, t0, mask);
1559 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1560 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1561 tcg_temp_free(t0);
1562 tcg_temp_free(t1);
1564 if (unlikely(Rc(ctx->opcode) != 0))
1565 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1568 /* rlwinm & rlwinm. */
1569 static void gen_rlwinm(DisasContext *ctx)
1571 uint32_t mb, me, sh;
1573 sh = SH(ctx->opcode);
1574 mb = MB(ctx->opcode);
1575 me = ME(ctx->opcode);
1577 if (likely(mb == 0 && me == (31 - sh))) {
1578 if (likely(sh == 0)) {
1579 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1580 } else {
1581 TCGv t0 = tcg_temp_new();
1582 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1583 tcg_gen_shli_tl(t0, t0, sh);
1584 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1585 tcg_temp_free(t0);
1587 } else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
1588 TCGv t0 = tcg_temp_new();
1589 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1590 tcg_gen_shri_tl(t0, t0, mb);
1591 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1592 tcg_temp_free(t0);
1593 } else {
1594 TCGv t0 = tcg_temp_new();
1595 #if defined(TARGET_PPC64)
1596 TCGv_i32 t1 = tcg_temp_new_i32();
1597 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1598 tcg_gen_rotli_i32(t1, t1, sh);
1599 tcg_gen_extu_i32_i64(t0, t1);
1600 tcg_temp_free_i32(t1);
1601 #else
1602 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1603 #endif
1604 #if defined(TARGET_PPC64)
1605 mb += 32;
1606 me += 32;
1607 #endif
1608 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1609 tcg_temp_free(t0);
1611 if (unlikely(Rc(ctx->opcode) != 0))
1612 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
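/* rlwinm is "rotate left word immediate then AND with mask"; the fast paths
 * above match the common simplified mnemonics, e.g. slwi rA,rS,n is
 * rlwinm rA,rS,n,0,31-n and srwi rA,rS,n is rlwinm rA,rS,32-n,n,31. */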
1615 /* rlwnm & rlwnm. */
1616 static void gen_rlwnm(DisasContext *ctx)
1618 uint32_t mb, me;
1619 TCGv t0;
1620 #if defined(TARGET_PPC64)
1621 TCGv_i32 t1, t2;
1622 #endif
1624 mb = MB(ctx->opcode);
1625 me = ME(ctx->opcode);
1626 t0 = tcg_temp_new();
1627 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
1628 #if defined(TARGET_PPC64)
1629 t1 = tcg_temp_new_i32();
1630 t2 = tcg_temp_new_i32();
1631 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1632 tcg_gen_trunc_i64_i32(t2, t0);
1633 tcg_gen_rotl_i32(t1, t1, t2);
1634 tcg_gen_extu_i32_i64(t0, t1);
1635 tcg_temp_free_i32(t1);
1636 tcg_temp_free_i32(t2);
1637 #else
1638 tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
1639 #endif
1640 if (unlikely(mb != 0 || me != 31)) {
1641 #if defined(TARGET_PPC64)
1642 mb += 32;
1643 me += 32;
1644 #endif
1645 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1646 } else {
1647 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1649 tcg_temp_free(t0);
1650 if (unlikely(Rc(ctx->opcode) != 0))
1651 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1654 #if defined(TARGET_PPC64)
1655 #define GEN_PPC64_R2(name, opc1, opc2) \
1656 static void glue(gen_, name##0)(DisasContext *ctx) \
1658 gen_##name(ctx, 0); \
1661 static void glue(gen_, name##1)(DisasContext *ctx) \
1663 gen_##name(ctx, 1); \
1665 #define GEN_PPC64_R4(name, opc1, opc2) \
1666 static void glue(gen_, name##0)(DisasContext *ctx) \
1668 gen_##name(ctx, 0, 0); \
1671 static void glue(gen_, name##1)(DisasContext *ctx) \
1673 gen_##name(ctx, 0, 1); \
1676 static void glue(gen_, name##2)(DisasContext *ctx) \
1678 gen_##name(ctx, 1, 0); \
1681 static void glue(gen_, name##3)(DisasContext *ctx) \
1683 gen_##name(ctx, 1, 1); \
1686 static inline void gen_rldinm(DisasContext *ctx, uint32_t mb, uint32_t me,
1687 uint32_t sh)
1689 if (likely(sh != 0 && mb == 0 && me == (63 - sh))) {
1690 tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
1691 } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
1692 tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
1693 } else {
1694 TCGv t0 = tcg_temp_new();
1695 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1696 if (likely(mb == 0 && me == 63)) {
1697 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1698 } else {
1699 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1701 tcg_temp_free(t0);
1703 if (unlikely(Rc(ctx->opcode) != 0))
1704 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1706 /* rldicl - rldicl. */
1707 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
1709 uint32_t sh, mb;
1711 sh = SH(ctx->opcode) | (shn << 5);
1712 mb = MB(ctx->opcode) | (mbn << 5);
1713 gen_rldinm(ctx, mb, 63, sh);
1715 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1716 /* rldicr - rldicr. */
1717 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
1719 uint32_t sh, me;
1721 sh = SH(ctx->opcode) | (shn << 5);
1722 me = MB(ctx->opcode) | (men << 5);
1723 gen_rldinm(ctx, 0, me, sh);
1725 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1726 /* rldic - rldic. */
1727 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
1729 uint32_t sh, mb;
1731 sh = SH(ctx->opcode) | (shn << 5);
1732 mb = MB(ctx->opcode) | (mbn << 5);
1733 gen_rldinm(ctx, mb, 63 - sh, sh);
1735 GEN_PPC64_R4(rldic, 0x1E, 0x04);
1737 static inline void gen_rldnm(DisasContext *ctx, uint32_t mb, uint32_t me)
1739 TCGv t0;
1741 mb = MB(ctx->opcode);
1742 me = ME(ctx->opcode);
1743 t0 = tcg_temp_new();
1744 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1745 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1746 if (unlikely(mb != 0 || me != 63)) {
1747 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1748 } else {
1749 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1751 tcg_temp_free(t0);
1752 if (unlikely(Rc(ctx->opcode) != 0))
1753 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1756 /* rldcl - rldcl. */
1757 static inline void gen_rldcl(DisasContext *ctx, int mbn)
1759 uint32_t mb;
1761 mb = MB(ctx->opcode) | (mbn << 5);
1762 gen_rldnm(ctx, mb, 63);
1764 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1765 /* rldcr - rldcr. */
1766 static inline void gen_rldcr(DisasContext *ctx, int men)
1768 uint32_t me;
1770 me = MB(ctx->opcode) | (men << 5);
1771 gen_rldnm(ctx, 0, me);
1773 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1774 /* rldimi - rldimi. */
1775 static inline void gen_rldimi(DisasContext *ctx, int mbn, int shn)
1777 uint32_t sh, mb, me;
1779 sh = SH(ctx->opcode) | (shn << 5);
1780 mb = MB(ctx->opcode) | (mbn << 5);
1781 me = 63 - sh;
1782 if (unlikely(sh == 0 && mb == 0)) {
1783 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1784 } else {
1785 TCGv t0, t1;
1786 target_ulong mask;
1788 t0 = tcg_temp_new();
1789 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1790 t1 = tcg_temp_new();
1791 mask = MASK(mb, me);
1792 tcg_gen_andi_tl(t0, t0, mask);
1793 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1794 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1795 tcg_temp_free(t0);
1796 tcg_temp_free(t1);
1798 if (unlikely(Rc(ctx->opcode) != 0))
1799 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1801 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1802 #endif
1804 /*** Integer shift ***/
1806 /* slw & slw. */
1807 static void gen_slw(DisasContext *ctx)
1809 TCGv t0, t1;
1811 t0 = tcg_temp_new();
1812 /* AND rS with a mask that is 0 when rB >= 0x20 */
1813 #if defined(TARGET_PPC64)
1814 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1815 tcg_gen_sari_tl(t0, t0, 0x3f);
1816 #else
1817 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1818 tcg_gen_sari_tl(t0, t0, 0x1f);
1819 #endif
1820 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1821 t1 = tcg_temp_new();
1822 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1823 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1824 tcg_temp_free(t1);
1825 tcg_temp_free(t0);
1826 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
1827 if (unlikely(Rc(ctx->opcode) != 0))
1828 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
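/* The architected shift amount for slw is the low 6 bits of rB and the
 * result is 0 whenever the 0x20 bit is set; the shli/sari pair above builds
 * a mask that clears rS in exactly that case before the real 5-bit shift. */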
1831 /* sraw & sraw. */
1832 static void gen_sraw(DisasContext *ctx)
1834 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)],
1835 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1836 if (unlikely(Rc(ctx->opcode) != 0))
1837 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1840 /* srawi & srawi. */
1841 static void gen_srawi(DisasContext *ctx)
1843 int sh = SH(ctx->opcode);
1844 if (sh != 0) {
1845 int l1, l2;
1846 TCGv t0;
1847 l1 = gen_new_label();
1848 l2 = gen_new_label();
1849 t0 = tcg_temp_local_new();
1850 tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1851 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
1852 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
1853 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
1854 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
1855 tcg_gen_br(l2);
1856 gen_set_label(l1);
1857 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1858 gen_set_label(l2);
1859 tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1860 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], t0, sh);
1861 tcg_temp_free(t0);
1862 } else {
1863 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1864 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1866 if (unlikely(Rc(ctx->opcode) != 0))
1867 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
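/* For sraw/srawi the carry bit records an inexact negative result: CA is set
 * only when rS is negative and at least one 1 bit is shifted out, so e.g.
 * srawi rA,rS,1 with rS = -3 sets CA while rS = -4 leaves it clear. */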
1870 /* srw & srw. */
1871 static void gen_srw(DisasContext *ctx)
1873 TCGv t0, t1;
1875 t0 = tcg_temp_new();
1876 /* AND rS with a mask that is 0 when rB >= 0x20 */
1877 #if defined(TARGET_PPC64)
1878 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1879 tcg_gen_sari_tl(t0, t0, 0x3f);
1880 #else
1881 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1882 tcg_gen_sari_tl(t0, t0, 0x1f);
1883 #endif
1884 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1885 tcg_gen_ext32u_tl(t0, t0);
1886 t1 = tcg_temp_new();
1887 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1888 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1889 tcg_temp_free(t1);
1890 tcg_temp_free(t0);
1891 if (unlikely(Rc(ctx->opcode) != 0))
1892 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1895 #if defined(TARGET_PPC64)
1896 /* sld & sld. */
1897 static void gen_sld(DisasContext *ctx)
1899 TCGv t0, t1;
1901 t0 = tcg_temp_new();
1902 /* AND rS with a mask that is 0 when rB >= 0x40 */
1903 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
1904 tcg_gen_sari_tl(t0, t0, 0x3f);
1905 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1906 t1 = tcg_temp_new();
1907 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
1908 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1909 tcg_temp_free(t1);
1910 tcg_temp_free(t0);
1911 if (unlikely(Rc(ctx->opcode) != 0))
1912 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1915 /* srad & srad. */
1916 static void gen_srad(DisasContext *ctx)
1918 gen_helper_srad(cpu_gpr[rA(ctx->opcode)],
1919 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1920 if (unlikely(Rc(ctx->opcode) != 0))
1921 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1923 /* sradi & sradi. */
1924 static inline void gen_sradi(DisasContext *ctx, int n)
1926 int sh = SH(ctx->opcode) + (n << 5);
1927 if (sh != 0) {
1928 int l1, l2;
1929 TCGv t0;
1930 l1 = gen_new_label();
1931 l2 = gen_new_label();
1932 t0 = tcg_temp_local_new();
1933 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
1934 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
1935 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
1936 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
1937 tcg_gen_br(l2);
1938 gen_set_label(l1);
1939 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1940 gen_set_label(l2);
1941 tcg_temp_free(t0);
1942 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
1943 } else {
1944 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1945 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1947 if (unlikely(Rc(ctx->opcode) != 0))
1948 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1951 static void gen_sradi0(DisasContext *ctx)
1953 gen_sradi(ctx, 0);
1956 static void gen_sradi1(DisasContext *ctx)
1958 gen_sradi(ctx, 1);
1961 /* srd & srd. */
1962 static void gen_srd(DisasContext *ctx)
1964 TCGv t0, t1;
1966 t0 = tcg_temp_new();
1967 /* AND rS with a mask that is 0 when rB >= 0x40 */
1968 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
1969 tcg_gen_sari_tl(t0, t0, 0x3f);
1970 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1971 t1 = tcg_temp_new();
1972 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
1973 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1974 tcg_temp_free(t1);
1975 tcg_temp_free(t0);
1976 if (unlikely(Rc(ctx->opcode) != 0))
1977 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1979 #endif
1981 /*** Floating-Point arithmetic ***/
1982 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
1983 static void gen_f##name(DisasContext *ctx) \
1985 if (unlikely(!ctx->fpu_enabled)) { \
1986 gen_exception(ctx, POWERPC_EXCP_FPU); \
1987 return; \
1989 /* NIP cannot be restored if the memory exception comes from a helper */ \
1990 gen_update_nip(ctx, ctx->nip - 4); \
1991 gen_reset_fpstatus(); \
1992 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
1993 cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
1994 if (isfloat) { \
1995 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
1997 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], set_fprf, \
1998 Rc(ctx->opcode) != 0); \
2001 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
2002 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
2003 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
2005 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
2006 static void gen_f##name(DisasContext *ctx) \
2008 if (unlikely(!ctx->fpu_enabled)) { \
2009 gen_exception(ctx, POWERPC_EXCP_FPU); \
2010 return; \
2012 /* NIP cannot be restored if the memory exception comes from a helper */ \
2013 gen_update_nip(ctx, ctx->nip - 4); \
2014 gen_reset_fpstatus(); \
2015 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2016 cpu_fpr[rB(ctx->opcode)]); \
2017 if (isfloat) { \
2018 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2020 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2021 set_fprf, Rc(ctx->opcode) != 0); \
2023 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
2024 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2025 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
2027 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
2028 static void gen_f##name(DisasContext *ctx) \
2030 if (unlikely(!ctx->fpu_enabled)) { \
2031 gen_exception(ctx, POWERPC_EXCP_FPU); \
2032 return; \
2034 /* NIP cannot be restored if the memory exception comes from a helper */ \
2035 gen_update_nip(ctx, ctx->nip - 4); \
2036 gen_reset_fpstatus(); \
2037 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2038 cpu_fpr[rC(ctx->opcode)]); \
2039 if (isfloat) { \
2040 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2042 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2043 set_fprf, Rc(ctx->opcode) != 0); \
2045 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
2046 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2047 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
2049 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
2050 static void gen_f##name(DisasContext *ctx) \
2052 if (unlikely(!ctx->fpu_enabled)) { \
2053 gen_exception(ctx, POWERPC_EXCP_FPU); \
2054 return; \
2056 /* NIP cannot be restored if the memory exception comes from a helper */ \
2057 gen_update_nip(ctx, ctx->nip - 4); \
2058 gen_reset_fpstatus(); \
2059 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2060 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2061 set_fprf, Rc(ctx->opcode) != 0); \
2064 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
2065 static void gen_f##name(DisasContext *ctx) \
2067 if (unlikely(!ctx->fpu_enabled)) { \
2068 gen_exception(ctx, POWERPC_EXCP_FPU); \
2069 return; \
2071 /* NIP cannot be restored if the memory exception comes from a helper */ \
2072 gen_update_nip(ctx, ctx->nip - 4); \
2073 gen_reset_fpstatus(); \
2074 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2075 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2076 set_fprf, Rc(ctx->opcode) != 0); \
2079 /* fadd - fadds */
2080 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
2081 /* fdiv - fdivs */
2082 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
2083 /* fmul - fmuls */
2084 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
2086 /* fre */
2087 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
2089 /* fres */
2090 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
2092 /* frsqrte */
2093 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
2095 /* frsqrtes */
2096 static void gen_frsqrtes(DisasContext *ctx)
2098 if (unlikely(!ctx->fpu_enabled)) {
2099 gen_exception(ctx, POWERPC_EXCP_FPU);
2100 return;
2102 /* NIP cannot be restored if the memory exception comes from a helper */
2103 gen_update_nip(ctx, ctx->nip - 4);
2104 gen_reset_fpstatus();
2105 gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2106 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2107 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2110 /* fsel */
2111 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
2112 /* fsub - fsubs */
2113 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
2114 /* Optional: */
2116 /* fsqrt */
2117 static void gen_fsqrt(DisasContext *ctx)
2119 if (unlikely(!ctx->fpu_enabled)) {
2120 gen_exception(ctx, POWERPC_EXCP_FPU);
2121 return;
2123 /* NIP cannot be restored if the memory exception comes from a helper */
2124 gen_update_nip(ctx, ctx->nip - 4);
2125 gen_reset_fpstatus();
2126 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2127 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2130 static void gen_fsqrts(DisasContext *ctx)
2132 if (unlikely(!ctx->fpu_enabled)) {
2133 gen_exception(ctx, POWERPC_EXCP_FPU);
2134 return;
2136 /* NIP cannot be restored if the memory exception comes from a helper */
2137 gen_update_nip(ctx, ctx->nip - 4);
2138 gen_reset_fpstatus();
2139 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2140 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2141 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2144 /*** Floating-Point multiply-and-add ***/
2145 /* fmadd - fmadds */
2146 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
2147 /* fmsub - fmsubs */
2148 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
2149 /* fnmadd - fnmadds */
2150 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
2151 /* fnmsub - fnmsubs */
2152 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
2154 /*** Floating-Point round & convert ***/
2155 /* fctiw */
2156 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
2157 /* fctiwz */
2158 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
2159 /* frsp */
2160 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
2161 #if defined(TARGET_PPC64)
2162 /* fcfid */
2163 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
2164 /* fctid */
2165 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
2166 /* fctidz */
2167 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
2168 #endif
2170 /* frin */
2171 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
2172 /* friz */
2173 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
2174 /* frip */
2175 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
2176 /* frim */
2177 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
2179 /*** Floating-Point compare ***/
2181 /* fcmpo */
2182 static void gen_fcmpo(DisasContext *ctx)
2184 TCGv_i32 crf;
2185 if (unlikely(!ctx->fpu_enabled)) {
2186 gen_exception(ctx, POWERPC_EXCP_FPU);
2187 return;
2189 /* NIP cannot be restored if the memory exception comes from a helper */
2190 gen_update_nip(ctx, ctx->nip - 4);
2191 gen_reset_fpstatus();
2192 crf = tcg_const_i32(crfD(ctx->opcode));
2193 gen_helper_fcmpo(cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)], crf);
2194 tcg_temp_free_i32(crf);
2195 gen_helper_float_check_status();
2198 /* fcmpu */
2199 static void gen_fcmpu(DisasContext *ctx)
2201 TCGv_i32 crf;
2202 if (unlikely(!ctx->fpu_enabled)) {
2203 gen_exception(ctx, POWERPC_EXCP_FPU);
2204 return;
2206 /* NIP cannot be restored if the memory exception comes from a helper */
2207 gen_update_nip(ctx, ctx->nip - 4);
2208 gen_reset_fpstatus();
2209 crf = tcg_const_i32(crfD(ctx->opcode));
2210 gen_helper_fcmpu(cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)], crf);
2211 tcg_temp_free_i32(crf);
2212 gen_helper_float_check_status();
2215 /*** Floating-point move ***/
2216 /* fabs */
2217 /* XXX: beware that fabs never checks for NaNs nor updates the FPSCR */
2218 GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT);
2220 /* fmr - fmr. */
2221 /* XXX: beware that fmr never checks for NaNs nor updates the FPSCR */
2222 static void gen_fmr(DisasContext *ctx)
2224 if (unlikely(!ctx->fpu_enabled)) {
2225 gen_exception(ctx, POWERPC_EXCP_FPU);
2226 return;
2228 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2229 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2232 /* fnabs */
2233 /* XXX: beware that fnabs never checks for NaNs nor updates the FPSCR */
2234 GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT);
2235 /* fneg */
2236 /* XXX: beware that fneg never checks for NaNs nor updates the FPSCR */
2237 GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT);
2239 /*** Floating-Point status & ctrl register ***/
2241 /* mcrfs */
2242 static void gen_mcrfs(DisasContext *ctx)
2244 int bfa;
2246 if (unlikely(!ctx->fpu_enabled)) {
2247 gen_exception(ctx, POWERPC_EXCP_FPU);
2248 return;
2250 bfa = 4 * (7 - crfS(ctx->opcode));
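/* FPSCR field crfS occupies bits [bfa+3:bfa]; copy it into CR field crfD
 * and then clear those four bits in the FPSCR.
 */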
2251 tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_fpscr, bfa);
2252 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
2253 tcg_gen_andi_i32(cpu_fpscr, cpu_fpscr, ~(0xF << bfa));
2256 /* mffs */
2257 static void gen_mffs(DisasContext *ctx)
2259 if (unlikely(!ctx->fpu_enabled)) {
2260 gen_exception(ctx, POWERPC_EXCP_FPU);
2261 return;
2263 gen_reset_fpstatus();
2264 tcg_gen_extu_i32_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
2265 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2268 /* mtfsb0 */
2269 static void gen_mtfsb0(DisasContext *ctx)
2271 uint8_t crb;
2273 if (unlikely(!ctx->fpu_enabled)) {
2274 gen_exception(ctx, POWERPC_EXCP_FPU);
2275 return;
2277 crb = 31 - crbD(ctx->opcode);
2278 gen_reset_fpstatus();
2279 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
2280 TCGv_i32 t0;
2281 /* NIP cannot be restored if the memory exception comes from a helper */
2282 gen_update_nip(ctx, ctx->nip - 4);
2283 t0 = tcg_const_i32(crb);
2284 gen_helper_fpscr_clrbit(t0);
2285 tcg_temp_free_i32(t0);
2287 if (unlikely(Rc(ctx->opcode) != 0)) {
2288 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2292 /* mtfsb1 */
2293 static void gen_mtfsb1(DisasContext *ctx)
2295 uint8_t crb;
2297 if (unlikely(!ctx->fpu_enabled)) {
2298 gen_exception(ctx, POWERPC_EXCP_FPU);
2299 return;
2301 crb = 31 - crbD(ctx->opcode);
2302 gen_reset_fpstatus();
2303 /* XXX: we pretend we can only do IEEE floating-point computations */
2304 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
2305 TCGv_i32 t0;
2306 /* NIP cannot be restored if the memory exception comes from a helper */
2307 gen_update_nip(ctx, ctx->nip - 4);
2308 t0 = tcg_const_i32(crb);
2309 gen_helper_fpscr_setbit(t0);
2310 tcg_temp_free_i32(t0);
2312 if (unlikely(Rc(ctx->opcode) != 0)) {
2313 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2315 /* We can raise a deferred exception */
2316 gen_helper_float_check_status();
2319 /* mtfsf */
2320 static void gen_mtfsf(DisasContext *ctx)
2322 TCGv_i32 t0;
2323 int L = ctx->opcode & 0x02000000;
2325 if (unlikely(!ctx->fpu_enabled)) {
2326 gen_exception(ctx, POWERPC_EXCP_FPU);
2327 return;
2329 /* NIP cannot be restored if the memory exception comes from a helper */
2330 gen_update_nip(ctx, ctx->nip - 4);
2331 gen_reset_fpstatus();
2332 if (L)
2333 t0 = tcg_const_i32(0xff);
2334 else
2335 t0 = tcg_const_i32(FM(ctx->opcode));
2336 gen_helper_store_fpscr(cpu_fpr[rB(ctx->opcode)], t0);
2337 tcg_temp_free_i32(t0);
2338 if (unlikely(Rc(ctx->opcode) != 0)) {
2339 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2341 /* We can raise a deferred exception */
2342 gen_helper_float_check_status();
2345 /* mtfsfi */
2346 static void gen_mtfsfi(DisasContext *ctx)
2348 int bf, sh;
2349 TCGv_i64 t0;
2350 TCGv_i32 t1;
2352 if (unlikely(!ctx->fpu_enabled)) {
2353 gen_exception(ctx, POWERPC_EXCP_FPU);
2354 return;
2356 bf = crbD(ctx->opcode) >> 2;
2357 sh = 7 - bf;
2358 /* NIP cannot be restored if the memory exception comes from a helper */
2359 gen_update_nip(ctx, ctx->nip - 4);
2360 gen_reset_fpstatus();
2361 t0 = tcg_const_i64(FPIMM(ctx->opcode) << (4 * sh));
2362 t1 = tcg_const_i32(1 << sh);
2363 gen_helper_store_fpscr(t0, t1);
2364 tcg_temp_free_i64(t0);
2365 tcg_temp_free_i32(t1);
2366 if (unlikely(Rc(ctx->opcode) != 0)) {
2367 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2369 /* We can raise a deferred exception */
2370 gen_helper_float_check_status();
2373 /*** Addressing modes ***/
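/* Effective-address helpers. On 64-bit CPUs running in 32-bit mode
 * (!sf_mode) the computed address is truncated to 32 bits, which is why
 * each helper ends with an ext32u in that case.
 */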
2374 /* Register indirect with immediate index: EA = (rA|0) + SIMM */
2375 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2376 target_long maskl)
2378 target_long simm = SIMM(ctx->opcode);
2380 simm &= ~maskl;
2381 if (rA(ctx->opcode) == 0) {
2382 #if defined(TARGET_PPC64)
2383 if (!ctx->sf_mode) {
2384 tcg_gen_movi_tl(EA, (uint32_t)simm);
2385 } else
2386 #endif
2387 tcg_gen_movi_tl(EA, simm);
2388 } else if (likely(simm != 0)) {
2389 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2390 #if defined(TARGET_PPC64)
2391 if (!ctx->sf_mode) {
2392 tcg_gen_ext32u_tl(EA, EA);
2394 #endif
2395 } else {
2396 #if defined(TARGET_PPC64)
2397 if (!ctx->sf_mode) {
2398 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2399 } else
2400 #endif
2401 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2405 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
2407 if (rA(ctx->opcode) == 0) {
2408 #if defined(TARGET_PPC64)
2409 if (!ctx->sf_mode) {
2410 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2411 } else
2412 #endif
2413 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2414 } else {
2415 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2416 #if defined(TARGET_PPC64)
2417 if (!ctx->sf_mode) {
2418 tcg_gen_ext32u_tl(EA, EA);
2420 #endif
2424 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
2426 if (rA(ctx->opcode) == 0) {
2427 tcg_gen_movi_tl(EA, 0);
2428 } else {
2429 #if defined(TARGET_PPC64)
2430 if (!ctx->sf_mode) {
2431 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2432 } else
2433 #endif
2434 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2438 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
2439 target_long val)
2441 tcg_gen_addi_tl(ret, arg1, val);
2442 #if defined(TARGET_PPC64)
2443 if (!ctx->sf_mode) {
2444 tcg_gen_ext32u_tl(ret, ret);
2446 #endif
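/* Raise an alignment interrupt when EA has any bit of 'mask' set; used here
 * by the lwarx/stwcx. family, which requires naturally aligned addresses.
 */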
2449 static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask)
2451 int l1 = gen_new_label();
2452 TCGv t0 = tcg_temp_new();
2453 TCGv_i32 t1, t2;
2454 /* NIP cannot be restored if the memory exception comes from a helper */
2455 gen_update_nip(ctx, ctx->nip - 4);
2456 tcg_gen_andi_tl(t0, EA, mask);
2457 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2458 t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
2459 t2 = tcg_const_i32(0);
2460 gen_helper_raise_exception_err(t1, t2);
2461 tcg_temp_free_i32(t1);
2462 tcg_temp_free_i32(t2);
2463 gen_set_label(l1);
2464 tcg_temp_free(t0);
2467 /*** Integer load ***/
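/* The gen_qemu_ldXX and gen_qemu_stXX wrappers issue the TCG memory op and
 * byte-swap the value when the CPU runs in little-endian mode (le_mode),
 * since the bare TCG ops use the guest's default big-endian layout.
 */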
2468 static inline void gen_qemu_ld8u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2470 tcg_gen_qemu_ld8u(arg1, arg2, ctx->mem_idx);
2473 static inline void gen_qemu_ld8s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2475 tcg_gen_qemu_ld8s(arg1, arg2, ctx->mem_idx);
2478 static inline void gen_qemu_ld16u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2480 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2481 if (unlikely(ctx->le_mode)) {
2482 tcg_gen_bswap16_tl(arg1, arg1);
2486 static inline void gen_qemu_ld16s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2488 if (unlikely(ctx->le_mode)) {
2489 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2490 tcg_gen_bswap16_tl(arg1, arg1);
2491 tcg_gen_ext16s_tl(arg1, arg1);
2492 } else {
2493 tcg_gen_qemu_ld16s(arg1, arg2, ctx->mem_idx);
2497 static inline void gen_qemu_ld32u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2499 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2500 if (unlikely(ctx->le_mode)) {
2501 tcg_gen_bswap32_tl(arg1, arg1);
2505 #if defined(TARGET_PPC64)
2506 static inline void gen_qemu_ld32s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2508 if (unlikely(ctx->le_mode)) {
2509 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2510 tcg_gen_bswap32_tl(arg1, arg1);
2511 tcg_gen_ext32s_tl(arg1, arg1);
2512 } else
2513 tcg_gen_qemu_ld32s(arg1, arg2, ctx->mem_idx);
2515 #endif
2517 static inline void gen_qemu_ld64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2519 tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
2520 if (unlikely(ctx->le_mode)) {
2521 tcg_gen_bswap64_i64(arg1, arg1);
2525 static inline void gen_qemu_st8(DisasContext *ctx, TCGv arg1, TCGv arg2)
2527 tcg_gen_qemu_st8(arg1, arg2, ctx->mem_idx);
2530 static inline void gen_qemu_st16(DisasContext *ctx, TCGv arg1, TCGv arg2)
2532 if (unlikely(ctx->le_mode)) {
2533 TCGv t0 = tcg_temp_new();
2534 tcg_gen_ext16u_tl(t0, arg1);
2535 tcg_gen_bswap16_tl(t0, t0);
2536 tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2537 tcg_temp_free(t0);
2538 } else {
2539 tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2543 static inline void gen_qemu_st32(DisasContext *ctx, TCGv arg1, TCGv arg2)
2545 if (unlikely(ctx->le_mode)) {
2546 TCGv t0 = tcg_temp_new();
2547 tcg_gen_ext32u_tl(t0, arg1);
2548 tcg_gen_bswap32_tl(t0, t0);
2549 tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2550 tcg_temp_free(t0);
2551 } else {
2552 tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2556 static inline void gen_qemu_st64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2558 if (unlikely(ctx->le_mode)) {
2559 TCGv_i64 t0 = tcg_temp_new_i64();
2560 tcg_gen_bswap64_i64(t0, arg1);
2561 tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
2562 tcg_temp_free_i64(t0);
2563 } else
2564 tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx);
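/* Integer load macros: GEN_LD is the D-form load, GEN_LDU the update form
 * (rA must be non-zero and different from rD), GEN_LDUX and GEN_LDX the
 * indexed forms; GEN_LDS instantiates all four for one width.
 */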
2567 #define GEN_LD(name, ldop, opc, type) \
2568 static void glue(gen_, name)(DisasContext *ctx) \
2570 TCGv EA; \
2571 gen_set_access_type(ctx, ACCESS_INT); \
2572 EA = tcg_temp_new(); \
2573 gen_addr_imm_index(ctx, EA, 0); \
2574 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2575 tcg_temp_free(EA); \
2578 #define GEN_LDU(name, ldop, opc, type) \
2579 static void glue(gen_, name##u)(DisasContext *ctx) \
2581 TCGv EA; \
2582 if (unlikely(rA(ctx->opcode) == 0 || \
2583 rA(ctx->opcode) == rD(ctx->opcode))) { \
2584 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2585 return; \
2587 gen_set_access_type(ctx, ACCESS_INT); \
2588 EA = tcg_temp_new(); \
2589 if (type == PPC_64B) \
2590 gen_addr_imm_index(ctx, EA, 0x03); \
2591 else \
2592 gen_addr_imm_index(ctx, EA, 0); \
2593 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2594 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2595 tcg_temp_free(EA); \
2598 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
2599 static void glue(gen_, name##ux)(DisasContext *ctx) \
2601 TCGv EA; \
2602 if (unlikely(rA(ctx->opcode) == 0 || \
2603 rA(ctx->opcode) == rD(ctx->opcode))) { \
2604 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2605 return; \
2607 gen_set_access_type(ctx, ACCESS_INT); \
2608 EA = tcg_temp_new(); \
2609 gen_addr_reg_index(ctx, EA); \
2610 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2611 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2612 tcg_temp_free(EA); \
2615 #define GEN_LDX(name, ldop, opc2, opc3, type) \
2616 static void glue(gen_, name##x)(DisasContext *ctx) \
2618 TCGv EA; \
2619 gen_set_access_type(ctx, ACCESS_INT); \
2620 EA = tcg_temp_new(); \
2621 gen_addr_reg_index(ctx, EA); \
2622 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2623 tcg_temp_free(EA); \
2626 #define GEN_LDS(name, ldop, op, type) \
2627 GEN_LD(name, ldop, op | 0x20, type); \
2628 GEN_LDU(name, ldop, op | 0x21, type); \
2629 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
2630 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
2632 /* lbz lbzu lbzux lbzx */
2633 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
2634 /* lha lhau lhaux lhax */
2635 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
2636 /* lhz lhzu lhzux lhzx */
2637 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
2638 /* lwz lwzu lwzux lwzx */
2639 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
2640 #if defined(TARGET_PPC64)
2641 /* lwaux */
2642 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
2643 /* lwax */
2644 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
2645 /* ldux */
2646 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B);
2647 /* ldx */
2648 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B);
2650 static void gen_ld(DisasContext *ctx)
2652 TCGv EA;
2653 if (Rc(ctx->opcode)) {
2654 if (unlikely(rA(ctx->opcode) == 0 ||
2655 rA(ctx->opcode) == rD(ctx->opcode))) {
2656 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2657 return;
2660 gen_set_access_type(ctx, ACCESS_INT);
2661 EA = tcg_temp_new();
2662 gen_addr_imm_index(ctx, EA, 0x03);
2663 if (ctx->opcode & 0x02) {
2664 /* lwa (lwau is undefined) */
2665 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2666 } else {
2667 /* ld - ldu */
2668 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2670 if (Rc(ctx->opcode))
2671 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2672 tcg_temp_free(EA);
2675 /* lq */
2676 static void gen_lq(DisasContext *ctx)
2678 #if defined(CONFIG_USER_ONLY)
2679 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2680 #else
2681 int ra, rd;
2682 TCGv EA;
2684 /* Restore CPU state */
2685 if (unlikely(ctx->mem_idx == 0)) {
2686 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2687 return;
2689 ra = rA(ctx->opcode);
2690 rd = rD(ctx->opcode);
2691 if (unlikely((rd & 1) || rd == ra)) {
2692 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2693 return;
2695 if (unlikely(ctx->le_mode)) {
2696 /* Little-endian mode is not handled */
2697 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2698 return;
2700 gen_set_access_type(ctx, ACCESS_INT);
2701 EA = tcg_temp_new();
2702 gen_addr_imm_index(ctx, EA, 0x0F);
2703 gen_qemu_ld64(ctx, cpu_gpr[rd], EA);
2704 gen_addr_add(ctx, EA, EA, 8);
2705 gen_qemu_ld64(ctx, cpu_gpr[rd+1], EA);
2706 tcg_temp_free(EA);
2707 #endif
2709 #endif
2711 /*** Integer store ***/
2712 #define GEN_ST(name, stop, opc, type) \
2713 static void glue(gen_, name)(DisasContext *ctx) \
2715 TCGv EA; \
2716 gen_set_access_type(ctx, ACCESS_INT); \
2717 EA = tcg_temp_new(); \
2718 gen_addr_imm_index(ctx, EA, 0); \
2719 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2720 tcg_temp_free(EA); \
2723 #define GEN_STU(name, stop, opc, type) \
2724 static void glue(gen_, stop##u)(DisasContext *ctx) \
2726 TCGv EA; \
2727 if (unlikely(rA(ctx->opcode) == 0)) { \
2728 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2729 return; \
2731 gen_set_access_type(ctx, ACCESS_INT); \
2732 EA = tcg_temp_new(); \
2733 if (type == PPC_64B) \
2734 gen_addr_imm_index(ctx, EA, 0x03); \
2735 else \
2736 gen_addr_imm_index(ctx, EA, 0); \
2737 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2738 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2739 tcg_temp_free(EA); \
2742 #define GEN_STUX(name, stop, opc2, opc3, type) \
2743 static void glue(gen_, name##ux)(DisasContext *ctx) \
2745 TCGv EA; \
2746 if (unlikely(rA(ctx->opcode) == 0)) { \
2747 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2748 return; \
2750 gen_set_access_type(ctx, ACCESS_INT); \
2751 EA = tcg_temp_new(); \
2752 gen_addr_reg_index(ctx, EA); \
2753 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2754 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2755 tcg_temp_free(EA); \
2758 #define GEN_STX(name, stop, opc2, opc3, type) \
2759 static void glue(gen_, name##x)(DisasContext *ctx) \
2761 TCGv EA; \
2762 gen_set_access_type(ctx, ACCESS_INT); \
2763 EA = tcg_temp_new(); \
2764 gen_addr_reg_index(ctx, EA); \
2765 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2766 tcg_temp_free(EA); \
2769 #define GEN_STS(name, stop, op, type) \
2770 GEN_ST(name, stop, op | 0x20, type); \
2771 GEN_STU(name, stop, op | 0x21, type); \
2772 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
2773 GEN_STX(name, stop, 0x17, op | 0x00, type)
2775 /* stb stbu stbux stbx */
2776 GEN_STS(stb, st8, 0x06, PPC_INTEGER);
2777 /* sth sthu sthux sthx */
2778 GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
2779 /* stw stwu stwux stwx */
2780 GEN_STS(stw, st32, 0x04, PPC_INTEGER);
2781 #if defined(TARGET_PPC64)
2782 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B);
2783 GEN_STX(std, st64, 0x15, 0x04, PPC_64B);
2785 static void gen_std(DisasContext *ctx)
2787 int rs;
2788 TCGv EA;
2790 rs = rS(ctx->opcode);
2791 if ((ctx->opcode & 0x3) == 0x2) {
2792 #if defined(CONFIG_USER_ONLY)
2793 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2794 #else
2795 /* stq */
2796 if (unlikely(ctx->mem_idx == 0)) {
2797 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2798 return;
2800 if (unlikely(rs & 1)) {
2801 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2802 return;
2804 if (unlikely(ctx->le_mode)) {
2805 /* Little-endian mode is not handled */
2806 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2807 return;
2809 gen_set_access_type(ctx, ACCESS_INT);
2810 EA = tcg_temp_new();
2811 gen_addr_imm_index(ctx, EA, 0x03);
2812 gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2813 gen_addr_add(ctx, EA, EA, 8);
2814 gen_qemu_st64(ctx, cpu_gpr[rs+1], EA);
2815 tcg_temp_free(EA);
2816 #endif
2817 } else {
2818 /* std / stdu */
2819 if (Rc(ctx->opcode)) {
2820 if (unlikely(rA(ctx->opcode) == 0)) {
2821 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2822 return;
2825 gen_set_access_type(ctx, ACCESS_INT);
2826 EA = tcg_temp_new();
2827 gen_addr_imm_index(ctx, EA, 0x03);
2828 gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2829 if (Rc(ctx->opcode))
2830 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2831 tcg_temp_free(EA);
2834 #endif
2835 /*** Integer load and store with byte reverse ***/
2836 /* lhbrx */
2837 static inline void gen_qemu_ld16ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2839 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2840 if (likely(!ctx->le_mode)) {
2841 tcg_gen_bswap16_tl(arg1, arg1);
2844 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
2846 /* lwbrx */
2847 static inline void gen_qemu_ld32ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2849 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2850 if (likely(!ctx->le_mode)) {
2851 tcg_gen_bswap32_tl(arg1, arg1);
2854 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
2856 /* sthbrx */
2857 static inline void gen_qemu_st16r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2859 if (likely(!ctx->le_mode)) {
2860 TCGv t0 = tcg_temp_new();
2861 tcg_gen_ext16u_tl(t0, arg1);
2862 tcg_gen_bswap16_tl(t0, t0);
2863 tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2864 tcg_temp_free(t0);
2865 } else {
2866 tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2869 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
2871 /* stwbrx */
2872 static inline void gen_qemu_st32r(DisasContext *ctx, TCGv arg1, TCGv arg2)
2874 if (likely(!ctx->le_mode)) {
2875 TCGv t0 = tcg_temp_new();
2876 tcg_gen_ext32u_tl(t0, arg1);
2877 tcg_gen_bswap32_tl(t0, t0);
2878 tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2879 tcg_temp_free(t0);
2880 } else {
2881 tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2884 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
2886 /*** Integer load and store multiple ***/
2888 /* lmw */
2889 static void gen_lmw(DisasContext *ctx)
2891 TCGv t0;
2892 TCGv_i32 t1;
2893 gen_set_access_type(ctx, ACCESS_INT);
2894 /* NIP cannot be restored if the memory exception comes from a helper */
2895 gen_update_nip(ctx, ctx->nip - 4);
2896 t0 = tcg_temp_new();
2897 t1 = tcg_const_i32(rD(ctx->opcode));
2898 gen_addr_imm_index(ctx, t0, 0);
2899 gen_helper_lmw(t0, t1);
2900 tcg_temp_free(t0);
2901 tcg_temp_free_i32(t1);
2904 /* stmw */
2905 static void gen_stmw(DisasContext *ctx)
2907 TCGv t0;
2908 TCGv_i32 t1;
2909 gen_set_access_type(ctx, ACCESS_INT);
2910 /* NIP cannot be restored if the memory exception comes from a helper */
2911 gen_update_nip(ctx, ctx->nip - 4);
2912 t0 = tcg_temp_new();
2913 t1 = tcg_const_i32(rS(ctx->opcode));
2914 gen_addr_imm_index(ctx, t0, 0);
2915 gen_helper_stmw(t0, t1);
2916 tcg_temp_free(t0);
2917 tcg_temp_free_i32(t1);
2920 /*** Integer load and store strings ***/
2922 /* lswi */
2923 /* PowerPC32 specification says we must generate an exception if
2924 * rA is in the range of registers to be loaded.
2925 * On the other hand, IBM says this is valid, but rA won't be loaded.
2926 * For now, I'll follow the spec...
2928 static void gen_lswi(DisasContext *ctx)
2930 TCGv t0;
2931 TCGv_i32 t1, t2;
2932 int nb = NB(ctx->opcode);
2933 int start = rD(ctx->opcode);
2934 int ra = rA(ctx->opcode);
2935 int nr;
2937 if (nb == 0)
2938 nb = 32;
2939 nr = nb / 4;
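/* The invalid-form check below also covers the case where the register
 * range wraps from r31 back to r0 and overlaps rA.
 */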
2940 if (unlikely(((start + nr) > 32 &&
2941 start <= ra && (start + nr - 32) > ra) ||
2942 ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
2943 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
2944 return;
2946 gen_set_access_type(ctx, ACCESS_INT);
2947 /* NIP cannot be restored if the memory exception comes from a helper */
2948 gen_update_nip(ctx, ctx->nip - 4);
2949 t0 = tcg_temp_new();
2950 gen_addr_register(ctx, t0);
2951 t1 = tcg_const_i32(nb);
2952 t2 = tcg_const_i32(start);
2953 gen_helper_lsw(t0, t1, t2);
2954 tcg_temp_free(t0);
2955 tcg_temp_free_i32(t1);
2956 tcg_temp_free_i32(t2);
2959 /* lswx */
2960 static void gen_lswx(DisasContext *ctx)
2962 TCGv t0;
2963 TCGv_i32 t1, t2, t3;
2964 gen_set_access_type(ctx, ACCESS_INT);
2965 /* NIP cannot be restored if the memory exception comes from a helper */
2966 gen_update_nip(ctx, ctx->nip - 4);
2967 t0 = tcg_temp_new();
2968 gen_addr_reg_index(ctx, t0);
2969 t1 = tcg_const_i32(rD(ctx->opcode));
2970 t2 = tcg_const_i32(rA(ctx->opcode));
2971 t3 = tcg_const_i32(rB(ctx->opcode));
2972 gen_helper_lswx(t0, t1, t2, t3);
2973 tcg_temp_free(t0);
2974 tcg_temp_free_i32(t1);
2975 tcg_temp_free_i32(t2);
2976 tcg_temp_free_i32(t3);
2979 /* stswi */
2980 static void gen_stswi(DisasContext *ctx)
2982 TCGv t0;
2983 TCGv_i32 t1, t2;
2984 int nb = NB(ctx->opcode);
2985 gen_set_access_type(ctx, ACCESS_INT);
2986 /* NIP cannot be restored if the memory exception comes from a helper */
2987 gen_update_nip(ctx, ctx->nip - 4);
2988 t0 = tcg_temp_new();
2989 gen_addr_register(ctx, t0);
2990 if (nb == 0)
2991 nb = 32;
2992 t1 = tcg_const_i32(nb);
2993 t2 = tcg_const_i32(rS(ctx->opcode));
2994 gen_helper_stsw(t0, t1, t2);
2995 tcg_temp_free(t0);
2996 tcg_temp_free_i32(t1);
2997 tcg_temp_free_i32(t2);
3000 /* stswx */
3001 static void gen_stswx(DisasContext *ctx)
3003 TCGv t0;
3004 TCGv_i32 t1, t2;
3005 gen_set_access_type(ctx, ACCESS_INT);
3006 /* NIP cannot be restored if the memory exception comes from a helper */
3007 gen_update_nip(ctx, ctx->nip - 4);
3008 t0 = tcg_temp_new();
3009 gen_addr_reg_index(ctx, t0);
3010 t1 = tcg_temp_new_i32();
3011 tcg_gen_trunc_tl_i32(t1, cpu_xer);
3012 tcg_gen_andi_i32(t1, t1, 0x7F);
3013 t2 = tcg_const_i32(rS(ctx->opcode));
3014 gen_helper_stsw(t0, t1, t2);
3015 tcg_temp_free(t0);
3016 tcg_temp_free_i32(t1);
3017 tcg_temp_free_i32(t2);
3020 /*** Memory synchronisation ***/
3021 /* eieio */
3022 static void gen_eieio(DisasContext *ctx)
3026 /* isync */
3027 static void gen_isync(DisasContext *ctx)
3029 gen_stop_exception(ctx);
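/* Load-and-reserve / store-conditional: lwarx (and ldarx) record the
 * reserved address in cpu_reserve and the loaded value in reserve_val;
 * stwcx./stdcx. only perform the store when the address still matches the
 * reservation, setting CR0.EQ accordingly. In user-only builds the
 * conditional store is deferred to the exception path via POWERPC_EXCP_STCX.
 */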
3032 /* lwarx */
3033 static void gen_lwarx(DisasContext *ctx)
3035 TCGv t0;
3036 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3037 gen_set_access_type(ctx, ACCESS_RES);
3038 t0 = tcg_temp_local_new();
3039 gen_addr_reg_index(ctx, t0);
3040 gen_check_align(ctx, t0, 0x03);
3041 gen_qemu_ld32u(ctx, gpr, t0);
3042 tcg_gen_mov_tl(cpu_reserve, t0);
3043 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUState, reserve_val));
3044 tcg_temp_free(t0);
3047 #if defined(CONFIG_USER_ONLY)
3048 static void gen_conditional_store (DisasContext *ctx, TCGv EA,
3049 int reg, int size)
3051 TCGv t0 = tcg_temp_new();
3052 uint32_t save_exception = ctx->exception;
3054 tcg_gen_st_tl(EA, cpu_env, offsetof(CPUState, reserve_ea));
3055 tcg_gen_movi_tl(t0, (size << 5) | reg);
3056 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, reserve_info));
3057 tcg_temp_free(t0);
3058 gen_update_nip(ctx, ctx->nip-4);
3059 ctx->exception = POWERPC_EXCP_BRANCH;
3060 gen_exception(ctx, POWERPC_EXCP_STCX);
3061 ctx->exception = save_exception;
3063 #endif
3065 /* stwcx. */
3066 static void gen_stwcx_(DisasContext *ctx)
3068 TCGv t0;
3069 gen_set_access_type(ctx, ACCESS_RES);
3070 t0 = tcg_temp_local_new();
3071 gen_addr_reg_index(ctx, t0);
3072 gen_check_align(ctx, t0, 0x03);
3073 #if defined(CONFIG_USER_ONLY)
3074 gen_conditional_store(ctx, t0, rS(ctx->opcode), 4);
3075 #else
3077 int l1;
3079 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
3080 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
3081 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
3082 l1 = gen_new_label();
3083 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3084 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3085 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3086 gen_set_label(l1);
3087 tcg_gen_movi_tl(cpu_reserve, -1);
3089 #endif
3090 tcg_temp_free(t0);
3093 #if defined(TARGET_PPC64)
3094 /* ldarx */
3095 static void gen_ldarx(DisasContext *ctx)
3097 TCGv t0;
3098 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3099 gen_set_access_type(ctx, ACCESS_RES);
3100 t0 = tcg_temp_local_new();
3101 gen_addr_reg_index(ctx, t0);
3102 gen_check_align(ctx, t0, 0x07);
3103 gen_qemu_ld64(ctx, gpr, t0);
3104 tcg_gen_mov_tl(cpu_reserve, t0);
3105 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUState, reserve_val));
3106 tcg_temp_free(t0);
3109 /* stdcx. */
3110 static void gen_stdcx_(DisasContext *ctx)
3112 TCGv t0;
3113 gen_set_access_type(ctx, ACCESS_RES);
3114 t0 = tcg_temp_local_new();
3115 gen_addr_reg_index(ctx, t0);
3116 gen_check_align(ctx, t0, 0x07);
3117 #if defined(CONFIG_USER_ONLY)
3118 gen_conditional_store(ctx, t0, rS(ctx->opcode), 8);
3119 #else
3121 int l1;
3122 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
3123 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
3124 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
3125 l1 = gen_new_label();
3126 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3127 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3128 gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3129 gen_set_label(l1);
3130 tcg_gen_movi_tl(cpu_reserve, -1);
3132 #endif
3133 tcg_temp_free(t0);
3135 #endif /* defined(TARGET_PPC64) */
3137 /* sync */
3138 static void gen_sync(DisasContext *ctx)
3142 /* wait */
3143 static void gen_wait(DisasContext *ctx)
3145 TCGv_i32 t0 = tcg_const_i32(1);
3146 tcg_gen_st_i32(t0, cpu_env, offsetof(CPUState, halted));
3147 tcg_temp_free_i32(t0);
3148 /* Stop translation, as the CPU is supposed to sleep from now on */
3149 gen_exception_err(ctx, EXCP_HLT, 1);
3152 /*** Floating-point load ***/
3153 #define GEN_LDF(name, ldop, opc, type) \
3154 static void glue(gen_, name)(DisasContext *ctx) \
3156 TCGv EA; \
3157 if (unlikely(!ctx->fpu_enabled)) { \
3158 gen_exception(ctx, POWERPC_EXCP_FPU); \
3159 return; \
3161 gen_set_access_type(ctx, ACCESS_FLOAT); \
3162 EA = tcg_temp_new(); \
3163 gen_addr_imm_index(ctx, EA, 0); \
3164 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3165 tcg_temp_free(EA); \
3168 #define GEN_LDUF(name, ldop, opc, type) \
3169 static void glue(gen_, name##u)(DisasContext *ctx) \
3171 TCGv EA; \
3172 if (unlikely(!ctx->fpu_enabled)) { \
3173 gen_exception(ctx, POWERPC_EXCP_FPU); \
3174 return; \
3176 if (unlikely(rA(ctx->opcode) == 0)) { \
3177 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3178 return; \
3180 gen_set_access_type(ctx, ACCESS_FLOAT); \
3181 EA = tcg_temp_new(); \
3182 gen_addr_imm_index(ctx, EA, 0); \
3183 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3184 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3185 tcg_temp_free(EA); \
3188 #define GEN_LDUXF(name, ldop, opc, type) \
3189 static void glue(gen_, name##ux)(DisasContext *ctx) \
3191 TCGv EA; \
3192 if (unlikely(!ctx->fpu_enabled)) { \
3193 gen_exception(ctx, POWERPC_EXCP_FPU); \
3194 return; \
3196 if (unlikely(rA(ctx->opcode) == 0)) { \
3197 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3198 return; \
3200 gen_set_access_type(ctx, ACCESS_FLOAT); \
3201 EA = tcg_temp_new(); \
3202 gen_addr_reg_index(ctx, EA); \
3203 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3204 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3205 tcg_temp_free(EA); \
3208 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
3209 static void glue(gen_, name##x)(DisasContext *ctx) \
3211 TCGv EA; \
3212 if (unlikely(!ctx->fpu_enabled)) { \
3213 gen_exception(ctx, POWERPC_EXCP_FPU); \
3214 return; \
3216 gen_set_access_type(ctx, ACCESS_FLOAT); \
3217 EA = tcg_temp_new(); \
3218 gen_addr_reg_index(ctx, EA); \
3219 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3220 tcg_temp_free(EA); \
3223 #define GEN_LDFS(name, ldop, op, type) \
3224 GEN_LDF(name, ldop, op | 0x20, type); \
3225 GEN_LDUF(name, ldop, op | 0x21, type); \
3226 GEN_LDUXF(name, ldop, op | 0x01, type); \
3227 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
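/* Single-precision loads are widened to the double-precision format kept in
 * the 64-bit FPRs (gen_qemu_ld32fs); single-precision stores convert back
 * with gen_qemu_st32fs.
 */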
3229 static inline void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3231 TCGv t0 = tcg_temp_new();
3232 TCGv_i32 t1 = tcg_temp_new_i32();
3233 gen_qemu_ld32u(ctx, t0, arg2);
3234 tcg_gen_trunc_tl_i32(t1, t0);
3235 tcg_temp_free(t0);
3236 gen_helper_float32_to_float64(arg1, t1);
3237 tcg_temp_free_i32(t1);
3240 /* lfd lfdu lfdux lfdx */
3241 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT);
3242 /* lfs lfsu lfsux lfsx */
3243 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);
3245 /*** Floating-point store ***/
3246 #define GEN_STF(name, stop, opc, type) \
3247 static void glue(gen_, name)(DisasContext *ctx) \
3249 TCGv EA; \
3250 if (unlikely(!ctx->fpu_enabled)) { \
3251 gen_exception(ctx, POWERPC_EXCP_FPU); \
3252 return; \
3254 gen_set_access_type(ctx, ACCESS_FLOAT); \
3255 EA = tcg_temp_new(); \
3256 gen_addr_imm_index(ctx, EA, 0); \
3257 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3258 tcg_temp_free(EA); \
3261 #define GEN_STUF(name, stop, opc, type) \
3262 static void glue(gen_, name##u)(DisasContext *ctx) \
3264 TCGv EA; \
3265 if (unlikely(!ctx->fpu_enabled)) { \
3266 gen_exception(ctx, POWERPC_EXCP_FPU); \
3267 return; \
3269 if (unlikely(rA(ctx->opcode) == 0)) { \
3270 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3271 return; \
3273 gen_set_access_type(ctx, ACCESS_FLOAT); \
3274 EA = tcg_temp_new(); \
3275 gen_addr_imm_index(ctx, EA, 0); \
3276 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3277 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3278 tcg_temp_free(EA); \
3281 #define GEN_STUXF(name, stop, opc, type) \
3282 static void glue(gen_, name##ux)(DisasContext *ctx) \
3284 TCGv EA; \
3285 if (unlikely(!ctx->fpu_enabled)) { \
3286 gen_exception(ctx, POWERPC_EXCP_FPU); \
3287 return; \
3289 if (unlikely(rA(ctx->opcode) == 0)) { \
3290 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3291 return; \
3293 gen_set_access_type(ctx, ACCESS_FLOAT); \
3294 EA = tcg_temp_new(); \
3295 gen_addr_reg_index(ctx, EA); \
3296 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3297 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3298 tcg_temp_free(EA); \
3301 #define GEN_STXF(name, stop, opc2, opc3, type) \
3302 static void glue(gen_, name##x)(DisasContext *ctx) \
3304 TCGv EA; \
3305 if (unlikely(!ctx->fpu_enabled)) { \
3306 gen_exception(ctx, POWERPC_EXCP_FPU); \
3307 return; \
3309 gen_set_access_type(ctx, ACCESS_FLOAT); \
3310 EA = tcg_temp_new(); \
3311 gen_addr_reg_index(ctx, EA); \
3312 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3313 tcg_temp_free(EA); \
3316 #define GEN_STFS(name, stop, op, type) \
3317 GEN_STF(name, stop, op | 0x20, type); \
3318 GEN_STUF(name, stop, op | 0x21, type); \
3319 GEN_STUXF(name, stop, op | 0x01, type); \
3320 GEN_STXF(name, stop, 0x17, op | 0x00, type)
3322 static inline void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3324 TCGv_i32 t0 = tcg_temp_new_i32();
3325 TCGv t1 = tcg_temp_new();
3326 gen_helper_float64_to_float32(t0, arg1);
3327 tcg_gen_extu_i32_tl(t1, t0);
3328 tcg_temp_free_i32(t0);
3329 gen_qemu_st32(ctx, t1, arg2);
3330 tcg_temp_free(t1);
3333 /* stfd stfdu stfdux stfdx */
3334 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT);
3335 /* stfs stfsu stfsux stfsx */
3336 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);
3338 /* Optional: */
3339 static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3341 TCGv t0 = tcg_temp_new();
3342 tcg_gen_trunc_i64_tl(t0, arg1);
3343 gen_qemu_st32(ctx, t0, arg2);
3344 tcg_temp_free(t0);
3346 /* stfiwx */
3347 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
3349 /*** Branch ***/
3350 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
3352 TranslationBlock *tb;
3353 tb = ctx->tb;
3354 #if defined(TARGET_PPC64)
3355 if (!ctx->sf_mode)
3356 dest = (uint32_t) dest;
3357 #endif
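/* Direct-chain to the destination TB only when it lies on the same guest
 * page and no single-stepping is active; otherwise set NIP explicitly and
 * exit so the main loop (or the debug/trace exception below) takes over.
 */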
3358 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
3359 likely(!ctx->singlestep_enabled)) {
3360 tcg_gen_goto_tb(n);
3361 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3362 tcg_gen_exit_tb((tcg_target_long)tb + n);
3363 } else {
3364 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3365 if (unlikely(ctx->singlestep_enabled)) {
3366 if ((ctx->singlestep_enabled &
3367 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
3368 ctx->exception == POWERPC_EXCP_BRANCH) {
3369 target_ulong tmp = ctx->nip;
3370 ctx->nip = dest;
3371 gen_exception(ctx, POWERPC_EXCP_TRACE);
3372 ctx->nip = tmp;
3374 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3375 gen_debug_exception(ctx);
3378 tcg_gen_exit_tb(0);
3382 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
3384 #if defined(TARGET_PPC64)
3385 if (ctx->sf_mode == 0)
3386 tcg_gen_movi_tl(cpu_lr, (uint32_t)nip);
3387 else
3388 #endif
3389 tcg_gen_movi_tl(cpu_lr, nip);
3392 /* b ba bl bla */
3393 static void gen_b(DisasContext *ctx)
3395 target_ulong li, target;
3397 ctx->exception = POWERPC_EXCP_BRANCH;
3398 /* sign extend LI */
3399 #if defined(TARGET_PPC64)
3400 if (ctx->sf_mode)
3401 li = ((int64_t)LI(ctx->opcode) << 38) >> 38;
3402 else
3403 #endif
3404 li = ((int32_t)LI(ctx->opcode) << 6) >> 6;
3405 if (likely(AA(ctx->opcode) == 0))
3406 target = ctx->nip + li - 4;
3407 else
3408 target = li;
3409 if (LK(ctx->opcode))
3410 gen_setlr(ctx, ctx->nip);
3411 gen_goto_tb(ctx, 0, target);
3414 #define BCOND_IM 0
3415 #define BCOND_LR 1
3416 #define BCOND_CTR 2
3418 static inline void gen_bcond(DisasContext *ctx, int type)
3420 uint32_t bo = BO(ctx->opcode);
3421 int l1;
3422 TCGv target;
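/* BO decoding: if bit 0x04 is clear, CTR is decremented and tested (bit
 * 0x02 selects branch on CTR == 0 versus CTR != 0); if bit 0x10 is clear,
 * CR bit BI is tested (bit 0x08 selects branch on set versus clear).
 * Label l1 is the fall-through, branch-not-taken path.
 */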
3424 ctx->exception = POWERPC_EXCP_BRANCH;
3425 if (type == BCOND_LR || type == BCOND_CTR) {
3426 target = tcg_temp_local_new();
3427 if (type == BCOND_CTR)
3428 tcg_gen_mov_tl(target, cpu_ctr);
3429 else
3430 tcg_gen_mov_tl(target, cpu_lr);
3431 } else {
3432 TCGV_UNUSED(target);
3434 if (LK(ctx->opcode))
3435 gen_setlr(ctx, ctx->nip);
3436 l1 = gen_new_label();
3437 if ((bo & 0x4) == 0) {
3438 /* Decrement and test CTR */
3439 TCGv temp = tcg_temp_new();
3440 if (unlikely(type == BCOND_CTR)) {
3441 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3442 return;
3444 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3445 #if defined(TARGET_PPC64)
3446 if (!ctx->sf_mode)
3447 tcg_gen_ext32u_tl(temp, cpu_ctr);
3448 else
3449 #endif
3450 tcg_gen_mov_tl(temp, cpu_ctr);
3451 if (bo & 0x2) {
3452 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3453 } else {
3454 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3456 tcg_temp_free(temp);
3458 if ((bo & 0x10) == 0) {
3459 /* Test CR */
3460 uint32_t bi = BI(ctx->opcode);
3461 uint32_t mask = 1 << (3 - (bi & 0x03));
3462 TCGv_i32 temp = tcg_temp_new_i32();
3464 if (bo & 0x8) {
3465 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3466 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3467 } else {
3468 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3469 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3471 tcg_temp_free_i32(temp);
3473 if (type == BCOND_IM) {
3474 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3475 if (likely(AA(ctx->opcode) == 0)) {
3476 gen_goto_tb(ctx, 0, ctx->nip + li - 4);
3477 } else {
3478 gen_goto_tb(ctx, 0, li);
3480 gen_set_label(l1);
3481 gen_goto_tb(ctx, 1, ctx->nip);
3482 } else {
3483 #if defined(TARGET_PPC64)
3484 if (!(ctx->sf_mode))
3485 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3486 else
3487 #endif
3488 tcg_gen_andi_tl(cpu_nip, target, ~3);
3489 tcg_gen_exit_tb(0);
3490 gen_set_label(l1);
3491 #if defined(TARGET_PPC64)
3492 if (!(ctx->sf_mode))
3493 tcg_gen_movi_tl(cpu_nip, (uint32_t)ctx->nip);
3494 else
3495 #endif
3496 tcg_gen_movi_tl(cpu_nip, ctx->nip);
3497 tcg_gen_exit_tb(0);
3501 static void gen_bc(DisasContext *ctx)
3503 gen_bcond(ctx, BCOND_IM);
3506 static void gen_bcctr(DisasContext *ctx)
3508 gen_bcond(ctx, BCOND_CTR);
3511 static void gen_bclr(DisasContext *ctx)
3513 gen_bcond(ctx, BCOND_LR);
3516 /*** Condition register logical ***/
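/* Each CR logical op works on single CR bits while the CR is stored as
 * eight 4-bit crf fields: the macro shifts the crbA and crbB bits into the
 * bit position of crbD, applies the TCG op, masks the result down to that
 * one bit and merges it back into the destination field.
 */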
3517 #define GEN_CRLOGIC(name, tcg_op, opc) \
3518 static void glue(gen_, name)(DisasContext *ctx) \
3520 uint8_t bitmask; \
3521 int sh; \
3522 TCGv_i32 t0, t1; \
3523 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3524 t0 = tcg_temp_new_i32(); \
3525 if (sh > 0) \
3526 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
3527 else if (sh < 0) \
3528 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
3529 else \
3530 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
3531 t1 = tcg_temp_new_i32(); \
3532 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3533 if (sh > 0) \
3534 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
3535 else if (sh < 0) \
3536 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
3537 else \
3538 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
3539 tcg_op(t0, t0, t1); \
3540 bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3541 tcg_gen_andi_i32(t0, t0, bitmask); \
3542 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3543 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
3544 tcg_temp_free_i32(t0); \
3545 tcg_temp_free_i32(t1); \
3548 /* crand */
3549 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3550 /* crandc */
3551 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3552 /* creqv */
3553 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3554 /* crnand */
3555 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3556 /* crnor */
3557 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3558 /* cror */
3559 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3560 /* crorc */
3561 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3562 /* crxor */
3563 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
3565 /* mcrf */
3566 static void gen_mcrf(DisasContext *ctx)
3568 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3571 /*** System linkage ***/
3573 /* rfi (mem_idx only) */
3574 static void gen_rfi(DisasContext *ctx)
3576 #if defined(CONFIG_USER_ONLY)
3577 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3578 #else
3579 /* Restore CPU state */
3580 if (unlikely(!ctx->mem_idx)) {
3581 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3582 return;
3584 gen_helper_rfi();
3585 gen_sync_exception(ctx);
3586 #endif
3589 #if defined(TARGET_PPC64)
3590 static void gen_rfid(DisasContext *ctx)
3592 #if defined(CONFIG_USER_ONLY)
3593 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3594 #else
3595 /* Restore CPU state */
3596 if (unlikely(!ctx->mem_idx)) {
3597 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3598 return;
3600 gen_helper_rfid();
3601 gen_sync_exception(ctx);
3602 #endif
3605 static void gen_hrfid(DisasContext *ctx)
3607 #if defined(CONFIG_USER_ONLY)
3608 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3609 #else
3610 /* Restore CPU state */
3611 if (unlikely(ctx->mem_idx <= 1)) {
3612 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3613 return;
3615 gen_helper_hrfid();
3616 gen_sync_exception(ctx);
3617 #endif
3619 #endif
3621 /* sc */
3622 #if defined(CONFIG_USER_ONLY)
3623 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3624 #else
3625 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3626 #endif
3627 static void gen_sc(DisasContext *ctx)
3629 uint32_t lev;
3631 lev = (ctx->opcode >> 5) & 0x7F;
3632 gen_exception_err(ctx, POWERPC_SYSCALL, lev);
3635 /*** Trap ***/
3637 /* tw */
3638 static void gen_tw(DisasContext *ctx)
3640 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3641 /* Update the nip since this might generate a trap exception */
3642 gen_update_nip(ctx, ctx->nip);
3643 gen_helper_tw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
3644 tcg_temp_free_i32(t0);
3647 /* twi */
3648 static void gen_twi(DisasContext *ctx)
3650 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3651 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3652 /* Update the nip since this might generate a trap exception */
3653 gen_update_nip(ctx, ctx->nip);
3654 gen_helper_tw(cpu_gpr[rA(ctx->opcode)], t0, t1);
3655 tcg_temp_free(t0);
3656 tcg_temp_free_i32(t1);
3659 #if defined(TARGET_PPC64)
3660 /* td */
3661 static void gen_td(DisasContext *ctx)
3663 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3664 /* Update the nip since this might generate a trap exception */
3665 gen_update_nip(ctx, ctx->nip);
3666 gen_helper_td(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
3667 tcg_temp_free_i32(t0);
3670 /* tdi */
3671 static void gen_tdi(DisasContext *ctx)
3673 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3674 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3675 /* Update the nip since this might generate a trap exception */
3676 gen_update_nip(ctx, ctx->nip);
3677 gen_helper_td(cpu_gpr[rA(ctx->opcode)], t0, t1);
3678 tcg_temp_free(t0);
3679 tcg_temp_free_i32(t1);
3681 #endif
3683 /*** Processor control ***/
3685 /* mcrxr */
3686 static void gen_mcrxr(DisasContext *ctx)
3688 tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], cpu_xer);
3689 tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], XER_CA);
3690 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_SO | 1 << XER_OV | 1 << XER_CA));
3693 /* mfcr mfocrf */
3694 static void gen_mfcr(DisasContext *ctx)
3696 uint32_t crm, crn;
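/* mfocrf (opcode bit 20 set) with a single-field mask copies just that CR
 * field into the matching nibble of rD (other mask values leave rD
 * untouched); plain mfcr rebuilds the full CR image by concatenating
 * crf0..crf7.
 */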
3698 if (likely(ctx->opcode & 0x00100000)) {
3699 crm = CRM(ctx->opcode);
3700 if (likely(crm && ((crm & (crm - 1)) == 0))) {
3701 crn = ctz32 (crm);
3702 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
3703 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
3704 cpu_gpr[rD(ctx->opcode)], crn * 4);
3706 } else {
3707 TCGv_i32 t0 = tcg_temp_new_i32();
3708 tcg_gen_mov_i32(t0, cpu_crf[0]);
3709 tcg_gen_shli_i32(t0, t0, 4);
3710 tcg_gen_or_i32(t0, t0, cpu_crf[1]);
3711 tcg_gen_shli_i32(t0, t0, 4);
3712 tcg_gen_or_i32(t0, t0, cpu_crf[2]);
3713 tcg_gen_shli_i32(t0, t0, 4);
3714 tcg_gen_or_i32(t0, t0, cpu_crf[3]);
3715 tcg_gen_shli_i32(t0, t0, 4);
3716 tcg_gen_or_i32(t0, t0, cpu_crf[4]);
3717 tcg_gen_shli_i32(t0, t0, 4);
3718 tcg_gen_or_i32(t0, t0, cpu_crf[5]);
3719 tcg_gen_shli_i32(t0, t0, 4);
3720 tcg_gen_or_i32(t0, t0, cpu_crf[6]);
3721 tcg_gen_shli_i32(t0, t0, 4);
3722 tcg_gen_or_i32(t0, t0, cpu_crf[7]);
3723 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
3724 tcg_temp_free_i32(t0);
3728 /* mfmsr */
3729 static void gen_mfmsr(DisasContext *ctx)
3731 #if defined(CONFIG_USER_ONLY)
3732 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3733 #else
3734 if (unlikely(!ctx->mem_idx)) {
3735 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3736 return;
3738 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
3739 #endif
3742 static void spr_noaccess(void *opaque, int gprn, int sprn)
3744 #if 0
3745 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
3746 printf("ERROR: try to access SPR %d !\n", sprn);
3747 #endif
3749 #define SPR_NOACCESS (&spr_noaccess)
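/* SPR accesses are dispatched through per-SPR callback tables: uea_* in
 * user mode, oea_* in supervisor mode and hea_* when mem_idx == 2
 * (hypervisor context). A NULL callback means the SPR does not exist;
 * SPR_NOACCESS means it exists but is privileged.
 */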
3751 /* mfspr */
3752 static inline void gen_op_mfspr(DisasContext *ctx)
3754 void (*read_cb)(void *opaque, int gprn, int sprn);
3755 uint32_t sprn = SPR(ctx->opcode);
3757 #if !defined(CONFIG_USER_ONLY)
3758 if (ctx->mem_idx == 2)
3759 read_cb = ctx->spr_cb[sprn].hea_read;
3760 else if (ctx->mem_idx)
3761 read_cb = ctx->spr_cb[sprn].oea_read;
3762 else
3763 #endif
3764 read_cb = ctx->spr_cb[sprn].uea_read;
3765 if (likely(read_cb != NULL)) {
3766 if (likely(read_cb != SPR_NOACCESS)) {
3767 (*read_cb)(ctx, rD(ctx->opcode), sprn);
3768 } else {
3769 /* Privilege exception */
3770 /* This is a hack to avoid warnings when running Linux:
3771 * this OS breaks the PowerPC virtualisation model,
3772 * allowing userland applications to read the PVR
3774 if (sprn != SPR_PVR) {
3775 qemu_log("Trying to read privileged spr %d %03x at "
3776 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3777 printf("Trying to read privileged spr %d %03x at "
3778 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3780 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3782 } else {
3783 /* Not defined */
3784 qemu_log("Trying to read invalid spr %d %03x at "
3785 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3786 printf("Trying to read invalid spr %d %03x at " TARGET_FMT_lx "\n",
3787 sprn, sprn, ctx->nip);
3788 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
3792 static void gen_mfspr(DisasContext *ctx)
3794 gen_op_mfspr(ctx);
3797 /* mftb */
3798 static void gen_mftb(DisasContext *ctx)
3800 gen_op_mfspr(ctx);
3803 /* mtcrf mtocrf */
3804 static void gen_mtcrf(DisasContext *ctx)
3806 uint32_t crm, crn;
3808 crm = CRM(ctx->opcode);
3809 if (likely((ctx->opcode & 0x00100000))) {
3810 if (crm && ((crm & (crm - 1)) == 0)) {
3811 TCGv_i32 temp = tcg_temp_new_i32();
3812 crn = ctz32 (crm);
3813 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3814 tcg_gen_shri_i32(temp, temp, crn * 4);
3815 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
3816 tcg_temp_free_i32(temp);
3818 } else {
3819 TCGv_i32 temp = tcg_temp_new_i32();
3820 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3821 for (crn = 0 ; crn < 8 ; crn++) {
3822 if (crm & (1 << crn)) {
3823 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
3824 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
3827 tcg_temp_free_i32(temp);
3831 /* mtmsr */
3832 #if defined(TARGET_PPC64)
3833 static void gen_mtmsrd(DisasContext *ctx)
3835 #if defined(CONFIG_USER_ONLY)
3836 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3837 #else
3838 if (unlikely(!ctx->mem_idx)) {
3839 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3840 return;
3842 if (ctx->opcode & 0x00010000) {
3843 /* Special form that does not need any synchronisation */
3844 TCGv t0 = tcg_temp_new();
3845 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3846 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
3847 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3848 tcg_temp_free(t0);
3849 } else {
3850 /* XXX: we need to update nip before the store:
3851 * if we enter power saving mode, we will exit the loop
3852 * directly from ppc_store_msr
3854 gen_update_nip(ctx, ctx->nip);
3855 gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
3856 /* Must stop the translation as machine state (may have) changed */
3857 /* Note that mtmsr is not always defined as context-synchronizing */
3858 gen_stop_exception(ctx);
3860 #endif
3862 #endif
3864 static void gen_mtmsr(DisasContext *ctx)
3866 #if defined(CONFIG_USER_ONLY)
3867 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3868 #else
3869 if (unlikely(!ctx->mem_idx)) {
3870 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3871 return;
3873 if (ctx->opcode & 0x00010000) {
3874 /* Special form that does not need any synchronisation */
3875 TCGv t0 = tcg_temp_new();
3876 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3877 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
3878 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3879 tcg_temp_free(t0);
3880 } else {
3881 /* XXX: we need to update nip before the store:
3882 * if we enter power saving mode, we will exit the loop
3883 * directly from ppc_store_msr
3885 gen_update_nip(ctx, ctx->nip);
3886 #if defined(TARGET_PPC64)
3887 if (!ctx->sf_mode) {
3888 TCGv t0 = tcg_temp_new();
3889 TCGv t1 = tcg_temp_new();
3890 tcg_gen_andi_tl(t0, cpu_msr, 0xFFFFFFFF00000000ULL);
3891 tcg_gen_ext32u_tl(t1, cpu_gpr[rS(ctx->opcode)]);
3892 tcg_gen_or_tl(t0, t0, t1);
3893 tcg_temp_free(t1);
3894 gen_helper_store_msr(t0);
3895 tcg_temp_free(t0);
3896 } else
3897 #endif
3898 gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
3899 /* Must stop the translation as machine state (may have) changed */
3900 /* Note that mtmsr is not always defined as context-synchronizing */
3901 gen_stop_exception(ctx);
3903 #endif
3906 /* mtspr */
3907 static void gen_mtspr(DisasContext *ctx)
3909 void (*write_cb)(void *opaque, int sprn, int gprn);
3910 uint32_t sprn = SPR(ctx->opcode);
3912 #if !defined(CONFIG_USER_ONLY)
3913 if (ctx->mem_idx == 2)
3914 write_cb = ctx->spr_cb[sprn].hea_write;
3915 else if (ctx->mem_idx)
3916 write_cb = ctx->spr_cb[sprn].oea_write;
3917 else
3918 #endif
3919 write_cb = ctx->spr_cb[sprn].uea_write;
3920 if (likely(write_cb != NULL)) {
3921 if (likely(write_cb != SPR_NOACCESS)) {
3922 (*write_cb)(ctx, sprn, rS(ctx->opcode));
3923 } else {
3924 /* Privilege exception */
3925 qemu_log("Trying to write privileged spr %d %03x at "
3926 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3927 printf("Trying to write privileged spr %d %03x at " TARGET_FMT_lx
3928 "\n", sprn, sprn, ctx->nip);
3929 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
3931 } else {
3932 /* Not defined */
3933 qemu_log("Trying to write invalid spr %d %03x at "
3934 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip);
3935 printf("Trying to write invalid spr %d %03x at " TARGET_FMT_lx "\n",
3936 sprn, sprn, ctx->nip);
3937 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
3941 /*** Cache management ***/
3943 /* dcbf */
3944 static void gen_dcbf(DisasContext *ctx)
3946 /* XXX: specification says this is treated as a load by the MMU */
3947 TCGv t0;
3948 gen_set_access_type(ctx, ACCESS_CACHE);
3949 t0 = tcg_temp_new();
3950 gen_addr_reg_index(ctx, t0);
3951 gen_qemu_ld8u(ctx, t0, t0);
3952 tcg_temp_free(t0);
3955 /* dcbi (Supervisor only) */
3956 static void gen_dcbi(DisasContext *ctx)
3958 #if defined(CONFIG_USER_ONLY)
3959 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3960 #else
3961 TCGv EA, val;
3962 if (unlikely(!ctx->mem_idx)) {
3963 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3964 return;
3966 EA = tcg_temp_new();
3967 gen_set_access_type(ctx, ACCESS_CACHE);
3968 gen_addr_reg_index(ctx, EA);
3969 val = tcg_temp_new();
3970 /* XXX: specification says this should be treated as a store by the MMU */
3971 gen_qemu_ld8u(ctx, val, EA);
3972 gen_qemu_st8(ctx, val, EA);
3973 tcg_temp_free(val);
3974 tcg_temp_free(EA);
3975 #endif
3978 /* dcbst */
3979 static void gen_dcbst(DisasContext *ctx)
3981 /* XXX: specification says this is treated as a load by the MMU */
3982 TCGv t0;
3983 gen_set_access_type(ctx, ACCESS_CACHE);
3984 t0 = tcg_temp_new();
3985 gen_addr_reg_index(ctx, t0);
3986 gen_qemu_ld8u(ctx, t0, t0);
3987 tcg_temp_free(t0);
3990 /* dcbt */
3991 static void gen_dcbt(DisasContext *ctx)
3993 /* interpreted as no-op */
3994 /* XXX: specification says this is treated as a load by the MMU
3995 * but does not generate any exception
3999 /* dcbtst */
4000 static void gen_dcbtst(DisasContext *ctx)
4002 /* interpreted as no-op */
4003 /* XXX: specification says this is treated as a load by the MMU
4004 * but does not generate any exception
4008 /* dcbz */
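/* dcbz zeroes a whole data cache block, so the work is done in a helper;
 * NIP is saved first because the helper may raise a memory exception.
 */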
4009 static void gen_dcbz(DisasContext *ctx)
4011 TCGv t0;
4012 gen_set_access_type(ctx, ACCESS_CACHE);
4013 /* NIP cannot be restored if the memory exception comes from a helper */
4014 gen_update_nip(ctx, ctx->nip - 4);
4015 t0 = tcg_temp_new();
4016 gen_addr_reg_index(ctx, t0);
4017 gen_helper_dcbz(t0);
4018 tcg_temp_free(t0);
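/* 970 variant: opcode bit 0x00200000 selects between the generic dcbz
 * helper and the 970-specific one.
 */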
4021 static void gen_dcbz_970(DisasContext *ctx)
4023 TCGv t0;
4024 gen_set_access_type(ctx, ACCESS_CACHE);
4025 /* NIP cannot be restored if the memory exception comes from a helper */
4026 gen_update_nip(ctx, ctx->nip - 4);
4027 t0 = tcg_temp_new();
4028 gen_addr_reg_index(ctx, t0);
4029 if (ctx->opcode & 0x00200000)
4030 gen_helper_dcbz(t0);
4031 else
4032 gen_helper_dcbz_970(t0);
4033 tcg_temp_free(t0);
4036 /* dst / dstt */
4037 static void gen_dst(DisasContext *ctx)
4039 if (rA(ctx->opcode) == 0) {
4040 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4041 } else {
4042 /* interpreted as no-op */
4046 /* dstst / dststt */
4047 static void gen_dstst(DisasContext *ctx)
4049 if (rA(ctx->opcode) == 0) {
4050 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4051 } else {
4052 /* interpreted as no-op */
4057 /* dss / dssall */
4058 static void gen_dss(DisasContext *ctx)
4060 /* interpreted as no-op */
4063 /* icbi */
4064 static void gen_icbi(DisasContext *ctx)
4066 TCGv t0;
4067 gen_set_access_type(ctx, ACCESS_CACHE);
4068 /* NIP cannot be restored if the memory exception comes from a helper */
4069 gen_update_nip(ctx, ctx->nip - 4);
4070 t0 = tcg_temp_new();
4071 gen_addr_reg_index(ctx, t0);
4072 gen_helper_icbi(t0);
4073 tcg_temp_free(t0);
4076 /* Optional: */
4077 /* dcba */
4078 static void gen_dcba(DisasContext *ctx)
4080 /* interpreted as no-op */
4081 /* XXX: specification says this is treated as a store by the MMU
4082 * but does not generate any exception
4086 /*** Segment register manipulation ***/
4087 /* Supervisor only: */
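/* mfsr/mtsr take the segment register number from the SR field of the
 * opcode, while mfsrin/mtsrin use bits 0:3 of rB (hence the shift by 28
 * below).
 */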
4089 /* mfsr */
4090 static void gen_mfsr(DisasContext *ctx)
4092 #if defined(CONFIG_USER_ONLY)
4093 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4094 #else
4095 TCGv t0;
4096 if (unlikely(!ctx->mem_idx)) {
4097 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4098 return;
4100 t0 = tcg_const_tl(SR(ctx->opcode));
4101 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4102 tcg_temp_free(t0);
4103 #endif
4106 /* mfsrin */
4107 static void gen_mfsrin(DisasContext *ctx)
4109 #if defined(CONFIG_USER_ONLY)
4110 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4111 #else
4112 TCGv t0;
4113 if (unlikely(!ctx->mem_idx)) {
4114 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4115 return;
4117 t0 = tcg_temp_new();
4118 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4119 tcg_gen_andi_tl(t0, t0, 0xF);
4120 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4121 tcg_temp_free(t0);
4122 #endif
4125 /* mtsr */
4126 static void gen_mtsr(DisasContext *ctx)
4128 #if defined(CONFIG_USER_ONLY)
4129 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4130 #else
4131 TCGv t0;
4132 if (unlikely(!ctx->mem_idx)) {
4133 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4134 return;
4136 t0 = tcg_const_tl(SR(ctx->opcode));
4137 gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
4138 tcg_temp_free(t0);
4139 #endif
4142 /* mtsrin */
4143 static void gen_mtsrin(DisasContext *ctx)
4145 #if defined(CONFIG_USER_ONLY)
4146 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4147 #else
4148 TCGv t0;
4149 if (unlikely(!ctx->mem_idx)) {
4150 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4151 return;
4153 t0 = tcg_temp_new();
4154 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4155 tcg_gen_andi_tl(t0, t0, 0xF);
4156 gen_helper_store_sr(t0, cpu_gpr[rD(ctx->opcode)]);
4157 tcg_temp_free(t0);
4158 #endif
4161 #if defined(TARGET_PPC64)
4162 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
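/* These mirror the 32-bit segment register instructions so that 32-bit
 * operating systems can still be run on an SLB-based MMU.
 */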
4164 /* mfsr */
4165 static void gen_mfsr_64b(DisasContext *ctx)
4167 #if defined(CONFIG_USER_ONLY)
4168 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4169 #else
4170 TCGv t0;
4171 if (unlikely(!ctx->mem_idx)) {
4172 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4173 return;
4175 t0 = tcg_const_tl(SR(ctx->opcode));
4176 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4177 tcg_temp_free(t0);
4178 #endif
4181 /* mfsrin */
4182 static void gen_mfsrin_64b(DisasContext *ctx)
4184 #if defined(CONFIG_USER_ONLY)
4185 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4186 #else
4187 TCGv t0;
4188 if (unlikely(!ctx->mem_idx)) {
4189 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4190 return;
4192 t0 = tcg_temp_new();
4193 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4194 tcg_gen_andi_tl(t0, t0, 0xF);
4195 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
4196 tcg_temp_free(t0);
4197 #endif
4200 /* mtsr */
4201 static void gen_mtsr_64b(DisasContext *ctx)
4203 #if defined(CONFIG_USER_ONLY)
4204 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4205 #else
4206 TCGv t0;
4207 if (unlikely(!ctx->mem_idx)) {
4208 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4209 return;
4211 t0 = tcg_const_tl(SR(ctx->opcode));
4212 gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
4213 tcg_temp_free(t0);
4214 #endif
4217 /* mtsrin */
4218 static void gen_mtsrin_64b(DisasContext *ctx)
4220 #if defined(CONFIG_USER_ONLY)
4221 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4222 #else
4223 TCGv t0;
4224 if (unlikely(!ctx->mem_idx)) {
4225 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4226 return;
4228 t0 = tcg_temp_new();
4229 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4230 tcg_gen_andi_tl(t0, t0, 0xF);
4231 gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
4232 tcg_temp_free(t0);
4233 #endif
4236 /* slbmte */
4237 static void gen_slbmte(DisasContext *ctx)
4239 #if defined(CONFIG_USER_ONLY)
4240 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4241 #else
4242 if (unlikely(!ctx->mem_idx)) {
4243 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4244 return;
4246 gen_helper_store_slb(cpu_gpr[rB(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
4247 #endif
4250 static void gen_slbmfee(DisasContext *ctx)
4252 #if defined(CONFIG_USER_ONLY)
4253 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4254 #else
4255 if (unlikely(!ctx->mem_idx)) {
4256 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4257 return;
4259 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)],
4260 cpu_gpr[rB(ctx->opcode)]);
4261 #endif
4264 static void gen_slbmfev(DisasContext *ctx)
4266 #if defined(CONFIG_USER_ONLY)
4267 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4268 #else
4269 if (unlikely(!ctx->mem_idx)) {
4270 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4271 return;
4273 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)],
4274 cpu_gpr[rB(ctx->opcode)]);
4275 #endif
4277 #endif /* defined(TARGET_PPC64) */
4279 /*** Lookaside buffer management ***/
4280 /* Optional & mem_idx only: */
4282 /* tlbia */
4283 static void gen_tlbia(DisasContext *ctx)
4285 #if defined(CONFIG_USER_ONLY)
4286 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4287 #else
4288 if (unlikely(!ctx->mem_idx)) {
4289 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4290 return;
4292 gen_helper_tlbia();
4293 #endif
4296 /* tlbiel */
4297 static void gen_tlbiel(DisasContext *ctx)
4299 #if defined(CONFIG_USER_ONLY)
4300 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4301 #else
4302 if (unlikely(!ctx->mem_idx)) {
4303 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4304 return;
4306 gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
4307 #endif
4310 /* tlbie */
4311 static void gen_tlbie(DisasContext *ctx)
4313 #if defined(CONFIG_USER_ONLY)
4314 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4315 #else
4316 if (unlikely(!ctx->mem_idx)) {
4317 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4318 return;
4320 #if defined(TARGET_PPC64)
4321 if (!ctx->sf_mode) {
4322 TCGv t0 = tcg_temp_new();
4323 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
4324 gen_helper_tlbie(t0);
4325 tcg_temp_free(t0);
4326 } else
4327 #endif
4328 gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
4329 #endif
4332 /* tlbsync */
4333 static void gen_tlbsync(DisasContext *ctx)
4335 #if defined(CONFIG_USER_ONLY)
4336 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4337 #else
4338 if (unlikely(!ctx->mem_idx)) {
4339 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4340 return;
4342 /* This has no effect: it should ensure that all previous
4343 * tlbie instructions have completed
4345 gen_stop_exception(ctx);
4346 #endif
4349 #if defined(TARGET_PPC64)
4350 /* slbia */
4351 static void gen_slbia(DisasContext *ctx)
4353 #if defined(CONFIG_USER_ONLY)
4354 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4355 #else
4356 if (unlikely(!ctx->mem_idx)) {
4357 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4358 return;
4360 gen_helper_slbia();
4361 #endif
4364 /* slbie */
4365 static void gen_slbie(DisasContext *ctx)
4367 #if defined(CONFIG_USER_ONLY)
4368 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4369 #else
4370 if (unlikely(!ctx->mem_idx)) {
4371 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4372 return;
4374 gen_helper_slbie(cpu_gpr[rB(ctx->opcode)]);
4375 #endif
4377 #endif
4379 /*** External control ***/
4380 /* Optional: */
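/* eciwx/ecowx perform word-sized external control accesses; alignment is
 * checked here, but the EAR[E] enable bit is not (see the XXX notes below).
 */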
4382 /* eciwx */
4383 static void gen_eciwx(DisasContext *ctx)
4385 TCGv t0;
4386 /* Should check EAR[E] ! */
4387 gen_set_access_type(ctx, ACCESS_EXT);
4388 t0 = tcg_temp_new();
4389 gen_addr_reg_index(ctx, t0);
4390 gen_check_align(ctx, t0, 0x03);
4391 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4392 tcg_temp_free(t0);
4395 /* ecowx */
4396 static void gen_ecowx(DisasContext *ctx)
4398 TCGv t0;
4399 /* Should check EAR[E] ! */
4400 gen_set_access_type(ctx, ACCESS_EXT);
4401 t0 = tcg_temp_new();
4402 gen_addr_reg_index(ctx, t0);
4403 gen_check_align(ctx, t0, 0x03);
4404 gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4405 tcg_temp_free(t0);
4408 /* PowerPC 601 specific instructions */
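/* Many of these POWER-heritage instructions also read or update the MQ
 * register (SPR_MQ), which only the 601 provides.
 */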
4410 /* abs - abs. */
4411 static void gen_abs(DisasContext *ctx)
4413 int l1 = gen_new_label();
4414 int l2 = gen_new_label();
4415 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
4416 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4417 tcg_gen_br(l2);
4418 gen_set_label(l1);
4419 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4420 gen_set_label(l2);
4421 if (unlikely(Rc(ctx->opcode) != 0))
4422 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4425 /* abso - abso. */
4426 static void gen_abso(DisasContext *ctx)
4428 int l1 = gen_new_label();
4429 int l2 = gen_new_label();
4430 int l3 = gen_new_label();
4431 /* Start with XER OV disabled, the most likely case */
4432 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4433 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
4434 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
4435 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4436 tcg_gen_br(l2);
4437 gen_set_label(l1);
4438 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4439 tcg_gen_br(l3);
4440 gen_set_label(l2);
4441 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4442 gen_set_label(l3);
4443 if (unlikely(Rc(ctx->opcode) != 0))
4444 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4447 /* clcs */
4448 static void gen_clcs(DisasContext *ctx)
4450 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
4451 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], t0);
4452 tcg_temp_free_i32(t0);
4453 /* Rc=1 sets CR0 to an undefined state */
4456 /* div - div. */
4457 static void gen_div(DisasContext *ctx)
4459 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4460 if (unlikely(Rc(ctx->opcode) != 0))
4461 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4464 /* divo - divo. */
4465 static void gen_divo(DisasContext *ctx)
4467 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4468 if (unlikely(Rc(ctx->opcode) != 0))
4469 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4472 /* divs - divs. */
4473 static void gen_divs(DisasContext *ctx)
4475 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4476 if (unlikely(Rc(ctx->opcode) != 0))
4477 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4480 /* divso - divso. */
4481 static void gen_divso(DisasContext *ctx)
4483 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4484 if (unlikely(Rc(ctx->opcode) != 0))
4485 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4488 /* doz - doz. */
4489 static void gen_doz(DisasContext *ctx)
4491 int l1 = gen_new_label();
4492 int l2 = gen_new_label();
4493 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4494 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4495 tcg_gen_br(l2);
4496 gen_set_label(l1);
4497 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4498 gen_set_label(l2);
4499 if (unlikely(Rc(ctx->opcode) != 0))
4500 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4503 /* dozo - dozo. */
4504 static void gen_dozo(DisasContext *ctx)
4506 int l1 = gen_new_label();
4507 int l2 = gen_new_label();
4508 TCGv t0 = tcg_temp_new();
4509 TCGv t1 = tcg_temp_new();
4510 TCGv t2 = tcg_temp_new();
4511 /* Start with XER OV disabled, the most likely case */
4512 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4513 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4514 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4515 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4516 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
4517 tcg_gen_andc_tl(t1, t1, t2);
4518 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
4519 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4520 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4521 tcg_gen_br(l2);
4522 gen_set_label(l1);
4523 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4524 gen_set_label(l2);
4525 tcg_temp_free(t0);
4526 tcg_temp_free(t1);
4527 tcg_temp_free(t2);
4528 if (unlikely(Rc(ctx->opcode) != 0))
4529 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4532 /* dozi */
4533 static void gen_dozi(DisasContext *ctx)
4535 target_long simm = SIMM(ctx->opcode);
4536 int l1 = gen_new_label();
4537 int l2 = gen_new_label();
4538 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
4539 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
4540 tcg_gen_br(l2);
4541 gen_set_label(l1);
4542 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4543 gen_set_label(l2);
4544 if (unlikely(Rc(ctx->opcode) != 0))
4545 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4548 /* lscbx - lscbx. */
4549 static void gen_lscbx(DisasContext *ctx)
4551 TCGv t0 = tcg_temp_new();
4552 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
4553 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
4554 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
4556 gen_addr_reg_index(ctx, t0);
4557 /* NIP cannot be restored if the memory exception comes from a helper */
4558 gen_update_nip(ctx, ctx->nip - 4);
4559 gen_helper_lscbx(t0, t0, t1, t2, t3);
4560 tcg_temp_free_i32(t1);
4561 tcg_temp_free_i32(t2);
4562 tcg_temp_free_i32(t3);
4563 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
4564 tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
4565 if (unlikely(Rc(ctx->opcode) != 0))
4566 gen_set_Rc0(ctx, t0);
4567 tcg_temp_free(t0);
4570 /* maskg - maskg. */
4571 static void gen_maskg(DisasContext *ctx)
4573 int l1 = gen_new_label();
4574 TCGv t0 = tcg_temp_new();
4575 TCGv t1 = tcg_temp_new();
4576 TCGv t2 = tcg_temp_new();
4577 TCGv t3 = tcg_temp_new();
4578 tcg_gen_movi_tl(t3, 0xFFFFFFFF);
4579 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4580 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
4581 tcg_gen_addi_tl(t2, t0, 1);
4582 tcg_gen_shr_tl(t2, t3, t2);
4583 tcg_gen_shr_tl(t3, t3, t1);
4584 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
4585 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
4586 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4587 gen_set_label(l1);
4588 tcg_temp_free(t0);
4589 tcg_temp_free(t1);
4590 tcg_temp_free(t2);
4591 tcg_temp_free(t3);
4592 if (unlikely(Rc(ctx->opcode) != 0))
4593 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4596 /* maskir - maskir. */
4597 static void gen_maskir(DisasContext *ctx)
4599 TCGv t0 = tcg_temp_new();
4600 TCGv t1 = tcg_temp_new();
4601 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4602 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4603 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4604 tcg_temp_free(t0);
4605 tcg_temp_free(t1);
4606 if (unlikely(Rc(ctx->opcode) != 0))
4607 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4610 /* mul - mul. */
4611 static void gen_mul(DisasContext *ctx)
4613 TCGv_i64 t0 = tcg_temp_new_i64();
4614 TCGv_i64 t1 = tcg_temp_new_i64();
4615 TCGv t2 = tcg_temp_new();
4616 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4617 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4618 tcg_gen_mul_i64(t0, t0, t1);
4619 tcg_gen_trunc_i64_tl(t2, t0);
4620 gen_store_spr(SPR_MQ, t2);
4621 tcg_gen_shri_i64(t1, t0, 32);
4622 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4623 tcg_temp_free_i64(t0);
4624 tcg_temp_free_i64(t1);
4625 tcg_temp_free(t2);
4626 if (unlikely(Rc(ctx->opcode) != 0))
4627 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4630 /* mulo - mulo. */
4631 static void gen_mulo(DisasContext *ctx)
4633 int l1 = gen_new_label();
4634 TCGv_i64 t0 = tcg_temp_new_i64();
4635 TCGv_i64 t1 = tcg_temp_new_i64();
4636 TCGv t2 = tcg_temp_new();
4637 /* Start with XER OV disabled, the most likely case */
4638 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4639 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4640 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4641 tcg_gen_mul_i64(t0, t0, t1);
4642 tcg_gen_trunc_i64_tl(t2, t0);
4643 gen_store_spr(SPR_MQ, t2);
4644 tcg_gen_shri_i64(t1, t0, 32);
4645 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4646 tcg_gen_ext32s_i64(t1, t0);
4647 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
4648 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4649 gen_set_label(l1);
4650 tcg_temp_free_i64(t0);
4651 tcg_temp_free_i64(t1);
4652 tcg_temp_free(t2);
4653 if (unlikely(Rc(ctx->opcode) != 0))
4654 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4657 /* nabs - nabs. */
4658 static void gen_nabs(DisasContext *ctx)
4660 int l1 = gen_new_label();
4661 int l2 = gen_new_label();
4662 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4663 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4664 tcg_gen_br(l2);
4665 gen_set_label(l1);
4666 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4667 gen_set_label(l2);
4668 if (unlikely(Rc(ctx->opcode) != 0))
4669 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4672 /* nabso - nabso. */
4673 static void gen_nabso(DisasContext *ctx)
4675 int l1 = gen_new_label();
4676 int l2 = gen_new_label();
4677 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4678 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4679 tcg_gen_br(l2);
4680 gen_set_label(l1);
4681 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4682 gen_set_label(l2);
4683 /* nabs never overflows */
4684 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4685 if (unlikely(Rc(ctx->opcode) != 0))
4686 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4689 /* rlmi - rlmi. */
4690 static void gen_rlmi(DisasContext *ctx)
4692 uint32_t mb = MB(ctx->opcode);
4693 uint32_t me = ME(ctx->opcode);
4694 TCGv t0 = tcg_temp_new();
4695 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4696 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4697 tcg_gen_andi_tl(t0, t0, MASK(mb, me));
4698 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
4699 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
4700 tcg_temp_free(t0);
4701 if (unlikely(Rc(ctx->opcode) != 0))
4702 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4705 /* rrib - rrib. */
4706 static void gen_rrib(DisasContext *ctx)
4708 TCGv t0 = tcg_temp_new();
4709 TCGv t1 = tcg_temp_new();
4710 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4711 tcg_gen_movi_tl(t1, 0x80000000);
4712 tcg_gen_shr_tl(t1, t1, t0);
4713 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4714 tcg_gen_and_tl(t0, t0, t1);
4715 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
4716 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4717 tcg_temp_free(t0);
4718 tcg_temp_free(t1);
4719 if (unlikely(Rc(ctx->opcode) != 0))
4720 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4723 /* sle - sle. */
4724 static void gen_sle(DisasContext *ctx)
4726 TCGv t0 = tcg_temp_new();
4727 TCGv t1 = tcg_temp_new();
4728 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4729 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4730 tcg_gen_subfi_tl(t1, 32, t1);
4731 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4732 tcg_gen_or_tl(t1, t0, t1);
4733 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4734 gen_store_spr(SPR_MQ, t1);
4735 tcg_temp_free(t0);
4736 tcg_temp_free(t1);
4737 if (unlikely(Rc(ctx->opcode) != 0))
4738 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4741 /* sleq - sleq. */
4742 static void gen_sleq(DisasContext *ctx)
4744 TCGv t0 = tcg_temp_new();
4745 TCGv t1 = tcg_temp_new();
4746 TCGv t2 = tcg_temp_new();
4747 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4748 tcg_gen_movi_tl(t2, 0xFFFFFFFF);
4749 tcg_gen_shl_tl(t2, t2, t0);
4750 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4751 gen_load_spr(t1, SPR_MQ);
4752 gen_store_spr(SPR_MQ, t0);
4753 tcg_gen_and_tl(t0, t0, t2);
4754 tcg_gen_andc_tl(t1, t1, t2);
4755 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4756 tcg_temp_free(t0);
4757 tcg_temp_free(t1);
4758 tcg_temp_free(t2);
4759 if (unlikely(Rc(ctx->opcode) != 0))
4760 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4763 /* sliq - sliq. */
4764 static void gen_sliq(DisasContext *ctx)
4766 int sh = SH(ctx->opcode);
4767 TCGv t0 = tcg_temp_new();
4768 TCGv t1 = tcg_temp_new();
4769 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4770 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4771 tcg_gen_or_tl(t1, t0, t1);
4772 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4773 gen_store_spr(SPR_MQ, t1);
4774 tcg_temp_free(t0);
4775 tcg_temp_free(t1);
4776 if (unlikely(Rc(ctx->opcode) != 0))
4777 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4780 /* slliq - slliq. */
4781 static void gen_slliq(DisasContext *ctx)
4783 int sh = SH(ctx->opcode);
4784 TCGv t0 = tcg_temp_new();
4785 TCGv t1 = tcg_temp_new();
4786 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4787 gen_load_spr(t1, SPR_MQ);
4788 gen_store_spr(SPR_MQ, t0);
4789 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
4790 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
4791 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4792 tcg_temp_free(t0);
4793 tcg_temp_free(t1);
4794 if (unlikely(Rc(ctx->opcode) != 0))
4795 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4798 /* sllq - sllq. */
4799 static void gen_sllq(DisasContext *ctx)
4801 int l1 = gen_new_label();
4802 int l2 = gen_new_label();
4803 TCGv t0 = tcg_temp_local_new();
4804 TCGv t1 = tcg_temp_local_new();
4805 TCGv t2 = tcg_temp_local_new();
4806 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4807 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4808 tcg_gen_shl_tl(t1, t1, t2);
4809 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4810 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
4811 gen_load_spr(t0, SPR_MQ);
4812 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4813 tcg_gen_br(l2);
4814 gen_set_label(l1);
4815 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4816 gen_load_spr(t2, SPR_MQ);
4817 tcg_gen_andc_tl(t1, t2, t1);
4818 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4819 gen_set_label(l2);
4820 tcg_temp_free(t0);
4821 tcg_temp_free(t1);
4822 tcg_temp_free(t2);
4823 if (unlikely(Rc(ctx->opcode) != 0))
4824 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4827 /* slq - slq. */
4828 static void gen_slq(DisasContext *ctx)
4830 int l1 = gen_new_label();
4831 TCGv t0 = tcg_temp_new();
4832 TCGv t1 = tcg_temp_new();
4833 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4834 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4835 tcg_gen_subfi_tl(t1, 32, t1);
4836 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4837 tcg_gen_or_tl(t1, t0, t1);
4838 gen_store_spr(SPR_MQ, t1);
4839 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
4840 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4841 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4842 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
4843 gen_set_label(l1);
4844 tcg_temp_free(t0);
4845 tcg_temp_free(t1);
4846 if (unlikely(Rc(ctx->opcode) != 0))
4847 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4850 /* sraiq - sraiq. */
4851 static void gen_sraiq(DisasContext *ctx)
4853 int sh = SH(ctx->opcode);
4854 int l1 = gen_new_label();
4855 TCGv t0 = tcg_temp_new();
4856 TCGv t1 = tcg_temp_new();
4857 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4858 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4859 tcg_gen_or_tl(t0, t0, t1);
4860 gen_store_spr(SPR_MQ, t0);
4861 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
4862 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4863 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
4864 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4865 gen_set_label(l1);
4866 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
4867 tcg_temp_free(t0);
4868 tcg_temp_free(t1);
4869 if (unlikely(Rc(ctx->opcode) != 0))
4870 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4873 /* sraq - sraq. */
4874 static void gen_sraq(DisasContext *ctx)
4876 int l1 = gen_new_label();
4877 int l2 = gen_new_label();
4878 TCGv t0 = tcg_temp_new();
4879 TCGv t1 = tcg_temp_local_new();
4880 TCGv t2 = tcg_temp_local_new();
4881 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4882 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4883 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
4884 tcg_gen_subfi_tl(t2, 32, t2);
4885 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
4886 tcg_gen_or_tl(t0, t0, t2);
4887 gen_store_spr(SPR_MQ, t0);
4888 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4889 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1);
4890 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
4891 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
4892 gen_set_label(l1);
4893 tcg_temp_free(t0);
4894 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
4895 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
4896 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4897 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
4898 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4899 gen_set_label(l2);
4900 tcg_temp_free(t1);
4901 tcg_temp_free(t2);
4902 if (unlikely(Rc(ctx->opcode) != 0))
4903 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4906 /* sre - sre. */
4907 static void gen_sre(DisasContext *ctx)
4909 TCGv t0 = tcg_temp_new();
4910 TCGv t1 = tcg_temp_new();
4911 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4912 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4913 tcg_gen_subfi_tl(t1, 32, t1);
4914 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4915 tcg_gen_or_tl(t1, t0, t1);
4916 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4917 gen_store_spr(SPR_MQ, t1);
4918 tcg_temp_free(t0);
4919 tcg_temp_free(t1);
4920 if (unlikely(Rc(ctx->opcode) != 0))
4921 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4924 /* srea - srea. */
4925 static void gen_srea(DisasContext *ctx)
4927 TCGv t0 = tcg_temp_new();
4928 TCGv t1 = tcg_temp_new();
4929 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4930 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4931 gen_store_spr(SPR_MQ, t0);
4932 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
4933 tcg_temp_free(t0);
4934 tcg_temp_free(t1);
4935 if (unlikely(Rc(ctx->opcode) != 0))
4936 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4939 /* sreq */
4940 static void gen_sreq(DisasContext *ctx)
4942 TCGv t0 = tcg_temp_new();
4943 TCGv t1 = tcg_temp_new();
4944 TCGv t2 = tcg_temp_new();
4945 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4946 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4947 tcg_gen_shr_tl(t1, t1, t0);
4948 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4949 gen_load_spr(t2, SPR_MQ);
4950 gen_store_spr(SPR_MQ, t0);
4951 tcg_gen_and_tl(t0, t0, t1);
4952 tcg_gen_andc_tl(t2, t2, t1);
4953 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
4954 tcg_temp_free(t0);
4955 tcg_temp_free(t1);
4956 tcg_temp_free(t2);
4957 if (unlikely(Rc(ctx->opcode) != 0))
4958 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4961 /* sriq */
4962 static void gen_sriq(DisasContext *ctx)
4964 int sh = SH(ctx->opcode);
4965 TCGv t0 = tcg_temp_new();
4966 TCGv t1 = tcg_temp_new();
4967 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4968 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4969 tcg_gen_or_tl(t1, t0, t1);
4970 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4971 gen_store_spr(SPR_MQ, t1);
4972 tcg_temp_free(t0);
4973 tcg_temp_free(t1);
4974 if (unlikely(Rc(ctx->opcode) != 0))
4975 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4978 /* srliq */
4979 static void gen_srliq(DisasContext *ctx)
4981 int sh = SH(ctx->opcode);
4982 TCGv t0 = tcg_temp_new();
4983 TCGv t1 = tcg_temp_new();
4984 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4985 gen_load_spr(t1, SPR_MQ);
4986 gen_store_spr(SPR_MQ, t0);
4987 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
4988 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
4989 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4990 tcg_temp_free(t0);
4991 tcg_temp_free(t1);
4992 if (unlikely(Rc(ctx->opcode) != 0))
4993 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4996 /* srlq */
4997 static void gen_srlq(DisasContext *ctx)
4999 int l1 = gen_new_label();
5000 int l2 = gen_new_label();
5001 TCGv t0 = tcg_temp_local_new();
5002 TCGv t1 = tcg_temp_local_new();
5003 TCGv t2 = tcg_temp_local_new();
5004 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
5005 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5006 tcg_gen_shr_tl(t2, t1, t2);
5007 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5008 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5009 gen_load_spr(t0, SPR_MQ);
5010 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
5011 tcg_gen_br(l2);
5012 gen_set_label(l1);
5013 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5014 tcg_gen_and_tl(t0, t0, t2);
5015 gen_load_spr(t1, SPR_MQ);
5016 tcg_gen_andc_tl(t1, t1, t2);
5017 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5018 gen_set_label(l2);
5019 tcg_temp_free(t0);
5020 tcg_temp_free(t1);
5021 tcg_temp_free(t2);
5022 if (unlikely(Rc(ctx->opcode) != 0))
5023 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5026 /* srq */
5027 static void gen_srq(DisasContext *ctx)
5029 int l1 = gen_new_label();
5030 TCGv t0 = tcg_temp_new();
5031 TCGv t1 = tcg_temp_new();
5032 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5033 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5034 tcg_gen_subfi_tl(t1, 32, t1);
5035 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5036 tcg_gen_or_tl(t1, t0, t1);
5037 gen_store_spr(SPR_MQ, t1);
5038 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
5039 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5040 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5041 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
5042 gen_set_label(l1);
5043 tcg_temp_free(t0);
5044 tcg_temp_free(t1);
5045 if (unlikely(Rc(ctx->opcode) != 0))
5046 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5049 /* PowerPC 602 specific instructions */
5051 /* dsa */
5052 static void gen_dsa(DisasContext *ctx)
5054 /* XXX: TODO */
5055 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5058 /* esa */
5059 static void gen_esa(DisasContext *ctx)
5061 /* XXX: TODO */
5062 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5065 /* mfrom */
5066 static void gen_mfrom(DisasContext *ctx)
5068 #if defined(CONFIG_USER_ONLY)
5069 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5070 #else
5071 if (unlikely(!ctx->mem_idx)) {
5072 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5073 return;
5075 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5076 #endif
5079 /* 602 - 603 - G2 TLB management */
5081 /* tlbld */
5082 static void gen_tlbld_6xx(DisasContext *ctx)
5084 #if defined(CONFIG_USER_ONLY)
5085 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5086 #else
5087 if (unlikely(!ctx->mem_idx)) {
5088 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5089 return;
5091 gen_helper_6xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
5092 #endif
5095 /* tlbli */
5096 static void gen_tlbli_6xx(DisasContext *ctx)
5098 #if defined(CONFIG_USER_ONLY)
5099 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5100 #else
5101 if (unlikely(!ctx->mem_idx)) {
5102 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5103 return;
5105 gen_helper_6xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5106 #endif
5109 /* 74xx TLB management */
5111 /* tlbld */
5112 static void gen_tlbld_74xx(DisasContext *ctx)
5114 #if defined(CONFIG_USER_ONLY)
5115 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5116 #else
5117 if (unlikely(!ctx->mem_idx)) {
5118 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5119 return;
5121 gen_helper_74xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
5122 #endif
5125 /* tlbli */
5126 static void gen_tlbli_74xx(DisasContext *ctx)
5128 #if defined(CONFIG_USER_ONLY)
5129 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5130 #else
5131 if (unlikely(!ctx->mem_idx)) {
5132 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5133 return;
5135 gen_helper_74xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5136 #endif
5139 /* POWER instructions not in PowerPC 601 */
5141 /* clf */
5142 static void gen_clf(DisasContext *ctx)
5144 /* Cache line flush: implemented as no-op */
5147 /* cli */
5148 static void gen_cli(DisasContext *ctx)
5150 /* Cache line invalidate: privileged and treated as no-op */
5151 #if defined(CONFIG_USER_ONLY)
5152 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5153 #else
5154 if (unlikely(!ctx->mem_idx)) {
5155 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5156 return;
5158 #endif
5161 /* dclst */
5162 static void gen_dclst(DisasContext *ctx)
5164 /* Data cache line store: treated as no-op */
5167 static void gen_mfsri(DisasContext *ctx)
5169 #if defined(CONFIG_USER_ONLY)
5170 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5171 #else
5172 int ra = rA(ctx->opcode);
5173 int rd = rD(ctx->opcode);
5174 TCGv t0;
5175 if (unlikely(!ctx->mem_idx)) {
5176 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5177 return;
5179 t0 = tcg_temp_new();
5180 gen_addr_reg_index(ctx, t0);
5181 tcg_gen_shri_tl(t0, t0, 28);
5182 tcg_gen_andi_tl(t0, t0, 0xF);
5183 gen_helper_load_sr(cpu_gpr[rd], t0);
5184 tcg_temp_free(t0);
5185 if (ra != 0 && ra != rd)
5186 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
5187 #endif
5190 static void gen_rac(DisasContext *ctx)
5192 #if defined(CONFIG_USER_ONLY)
5193 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5194 #else
5195 TCGv t0;
5196 if (unlikely(!ctx->mem_idx)) {
5197 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5198 return;
5200 t0 = tcg_temp_new();
5201 gen_addr_reg_index(ctx, t0);
5202 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], t0);
5203 tcg_temp_free(t0);
5204 #endif
5207 static void gen_rfsvc(DisasContext *ctx)
5209 #if defined(CONFIG_USER_ONLY)
5210 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5211 #else
5212 if (unlikely(!ctx->mem_idx)) {
5213 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5214 return;
5216 gen_helper_rfsvc();
5217 gen_sync_exception(ctx);
5218 #endif
5221 /* svc is not implemented for now */
5223 /* POWER2 specific instructions */
5224 /* Quad manipulation (load/store two floats at a time) */
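/* Each of these moves a pair of FPRs, frD and fr((D + 1) mod 32), to or
 * from two consecutive doublewords at EA and EA + 8.
 */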
5226 /* lfq */
5227 static void gen_lfq(DisasContext *ctx)
5229 int rd = rD(ctx->opcode);
5230 TCGv t0;
5231 gen_set_access_type(ctx, ACCESS_FLOAT);
5232 t0 = tcg_temp_new();
5233 gen_addr_imm_index(ctx, t0, 0);
5234 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5235 gen_addr_add(ctx, t0, t0, 8);
5236 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5237 tcg_temp_free(t0);
5240 /* lfqu */
5241 static void gen_lfqu(DisasContext *ctx)
5243 int ra = rA(ctx->opcode);
5244 int rd = rD(ctx->opcode);
5245 TCGv t0, t1;
5246 gen_set_access_type(ctx, ACCESS_FLOAT);
5247 t0 = tcg_temp_new();
5248 t1 = tcg_temp_new();
5249 gen_addr_imm_index(ctx, t0, 0);
5250 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5251 gen_addr_add(ctx, t1, t0, 8);
5252 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5253 if (ra != 0)
5254 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5255 tcg_temp_free(t0);
5256 tcg_temp_free(t1);
5259 /* lfqux */
5260 static void gen_lfqux(DisasContext *ctx)
5262 int ra = rA(ctx->opcode);
5263 int rd = rD(ctx->opcode);
5264 gen_set_access_type(ctx, ACCESS_FLOAT);
5265 TCGv t0, t1;
5266 t0 = tcg_temp_new();
5267 gen_addr_reg_index(ctx, t0);
5268 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5269 t1 = tcg_temp_new();
5270 gen_addr_add(ctx, t1, t0, 8);
5271 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5272 tcg_temp_free(t1);
5273 if (ra != 0)
5274 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5275 tcg_temp_free(t0);
5278 /* lfqx */
5279 static void gen_lfqx(DisasContext *ctx)
5281 int rd = rD(ctx->opcode);
5282 TCGv t0;
5283 gen_set_access_type(ctx, ACCESS_FLOAT);
5284 t0 = tcg_temp_new();
5285 gen_addr_reg_index(ctx, t0);
5286 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5287 gen_addr_add(ctx, t0, t0, 8);
5288 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5289 tcg_temp_free(t0);
5292 /* stfq */
5293 static void gen_stfq(DisasContext *ctx)
5295 int rd = rD(ctx->opcode);
5296 TCGv t0;
5297 gen_set_access_type(ctx, ACCESS_FLOAT);
5298 t0 = tcg_temp_new();
5299 gen_addr_imm_index(ctx, t0, 0);
5300 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5301 gen_addr_add(ctx, t0, t0, 8);
5302 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5303 tcg_temp_free(t0);
5306 /* stfqu */
5307 static void gen_stfqu(DisasContext *ctx)
5309 int ra = rA(ctx->opcode);
5310 int rd = rD(ctx->opcode);
5311 TCGv t0, t1;
5312 gen_set_access_type(ctx, ACCESS_FLOAT);
5313 t0 = tcg_temp_new();
5314 gen_addr_imm_index(ctx, t0, 0);
5315 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5316 t1 = tcg_temp_new();
5317 gen_addr_add(ctx, t1, t0, 8);
5318 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5319 tcg_temp_free(t1);
5320 if (ra != 0)
5321 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5322 tcg_temp_free(t0);
5325 /* stfqux */
5326 static void gen_stfqux(DisasContext *ctx)
5328 int ra = rA(ctx->opcode);
5329 int rd = rD(ctx->opcode);
5330 TCGv t0, t1;
5331 gen_set_access_type(ctx, ACCESS_FLOAT);
5332 t0 = tcg_temp_new();
5333 gen_addr_reg_index(ctx, t0);
5334 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5335 t1 = tcg_temp_new();
5336 gen_addr_add(ctx, t1, t0, 8);
5337 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5338 tcg_temp_free(t1);
5339 if (ra != 0)
5340 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5341 tcg_temp_free(t0);
5344 /* stfqx */
5345 static void gen_stfqx(DisasContext *ctx)
5347 int rd = rD(ctx->opcode);
5348 TCGv t0;
5349 gen_set_access_type(ctx, ACCESS_FLOAT);
5350 t0 = tcg_temp_new();
5351 gen_addr_reg_index(ctx, t0);
5352 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5353 gen_addr_add(ctx, t0, t0, 8);
5354 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5355 tcg_temp_free(t0);
5358 /* BookE specific instructions */
5360 /* XXX: not implemented on 440? */
5361 static void gen_mfapidi(DisasContext *ctx)
5363 /* XXX: TODO */
5364 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5367 /* XXX: not implemented on 440? */
5368 static void gen_tlbiva(DisasContext *ctx)
5370 #if defined(CONFIG_USER_ONLY)
5371 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5372 #else
5373 TCGv t0;
5374 if (unlikely(!ctx->mem_idx)) {
5375 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5376 return;
5378 t0 = tcg_temp_new();
5379 gen_addr_reg_index(ctx, t0);
5380 gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
5381 tcg_temp_free(t0);
5382 #endif
5385 /* All 405 MAC instructions are translated here */
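/* The low bits of opc3 select which 16-bit halves of rA and rB are used
 * and whether they are sign- or zero-extended; opc2 bit 0x04 selects a
 * multiply-accumulate (bit 0x02 the negative form), opc3 bit 0x10 requests
 * an XER[OV] update and bit 0x02 saturation.
 */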
5386 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5387 int ra, int rb, int rt, int Rc)
5389 TCGv t0, t1;
5391 t0 = tcg_temp_local_new();
5392 t1 = tcg_temp_local_new();
5394 switch (opc3 & 0x0D) {
5395 case 0x05:
5396 /* macchw - macchw. - macchwo - macchwo. */
5397 /* macchws - macchws. - macchwso - macchwso. */
5398 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
5399 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
5400 /* mulchw - mulchw. */
5401 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5402 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5403 tcg_gen_ext16s_tl(t1, t1);
5404 break;
5405 case 0x04:
5406 /* macchwu - macchwu. - macchwuo - macchwuo. */
5407 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
5408 /* mulchwu - mulchwu. */
5409 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5410 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5411 tcg_gen_ext16u_tl(t1, t1);
5412 break;
5413 case 0x01:
5414 /* machhw - machhw. - machhwo - machhwo. */
5415 /* machhws - machhws. - machhwso - machhwso. */
5416 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
5417 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
5418 /* mulhhw - mulhhw. */
5419 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5420 tcg_gen_ext16s_tl(t0, t0);
5421 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5422 tcg_gen_ext16s_tl(t1, t1);
5423 break;
5424 case 0x00:
5425 /* machhwu - machhwu. - machhwuo - machhwuo. */
5426 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
5427 /* mulhhwu - mulhhwu. */
5428 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5429 tcg_gen_ext16u_tl(t0, t0);
5430 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5431 tcg_gen_ext16u_tl(t1, t1);
5432 break;
5433 case 0x0D:
5434 /* maclhw - maclhw. - maclhwo - maclhwo. */
5435 /* maclhws - maclhws. - maclhwso - maclhwso. */
5436 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
5437 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
5438 /* mullhw - mullhw. */
5439 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5440 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5441 break;
5442 case 0x0C:
5443 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
5444 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
5445 /* mullhwu - mullhwu. */
5446 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5447 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5448 break;
5450 if (opc2 & 0x04) {
5451 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5452 tcg_gen_mul_tl(t1, t0, t1);
5453 if (opc2 & 0x02) {
5454 /* nmultiply-and-accumulate (0x0E) */
5455 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5456 } else {
5457 /* multiply-and-accumulate (0x0C) */
5458 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5461 if (opc3 & 0x12) {
5462 /* Check overflow and/or saturate */
5463 int l1 = gen_new_label();
5465 if (opc3 & 0x10) {
5466 /* Start with XER OV disabled, the most likely case */
5467 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
5469 if (opc3 & 0x01) {
5470 /* Signed */
5471 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5472 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5473 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5474 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5475 if (opc3 & 0x02) {
5476 /* Saturate */
5477 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5478 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5480 } else {
5481 /* Unsigned */
5482 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5483 if (opc3 & 0x02) {
5484 /* Saturate */
5485 tcg_gen_movi_tl(t0, UINT32_MAX);
5488 if (opc3 & 0x10) {
5489 /* Check overflow */
5490 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
5492 gen_set_label(l1);
5493 tcg_gen_mov_tl(cpu_gpr[rt], t0);
5495 } else {
5496 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5498 tcg_temp_free(t0);
5499 tcg_temp_free(t1);
5500 if (unlikely(Rc != 0)) {
5501 /* Update Rc0 */
5502 gen_set_Rc0(ctx, cpu_gpr[rt]);
5506 #define GEN_MAC_HANDLER(name, opc2, opc3) \
5507 static void glue(gen_, name)(DisasContext *ctx) \
5509 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
5510 rD(ctx->opcode), Rc(ctx->opcode)); \
5513 /* macchw - macchw. */
5514 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5515 /* macchwo - macchwo. */
5516 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5517 /* macchws - macchws. */
5518 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5519 /* macchwso - macchwso. */
5520 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5521 /* macchwsu - macchwsu. */
5522 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5523 /* macchwsuo - macchwsuo. */
5524 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5525 /* macchwu - macchwu. */
5526 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5527 /* macchwuo - macchwuo. */
5528 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5529 /* machhw - machhw. */
5530 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5531 /* machhwo - machhwo. */
5532 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5533 /* machhws - machhws. */
5534 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5535 /* machhwso - machhwso. */
5536 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5537 /* machhwsu - machhwsu. */
5538 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5539 /* machhwsuo - machhwsuo. */
5540 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5541 /* machhwu - machhwu. */
5542 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5543 /* machhwuo - machhwuo. */
5544 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5545 /* maclhw - maclhw. */
5546 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5547 /* maclhwo - maclhwo. */
5548 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5549 /* maclhws - maclhws. */
5550 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5551 /* maclhwso - maclhwso. */
5552 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5553 /* maclhwu - maclhwu. */
5554 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5555 /* maclhwuo - maclhwuo. */
5556 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5557 /* maclhwsu - maclhwsu. */
5558 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5559 /* maclhwsuo - maclhwsuo. */
5560 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5561 /* nmacchw - nmacchw. */
5562 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5563 /* nmacchwo - nmacchwo. */
5564 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5565 /* nmacchws - nmacchws. */
5566 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5567 /* nmacchwso - nmacchwso. */
5568 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5569 /* nmachhw - nmachhw. */
5570 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5571 /* nmachhwo - nmachhwo. */
5572 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5573 /* nmachhws - nmachhws. */
5574 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5575 /* nmachhwso - nmachhwso. */
5576 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5577 /* nmaclhw - nmaclhw. */
5578 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5579 /* nmaclhwo - nmaclhwo. */
5580 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5581 /* nmaclhws - nmaclhws. */
5582 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5583 /* nmaclhwso - nmaclhwso. */
5584 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5586 /* mulchw - mulchw. */
5587 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5588 /* mulchwu - mulchwu. */
5589 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5590 /* mulhhw - mulhhw. */
5591 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5592 /* mulhhwu - mulhhwu. */
5593 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5594 /* mullhw - mullhw. */
5595 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5596 /* mullhwu - mullhwu. */
5597 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5599 /* mfdcr */
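/* mfdcr/mtdcr access the 4xx Device Control Registers; the DCR number
 * comes from the SPR field, while the indexed forms below take it from rA.
 */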
5600 static void gen_mfdcr(DisasContext *ctx)
5602 #if defined(CONFIG_USER_ONLY)
5603 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5604 #else
5605 TCGv dcrn;
5606 if (unlikely(!ctx->mem_idx)) {
5607 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5608 return;
5610 /* NIP cannot be restored if the memory exception comes from a helper */
5611 gen_update_nip(ctx, ctx->nip - 4);
5612 dcrn = tcg_const_tl(SPR(ctx->opcode));
5613 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], dcrn);
5614 tcg_temp_free(dcrn);
5615 #endif
5618 /* mtdcr */
5619 static void gen_mtdcr(DisasContext *ctx)
5621 #if defined(CONFIG_USER_ONLY)
5622 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5623 #else
5624 TCGv dcrn;
5625 if (unlikely(!ctx->mem_idx)) {
5626 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5627 return;
5629 /* NIP cannot be restored if the memory exception comes from a helper */
5630 gen_update_nip(ctx, ctx->nip - 4);
5631 dcrn = tcg_const_tl(SPR(ctx->opcode));
5632 gen_helper_store_dcr(dcrn, cpu_gpr[rS(ctx->opcode)]);
5633 tcg_temp_free(dcrn);
5634 #endif
5637 /* mfdcrx */
5638 /* XXX: not implemented on 440? */
5639 static void gen_mfdcrx(DisasContext *ctx)
5641 #if defined(CONFIG_USER_ONLY)
5642 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5643 #else
5644 if (unlikely(!ctx->mem_idx)) {
5645 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5646 return;
5648 /* NIP cannot be restored if the memory exception comes from a helper */
5649 gen_update_nip(ctx, ctx->nip - 4);
5650 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5651 /* Note: Rc=1 leaves Rc0 in an undefined state */
5652 #endif
5655 /* mtdcrx */
5656 /* XXX: not implemented on 440? */
5657 static void gen_mtdcrx(DisasContext *ctx)
5659 #if defined(CONFIG_USER_ONLY)
5660 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5661 #else
5662 if (unlikely(!ctx->mem_idx)) {
5663 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5664 return;
5666 /* NIP cannot be restored if the memory exception comes from a helper */
5667 gen_update_nip(ctx, ctx->nip - 4);
5668 gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5669 /* Note: Rc=1 leaves Rc0 in an undefined state */
5670 #endif
5673 /* mfdcrux (PPC 460) : user-mode access to DCR */
5674 static void gen_mfdcrux(DisasContext *ctx)
5676 /* NIP cannot be restored if the memory exception comes from a helper */
5677 gen_update_nip(ctx, ctx->nip - 4);
5678 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5679 /* Note: Rc=1 leaves Rc0 in an undefined state */
5682 /* mtdcrux (PPC 460) : user-mode access to DCR */
5683 static void gen_mtdcrux(DisasContext *ctx)
5685 /* NIP cannot be restored if the memory exception comes from a helper */
5686 gen_update_nip(ctx, ctx->nip - 4);
5687 gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5688 /* Note: Rc=1 leaves Rc0 in an undefined state */
5691 /* dccci */
5692 static void gen_dccci(DisasContext *ctx)
5694 #if defined(CONFIG_USER_ONLY)
5695 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5696 #else
5697 if (unlikely(!ctx->mem_idx)) {
5698 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5699 return;
5701 /* interpreted as no-op */
5702 #endif
5705 /* dcread */
5706 static void gen_dcread(DisasContext *ctx)
5708 #if defined(CONFIG_USER_ONLY)
5709 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5710 #else
5711 TCGv EA, val;
5712 if (unlikely(!ctx->mem_idx)) {
5713 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5714 return;
5716 gen_set_access_type(ctx, ACCESS_CACHE);
5717 EA = tcg_temp_new();
5718 gen_addr_reg_index(ctx, EA);
5719 val = tcg_temp_new();
5720 gen_qemu_ld32u(ctx, val, EA);
5721 tcg_temp_free(val);
5722 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5723 tcg_temp_free(EA);
5724 #endif
5727 /* icbt */
5728 static void gen_icbt_40x(DisasContext *ctx)
5730 /* interpreted as no-op */
5731 /* XXX: specification says this is treated as a load by the MMU
5732 * but does not generate any exception
5736 /* iccci */
5737 static void gen_iccci(DisasContext *ctx)
5739 #if defined(CONFIG_USER_ONLY)
5740 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5741 #else
5742 if (unlikely(!ctx->mem_idx)) {
5743 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5744 return;
5746 /* interpreted as no-op */
5747 #endif
5750 /* icread */
5751 static void gen_icread(DisasContext *ctx)
5753 #if defined(CONFIG_USER_ONLY)
5754 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5755 #else
5756 if (unlikely(!ctx->mem_idx)) {
5757 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5758 return;
5760 /* interpreted as no-op */
5761 #endif
5764 /* rfci (mem_idx only) */
5765 static void gen_rfci_40x(DisasContext *ctx)
5767 #if defined(CONFIG_USER_ONLY)
5768 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5769 #else
5770 if (unlikely(!ctx->mem_idx)) {
5771 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5772 return;
5774 /* Restore CPU state */
5775 gen_helper_40x_rfci();
5776 gen_sync_exception(ctx);
5777 #endif
5780 static void gen_rfci(DisasContext *ctx)
5782 #if defined(CONFIG_USER_ONLY)
5783 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5784 #else
5785 if (unlikely(!ctx->mem_idx)) {
5786 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5787 return;
5789 /* Restore CPU state */
5790 gen_helper_rfci();
5791 gen_sync_exception(ctx);
5792 #endif
5795 /* BookE specific */
5797 /* XXX: not implemented on 440? */
5798 static void gen_rfdi(DisasContext *ctx)
5800 #if defined(CONFIG_USER_ONLY)
5801 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5802 #else
5803 if (unlikely(!ctx->mem_idx)) {
5804 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5805 return;
5807 /* Restore CPU state */
5808 gen_helper_rfdi();
5809 gen_sync_exception(ctx);
5810 #endif
5813 /* XXX: not implemented on 440? */
5814 static void gen_rfmci(DisasContext *ctx)
5816 #if defined(CONFIG_USER_ONLY)
5817 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5818 #else
5819 if (unlikely(!ctx->mem_idx)) {
5820 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5821 return;
5823 /* Restore CPU state */
5824 gen_helper_rfmci();
5825 gen_sync_exception(ctx);
5826 #endif
5829 /* TLB management - PowerPC 405 implementation */
5831 /* tlbre */
5832 static void gen_tlbre_40x(DisasContext *ctx)
5834 #if defined(CONFIG_USER_ONLY)
5835 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5836 #else
5837 if (unlikely(!ctx->mem_idx)) {
5838 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5839 return;
5841 switch (rB(ctx->opcode)) {
5842 case 0:
5843 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5844 break;
5845 case 1:
5846 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5847 break;
5848 default:
5849 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5850 break;
5852 #endif
5855 /* tlbsx - tlbsx. */
5856 static void gen_tlbsx_40x(DisasContext *ctx)
5858 #if defined(CONFIG_USER_ONLY)
5859 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5860 #else
5861 TCGv t0;
5862 if (unlikely(!ctx->mem_idx)) {
5863 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5864 return;
5866 t0 = tcg_temp_new();
5867 gen_addr_reg_index(ctx, t0);
5868 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], t0);
5869 tcg_temp_free(t0);
5870 if (Rc(ctx->opcode)) {
5871 int l1 = gen_new_label();
5872 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
5873 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
5874 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
5875 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5876 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5877 gen_set_label(l1);
5879 #endif
5882 /* tlbwe */
5883 static void gen_tlbwe_40x(DisasContext *ctx)
5885 #if defined(CONFIG_USER_ONLY)
5886 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5887 #else
5888 if (unlikely(!ctx->mem_idx)) {
5889 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5890 return;
5892 switch (rB(ctx->opcode)) {
5893 case 0:
5894 gen_helper_4xx_tlbwe_hi(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5895 break;
5896 case 1:
5897 gen_helper_4xx_tlbwe_lo(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5898 break;
5899 default:
5900 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5901 break;
5903 #endif
5906 /* TLB management - PowerPC 440 implementation */
5908 /* tlbre */
5909 static void gen_tlbre_440(DisasContext *ctx)
5911 #if defined(CONFIG_USER_ONLY)
5912 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5913 #else
5914 if (unlikely(!ctx->mem_idx)) {
5915 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5916 return;
5918 switch (rB(ctx->opcode)) {
5919 case 0:
5920 case 1:
5921 case 2:
5923 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5924 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], t0, cpu_gpr[rA(ctx->opcode)]);
5925 tcg_temp_free_i32(t0);
5927 break;
5928 default:
5929 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5930 break;
5932 #endif
5935 /* tlbsx - tlbsx. */
5936 static void gen_tlbsx_440(DisasContext *ctx)
5938 #if defined(CONFIG_USER_ONLY)
5939 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5940 #else
5941 TCGv t0;
5942 if (unlikely(!ctx->mem_idx)) {
5943 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5944 return;
5946 t0 = tcg_temp_new();
5947 gen_addr_reg_index(ctx, t0);
5948 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], t0);
5949 tcg_temp_free(t0);
5950 if (Rc(ctx->opcode)) {
5951 int l1 = gen_new_label();
5952 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
5953 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
5954 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
5955 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5956 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5957 gen_set_label(l1);
5959 #endif
5962 /* tlbwe */
5963 static void gen_tlbwe_440(DisasContext *ctx)
5965 #if defined(CONFIG_USER_ONLY)
5966 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5967 #else
5968 if (unlikely(!ctx->mem_idx)) {
5969 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5970 return;
5972 switch (rB(ctx->opcode)) {
5973 case 0:
5974 case 1:
5975 case 2:
5977 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
5978 gen_helper_440_tlbwe(t0, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5979 tcg_temp_free_i32(t0);
5981 break;
5982 default:
5983 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5984 break;
5986 #endif
5989 /* TLB management - PowerPC BookE 2.06 implementation */
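/* For BookE 2.06 the TLB entry contents travel through the MAS registers
 * rather than through GPR operands, so tlbre/tlbwe take no TCG arguments
 * here and tlbsx/tlbivax only pass the computed effective address down to
 * the helper.
 */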
5991 /* tlbre */
5992 static void gen_tlbre_booke206(DisasContext *ctx)
5994 #if defined(CONFIG_USER_ONLY)
5995 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5996 #else
5997 if (unlikely(!ctx->mem_idx)) {
5998 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5999 return;
6002 gen_helper_booke206_tlbre();
6003 #endif
6006 /* tlbsx - tlbsx. */
6007 static void gen_tlbsx_booke206(DisasContext *ctx)
6009 #if defined(CONFIG_USER_ONLY)
6010 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6011 #else
6012 TCGv t0;
6013 if (unlikely(!ctx->mem_idx)) {
6014 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6015 return;
6018 if (rA(ctx->opcode)) {
6019 t0 = tcg_temp_new();
6020 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
6021 } else {
6022 t0 = tcg_const_tl(0);
6025 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
6026 gen_helper_booke206_tlbsx(t0);
6027 #endif
6030 /* tlbwe */
6031 static void gen_tlbwe_booke206(DisasContext *ctx)
6033 #if defined(CONFIG_USER_ONLY)
6034 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6035 #else
6036 if (unlikely(!ctx->mem_idx)) {
6037 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6038 return;
6040 gen_helper_booke206_tlbwe();
6041 #endif
6044 static void gen_tlbivax_booke206(DisasContext *ctx)
6046 #if defined(CONFIG_USER_ONLY)
6047 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6048 #else
6049 TCGv t0;
6050 if (unlikely(!ctx->mem_idx)) {
6051 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6052 return;
6055 t0 = tcg_temp_new();
6056 gen_addr_reg_index(ctx, t0);
6058 gen_helper_booke206_tlbivax(t0);
6059 #endif
6063 /* wrtee */
6064 static void gen_wrtee(DisasContext *ctx)
6066 #if defined(CONFIG_USER_ONLY)
6067 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6068 #else
6069 TCGv t0;
6070 if (unlikely(!ctx->mem_idx)) {
6071 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6072 return;
6074 t0 = tcg_temp_new();
6075 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
6076 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6077 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
6078 tcg_temp_free(t0);
6079 /* Stop translation to have a chance to raise an exception
6080 * if we just set msr_ee to 1
6082 gen_stop_exception(ctx);
6083 #endif
6086 /* wrteei */
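/* Unlike wrtee, wrteei takes the new MSR[EE] value from bit 0x00008000 of
 * the instruction encoding itself, so no GPR is read.
 */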
6087 static void gen_wrteei(DisasContext *ctx)
6089 #if defined(CONFIG_USER_ONLY)
6090 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6091 #else
6092 if (unlikely(!ctx->mem_idx)) {
6093 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6094 return;
6096 if (ctx->opcode & 0x00008000) {
6097 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6098 /* Stop translation to have a chance to raise an exception */
6099 gen_stop_exception(ctx);
6100 } else {
6101 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6103 #endif
6106 /* PowerPC 440 specific instructions */
6108 /* dlmzb */
6109 static void gen_dlmzb(DisasContext *ctx)
6111 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6112 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
6113 cpu_gpr[rB(ctx->opcode)], t0);
6114 tcg_temp_free_i32(t0);
6117 /* mbar replaces eieio on 440 */
6118 static void gen_mbar(DisasContext *ctx)
6120 /* interpreted as no-op */
6123 /* msync replaces sync on 440 */
6124 static void gen_msync(DisasContext *ctx)
6126 /* interpreted as no-op */
6129 /* icbt */
6130 static void gen_icbt_440(DisasContext *ctx)
6132 /* interpreted as no-op */
6133 /* XXX: the specification says this is treated as a load by the MMU
6134 * but does not generate any exception
6138 /*** Altivec vector extension ***/
6139 /* Altivec registers moves */
6141 static inline TCGv_ptr gen_avr_ptr(int reg)
6143 TCGv_ptr r = tcg_temp_new_ptr();
6144 tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, avr[reg]));
6145 return r;
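/* The Altivec load/store patterns below force 16-byte alignment by clearing
 * the low four bits of the EA and move each 128-bit register as two 64-bit
 * halves (cpu_avrh/cpu_avrl); in little-endian mode the two halves are
 * accessed in the opposite order.
 */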
6148 #define GEN_VR_LDX(name, opc2, opc3) \
6149 static void glue(gen_, name)(DisasContext *ctx) \
6151 TCGv EA; \
6152 if (unlikely(!ctx->altivec_enabled)) { \
6153 gen_exception(ctx, POWERPC_EXCP_VPU); \
6154 return; \
6156 gen_set_access_type(ctx, ACCESS_INT); \
6157 EA = tcg_temp_new(); \
6158 gen_addr_reg_index(ctx, EA); \
6159 tcg_gen_andi_tl(EA, EA, ~0xf); \
6160 if (ctx->le_mode) { \
6161 gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6162 tcg_gen_addi_tl(EA, EA, 8); \
6163 gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6164 } else { \
6165 gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6166 tcg_gen_addi_tl(EA, EA, 8); \
6167 gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6169 tcg_temp_free(EA); \
6172 #define GEN_VR_STX(name, opc2, opc3) \
6173 static void gen_st##name(DisasContext *ctx) \
6175 TCGv EA; \
6176 if (unlikely(!ctx->altivec_enabled)) { \
6177 gen_exception(ctx, POWERPC_EXCP_VPU); \
6178 return; \
6180 gen_set_access_type(ctx, ACCESS_INT); \
6181 EA = tcg_temp_new(); \
6182 gen_addr_reg_index(ctx, EA); \
6183 tcg_gen_andi_tl(EA, EA, ~0xf); \
6184 if (ctx->le_mode) { \
6185 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6186 tcg_gen_addi_tl(EA, EA, 8); \
6187 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6188 } else { \
6189 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6190 tcg_gen_addi_tl(EA, EA, 8); \
6191 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6193 tcg_temp_free(EA); \
6196 #define GEN_VR_LVE(name, opc2, opc3) \
6197 static void gen_lve##name(DisasContext *ctx) \
6199 TCGv EA; \
6200 TCGv_ptr rs; \
6201 if (unlikely(!ctx->altivec_enabled)) { \
6202 gen_exception(ctx, POWERPC_EXCP_VPU); \
6203 return; \
6205 gen_set_access_type(ctx, ACCESS_INT); \
6206 EA = tcg_temp_new(); \
6207 gen_addr_reg_index(ctx, EA); \
6208 rs = gen_avr_ptr(rS(ctx->opcode)); \
6209 gen_helper_lve##name (rs, EA); \
6210 tcg_temp_free(EA); \
6211 tcg_temp_free_ptr(rs); \
6214 #define GEN_VR_STVE(name, opc2, opc3) \
6215 static void gen_stve##name(DisasContext *ctx) \
6217 TCGv EA; \
6218 TCGv_ptr rs; \
6219 if (unlikely(!ctx->altivec_enabled)) { \
6220 gen_exception(ctx, POWERPC_EXCP_VPU); \
6221 return; \
6223 gen_set_access_type(ctx, ACCESS_INT); \
6224 EA = tcg_temp_new(); \
6225 gen_addr_reg_index(ctx, EA); \
6226 rs = gen_avr_ptr(rS(ctx->opcode)); \
6227 gen_helper_stve##name (rs, EA); \
6228 tcg_temp_free(EA); \
6229 tcg_temp_free_ptr(rs); \
6232 GEN_VR_LDX(lvx, 0x07, 0x03);
6233 /* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
6234 GEN_VR_LDX(lvxl, 0x07, 0x0B);
6236 GEN_VR_LVE(bx, 0x07, 0x00);
6237 GEN_VR_LVE(hx, 0x07, 0x01);
6238 GEN_VR_LVE(wx, 0x07, 0x02);
6240 GEN_VR_STX(svx, 0x07, 0x07);
6241 /* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
6242 GEN_VR_STX(svxl, 0x07, 0x0F);
6244 GEN_VR_STVE(bx, 0x07, 0x04);
6245 GEN_VR_STVE(hx, 0x07, 0x05);
6246 GEN_VR_STVE(wx, 0x07, 0x06);
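/* lvsl/lvsr build the permute control vector used to fix up unaligned
 * accesses; the result depends only on the low-order bits of the EA, and
 * the actual computation is done in the helpers.
 */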
6248 static void gen_lvsl(DisasContext *ctx)
6250 TCGv_ptr rd;
6251 TCGv EA;
6252 if (unlikely(!ctx->altivec_enabled)) {
6253 gen_exception(ctx, POWERPC_EXCP_VPU);
6254 return;
6256 EA = tcg_temp_new();
6257 gen_addr_reg_index(ctx, EA);
6258 rd = gen_avr_ptr(rD(ctx->opcode));
6259 gen_helper_lvsl(rd, EA);
6260 tcg_temp_free(EA);
6261 tcg_temp_free_ptr(rd);
6264 static void gen_lvsr(DisasContext *ctx)
6266 TCGv_ptr rd;
6267 TCGv EA;
6268 if (unlikely(!ctx->altivec_enabled)) {
6269 gen_exception(ctx, POWERPC_EXCP_VPU);
6270 return;
6272 EA = tcg_temp_new();
6273 gen_addr_reg_index(ctx, EA);
6274 rd = gen_avr_ptr(rD(ctx->opcode));
6275 gen_helper_lvsr(rd, EA);
6276 tcg_temp_free(EA);
6277 tcg_temp_free_ptr(rd);
6280 static void gen_mfvscr(DisasContext *ctx)
6282 TCGv_i32 t;
6283 if (unlikely(!ctx->altivec_enabled)) {
6284 gen_exception(ctx, POWERPC_EXCP_VPU);
6285 return;
6287 tcg_gen_movi_i64(cpu_avrh[rD(ctx->opcode)], 0);
6288 t = tcg_temp_new_i32();
6289 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, vscr));
6290 tcg_gen_extu_i32_i64(cpu_avrl[rD(ctx->opcode)], t);
6291 tcg_temp_free_i32(t);
6294 static void gen_mtvscr(DisasContext *ctx)
6296 TCGv_ptr p;
6297 if (unlikely(!ctx->altivec_enabled)) {
6298 gen_exception(ctx, POWERPC_EXCP_VPU);
6299 return;
6301 p = gen_avr_ptr(rD(ctx->opcode));
6302 gen_helper_mtvscr(p);
6303 tcg_temp_free_ptr(p);
6306 /* Logical operations */
6307 #define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
6308 static void glue(gen_, name)(DisasContext *ctx) \
6310 if (unlikely(!ctx->altivec_enabled)) { \
6311 gen_exception(ctx, POWERPC_EXCP_VPU); \
6312 return; \
6314 tcg_op(cpu_avrh[rD(ctx->opcode)], cpu_avrh[rA(ctx->opcode)], cpu_avrh[rB(ctx->opcode)]); \
6315 tcg_op(cpu_avrl[rD(ctx->opcode)], cpu_avrl[rA(ctx->opcode)], cpu_avrl[rB(ctx->opcode)]); \
6318 GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16);
6319 GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17);
6320 GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18);
6321 GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19);
6322 GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20);
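/* Apart from the logical operations above, which are expanded inline on the
 * two 64-bit halves, VX-form instructions are implemented by helpers that
 * receive pointers into the CPU state (see gen_avr_ptr).
 */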
6324 #define GEN_VXFORM(name, opc2, opc3) \
6325 static void glue(gen_, name)(DisasContext *ctx) \
6327 TCGv_ptr ra, rb, rd; \
6328 if (unlikely(!ctx->altivec_enabled)) { \
6329 gen_exception(ctx, POWERPC_EXCP_VPU); \
6330 return; \
6332 ra = gen_avr_ptr(rA(ctx->opcode)); \
6333 rb = gen_avr_ptr(rB(ctx->opcode)); \
6334 rd = gen_avr_ptr(rD(ctx->opcode)); \
6335 gen_helper_##name (rd, ra, rb); \
6336 tcg_temp_free_ptr(ra); \
6337 tcg_temp_free_ptr(rb); \
6338 tcg_temp_free_ptr(rd); \
6341 GEN_VXFORM(vaddubm, 0, 0);
6342 GEN_VXFORM(vadduhm, 0, 1);
6343 GEN_VXFORM(vadduwm, 0, 2);
6344 GEN_VXFORM(vsububm, 0, 16);
6345 GEN_VXFORM(vsubuhm, 0, 17);
6346 GEN_VXFORM(vsubuwm, 0, 18);
6347 GEN_VXFORM(vmaxub, 1, 0);
6348 GEN_VXFORM(vmaxuh, 1, 1);
6349 GEN_VXFORM(vmaxuw, 1, 2);
6350 GEN_VXFORM(vmaxsb, 1, 4);
6351 GEN_VXFORM(vmaxsh, 1, 5);
6352 GEN_VXFORM(vmaxsw, 1, 6);
6353 GEN_VXFORM(vminub, 1, 8);
6354 GEN_VXFORM(vminuh, 1, 9);
6355 GEN_VXFORM(vminuw, 1, 10);
6356 GEN_VXFORM(vminsb, 1, 12);
6357 GEN_VXFORM(vminsh, 1, 13);
6358 GEN_VXFORM(vminsw, 1, 14);
6359 GEN_VXFORM(vavgub, 1, 16);
6360 GEN_VXFORM(vavguh, 1, 17);
6361 GEN_VXFORM(vavguw, 1, 18);
6362 GEN_VXFORM(vavgsb, 1, 20);
6363 GEN_VXFORM(vavgsh, 1, 21);
6364 GEN_VXFORM(vavgsw, 1, 22);
6365 GEN_VXFORM(vmrghb, 6, 0);
6366 GEN_VXFORM(vmrghh, 6, 1);
6367 GEN_VXFORM(vmrghw, 6, 2);
6368 GEN_VXFORM(vmrglb, 6, 4);
6369 GEN_VXFORM(vmrglh, 6, 5);
6370 GEN_VXFORM(vmrglw, 6, 6);
6371 GEN_VXFORM(vmuloub, 4, 0);
6372 GEN_VXFORM(vmulouh, 4, 1);
6373 GEN_VXFORM(vmulosb, 4, 4);
6374 GEN_VXFORM(vmulosh, 4, 5);
6375 GEN_VXFORM(vmuleub, 4, 8);
6376 GEN_VXFORM(vmuleuh, 4, 9);
6377 GEN_VXFORM(vmulesb, 4, 12);
6378 GEN_VXFORM(vmulesh, 4, 13);
6379 GEN_VXFORM(vslb, 2, 4);
6380 GEN_VXFORM(vslh, 2, 5);
6381 GEN_VXFORM(vslw, 2, 6);
6382 GEN_VXFORM(vsrb, 2, 8);
6383 GEN_VXFORM(vsrh, 2, 9);
6384 GEN_VXFORM(vsrw, 2, 10);
6385 GEN_VXFORM(vsrab, 2, 12);
6386 GEN_VXFORM(vsrah, 2, 13);
6387 GEN_VXFORM(vsraw, 2, 14);
6388 GEN_VXFORM(vslo, 6, 16);
6389 GEN_VXFORM(vsro, 6, 17);
6390 GEN_VXFORM(vaddcuw, 0, 6);
6391 GEN_VXFORM(vsubcuw, 0, 22);
6392 GEN_VXFORM(vaddubs, 0, 8);
6393 GEN_VXFORM(vadduhs, 0, 9);
6394 GEN_VXFORM(vadduws, 0, 10);
6395 GEN_VXFORM(vaddsbs, 0, 12);
6396 GEN_VXFORM(vaddshs, 0, 13);
6397 GEN_VXFORM(vaddsws, 0, 14);
6398 GEN_VXFORM(vsububs, 0, 24);
6399 GEN_VXFORM(vsubuhs, 0, 25);
6400 GEN_VXFORM(vsubuws, 0, 26);
6401 GEN_VXFORM(vsubsbs, 0, 28);
6402 GEN_VXFORM(vsubshs, 0, 29);
6403 GEN_VXFORM(vsubsws, 0, 30);
6404 GEN_VXFORM(vrlb, 2, 0);
6405 GEN_VXFORM(vrlh, 2, 1);
6406 GEN_VXFORM(vrlw, 2, 2);
6407 GEN_VXFORM(vsl, 2, 7);
6408 GEN_VXFORM(vsr, 2, 11);
6409 GEN_VXFORM(vpkuhum, 7, 0);
6410 GEN_VXFORM(vpkuwum, 7, 1);
6411 GEN_VXFORM(vpkuhus, 7, 2);
6412 GEN_VXFORM(vpkuwus, 7, 3);
6413 GEN_VXFORM(vpkshus, 7, 4);
6414 GEN_VXFORM(vpkswus, 7, 5);
6415 GEN_VXFORM(vpkshss, 7, 6);
6416 GEN_VXFORM(vpkswss, 7, 7);
6417 GEN_VXFORM(vpkpx, 7, 12);
6418 GEN_VXFORM(vsum4ubs, 4, 24);
6419 GEN_VXFORM(vsum4sbs, 4, 28);
6420 GEN_VXFORM(vsum4shs, 4, 25);
6421 GEN_VXFORM(vsum2sws, 4, 26);
6422 GEN_VXFORM(vsumsws, 4, 30);
6423 GEN_VXFORM(vaddfp, 5, 0);
6424 GEN_VXFORM(vsubfp, 5, 1);
6425 GEN_VXFORM(vmaxfp, 5, 16);
6426 GEN_VXFORM(vminfp, 5, 17);
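/* Each VXR-form comparison is generated in a plain and a record (".")
 * variant; the dot form is encoded with bit 4 of opc3 set and its helper
 * also updates CR6.
 */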
6428 #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
6429 static void glue(gen_, name)(DisasContext *ctx) \
6431 TCGv_ptr ra, rb, rd; \
6432 if (unlikely(!ctx->altivec_enabled)) { \
6433 gen_exception(ctx, POWERPC_EXCP_VPU); \
6434 return; \
6436 ra = gen_avr_ptr(rA(ctx->opcode)); \
6437 rb = gen_avr_ptr(rB(ctx->opcode)); \
6438 rd = gen_avr_ptr(rD(ctx->opcode)); \
6439 gen_helper_##opname (rd, ra, rb); \
6440 tcg_temp_free_ptr(ra); \
6441 tcg_temp_free_ptr(rb); \
6442 tcg_temp_free_ptr(rd); \
6445 #define GEN_VXRFORM(name, opc2, opc3) \
6446 GEN_VXRFORM1(name, name, #name, opc2, opc3) \
6447 GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
6449 GEN_VXRFORM(vcmpequb, 3, 0)
6450 GEN_VXRFORM(vcmpequh, 3, 1)
6451 GEN_VXRFORM(vcmpequw, 3, 2)
6452 GEN_VXRFORM(vcmpgtsb, 3, 12)
6453 GEN_VXRFORM(vcmpgtsh, 3, 13)
6454 GEN_VXRFORM(vcmpgtsw, 3, 14)
6455 GEN_VXRFORM(vcmpgtub, 3, 8)
6456 GEN_VXRFORM(vcmpgtuh, 3, 9)
6457 GEN_VXRFORM(vcmpgtuw, 3, 10)
6458 GEN_VXRFORM(vcmpeqfp, 3, 3)
6459 GEN_VXRFORM(vcmpgefp, 3, 7)
6460 GEN_VXRFORM(vcmpgtfp, 3, 11)
6461 GEN_VXRFORM(vcmpbfp, 3, 15)
6463 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
6464 static void glue(gen_, name)(DisasContext *ctx) \
6466 TCGv_ptr rd; \
6467 TCGv_i32 simm; \
6468 if (unlikely(!ctx->altivec_enabled)) { \
6469 gen_exception(ctx, POWERPC_EXCP_VPU); \
6470 return; \
6472 simm = tcg_const_i32(SIMM5(ctx->opcode)); \
6473 rd = gen_avr_ptr(rD(ctx->opcode)); \
6474 gen_helper_##name (rd, simm); \
6475 tcg_temp_free_i32(simm); \
6476 tcg_temp_free_ptr(rd); \
6479 GEN_VXFORM_SIMM(vspltisb, 6, 12);
6480 GEN_VXFORM_SIMM(vspltish, 6, 13);
6481 GEN_VXFORM_SIMM(vspltisw, 6, 14);
6483 #define GEN_VXFORM_NOA(name, opc2, opc3) \
6484 static void glue(gen_, name)(DisasContext *ctx) \
6486 TCGv_ptr rb, rd; \
6487 if (unlikely(!ctx->altivec_enabled)) { \
6488 gen_exception(ctx, POWERPC_EXCP_VPU); \
6489 return; \
6491 rb = gen_avr_ptr(rB(ctx->opcode)); \
6492 rd = gen_avr_ptr(rD(ctx->opcode)); \
6493 gen_helper_##name (rd, rb); \
6494 tcg_temp_free_ptr(rb); \
6495 tcg_temp_free_ptr(rd); \
6498 GEN_VXFORM_NOA(vupkhsb, 7, 8);
6499 GEN_VXFORM_NOA(vupkhsh, 7, 9);
6500 GEN_VXFORM_NOA(vupklsb, 7, 10);
6501 GEN_VXFORM_NOA(vupklsh, 7, 11);
6502 GEN_VXFORM_NOA(vupkhpx, 7, 13);
6503 GEN_VXFORM_NOA(vupklpx, 7, 15);
6504 GEN_VXFORM_NOA(vrefp, 5, 4);
6505 GEN_VXFORM_NOA(vrsqrtefp, 5, 5);
6506 GEN_VXFORM_NOA(vexptefp, 5, 6);
6507 GEN_VXFORM_NOA(vlogefp, 5, 7);
6508 GEN_VXFORM_NOA(vrfim, 5, 8);
6509 GEN_VXFORM_NOA(vrfin, 5, 9);
6510 GEN_VXFORM_NOA(vrfip, 5, 10);
6511 GEN_VXFORM_NOA(vrfiz, 5, 11);
6529 #define GEN_VXFORM_UIMM(name, opc2, opc3) \
6530 static void glue(gen_, name)(DisasContext *ctx) \
6532 TCGv_ptr rb, rd; \
6533 TCGv_i32 uimm; \
6534 if (unlikely(!ctx->altivec_enabled)) { \
6535 gen_exception(ctx, POWERPC_EXCP_VPU); \
6536 return; \
6538 uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
6539 rb = gen_avr_ptr(rB(ctx->opcode)); \
6540 rd = gen_avr_ptr(rD(ctx->opcode)); \
6541 gen_helper_##name (rd, rb, uimm); \
6542 tcg_temp_free_i32(uimm); \
6543 tcg_temp_free_ptr(rb); \
6544 tcg_temp_free_ptr(rd); \
6547 GEN_VXFORM_UIMM(vspltb, 6, 8);
6548 GEN_VXFORM_UIMM(vsplth, 6, 9);
6549 GEN_VXFORM_UIMM(vspltw, 6, 10);
6550 GEN_VXFORM_UIMM(vcfux, 5, 12);
6551 GEN_VXFORM_UIMM(vcfsx, 5, 13);
6552 GEN_VXFORM_UIMM(vctuxs, 5, 14);
6553 GEN_VXFORM_UIMM(vctsxs, 5, 15);
6555 static void gen_vsldoi(DisasContext *ctx)
6557 TCGv_ptr ra, rb, rd;
6558 TCGv_i32 sh;
6559 if (unlikely(!ctx->altivec_enabled)) {
6560 gen_exception(ctx, POWERPC_EXCP_VPU);
6561 return;
6563 ra = gen_avr_ptr(rA(ctx->opcode));
6564 rb = gen_avr_ptr(rB(ctx->opcode));
6565 rd = gen_avr_ptr(rD(ctx->opcode));
6566 sh = tcg_const_i32(VSH(ctx->opcode));
6567 gen_helper_vsldoi (rd, ra, rb, sh);
6568 tcg_temp_free_ptr(ra);
6569 tcg_temp_free_ptr(rb);
6570 tcg_temp_free_ptr(rd);
6571 tcg_temp_free_i32(sh);
6574 #define GEN_VAFORM_PAIRED(name0, name1, opc2) \
6575 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
6577 TCGv_ptr ra, rb, rc, rd; \
6578 if (unlikely(!ctx->altivec_enabled)) { \
6579 gen_exception(ctx, POWERPC_EXCP_VPU); \
6580 return; \
6582 ra = gen_avr_ptr(rA(ctx->opcode)); \
6583 rb = gen_avr_ptr(rB(ctx->opcode)); \
6584 rc = gen_avr_ptr(rC(ctx->opcode)); \
6585 rd = gen_avr_ptr(rD(ctx->opcode)); \
6586 if (Rc(ctx->opcode)) { \
6587 gen_helper_##name1 (rd, ra, rb, rc); \
6588 } else { \
6589 gen_helper_##name0 (rd, ra, rb, rc); \
6591 tcg_temp_free_ptr(ra); \
6592 tcg_temp_free_ptr(rb); \
6593 tcg_temp_free_ptr(rc); \
6594 tcg_temp_free_ptr(rd); \
6597 GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16)
6599 static void gen_vmladduhm(DisasContext *ctx)
6601 TCGv_ptr ra, rb, rc, rd;
6602 if (unlikely(!ctx->altivec_enabled)) {
6603 gen_exception(ctx, POWERPC_EXCP_VPU);
6604 return;
6606 ra = gen_avr_ptr(rA(ctx->opcode));
6607 rb = gen_avr_ptr(rB(ctx->opcode));
6608 rc = gen_avr_ptr(rC(ctx->opcode));
6609 rd = gen_avr_ptr(rD(ctx->opcode));
6610 gen_helper_vmladduhm(rd, ra, rb, rc);
6611 tcg_temp_free_ptr(ra);
6612 tcg_temp_free_ptr(rb);
6613 tcg_temp_free_ptr(rc);
6614 tcg_temp_free_ptr(rd);
6617 GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18)
6618 GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19)
6619 GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20)
6620 GEN_VAFORM_PAIRED(vsel, vperm, 21)
6621 GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23)
6623 /*** SPE extension ***/
6624 /* Register moves */
6627 static inline void gen_evmra(DisasContext *ctx)
6630 if (unlikely(!ctx->spe_enabled)) {
6631 gen_exception(ctx, POWERPC_EXCP_APU);
6632 return;
6635 #if defined(TARGET_PPC64)
6636 /* rD := rA */
6637 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6639 /* spe_acc := rA */
6640 tcg_gen_st_i64(cpu_gpr[rA(ctx->opcode)],
6641 cpu_env,
6642 offsetof(CPUState, spe_acc));
6643 #else
6644 TCGv_i64 tmp = tcg_temp_new_i64();
6646 /* tmp := rA_lo | (rA_hi << 32) */
6647 tcg_gen_concat_i32_i64(tmp, cpu_gpr[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6649 /* spe_acc := tmp */
6650 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));
6651 tcg_temp_free_i64(tmp);
6653 /* rD := rA */
6654 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6655 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6656 #endif
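/* Without TARGET_PPC64, each 64-bit SPE register is split across two 32-bit
 * TCG globals: cpu_gpr[] holds the low word and cpu_gprh[] the high word.
 * The two helpers below pack and unpack that pair into a single TCGv_i64.
 */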
6659 static inline void gen_load_gpr64(TCGv_i64 t, int reg)
6661 #if defined(TARGET_PPC64)
6662 tcg_gen_mov_i64(t, cpu_gpr[reg]);
6663 #else
6664 tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
6665 #endif
6668 static inline void gen_store_gpr64(int reg, TCGv_i64 t)
6670 #if defined(TARGET_PPC64)
6671 tcg_gen_mov_i64(cpu_gpr[reg], t);
6672 #else
6673 TCGv_i64 tmp = tcg_temp_new_i64();
6674 tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
6675 tcg_gen_shri_i64(tmp, t, 32);
6676 tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
6677 tcg_temp_free_i64(tmp);
6678 #endif
6681 #define GEN_SPE(name0, name1, opc2, opc3, inval, type) \
6682 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
6684 if (Rc(ctx->opcode)) \
6685 gen_##name1(ctx); \
6686 else \
6687 gen_##name0(ctx); \
6690 /* Handler for undefined SPE opcodes */
6691 static inline void gen_speundef(DisasContext *ctx)
6693 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6696 /* SPE logic */
6697 #if defined(TARGET_PPC64)
6698 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
6699 static inline void gen_##name(DisasContext *ctx) \
6701 if (unlikely(!ctx->spe_enabled)) { \
6702 gen_exception(ctx, POWERPC_EXCP_APU); \
6703 return; \
6705 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6706 cpu_gpr[rB(ctx->opcode)]); \
6708 #else
6709 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
6710 static inline void gen_##name(DisasContext *ctx) \
6712 if (unlikely(!ctx->spe_enabled)) { \
6713 gen_exception(ctx, POWERPC_EXCP_APU); \
6714 return; \
6716 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6717 cpu_gpr[rB(ctx->opcode)]); \
6718 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6719 cpu_gprh[rB(ctx->opcode)]); \
6721 #endif
6723 GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
6724 GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
6725 GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
6726 GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
6727 GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
6728 GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
6729 GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
6730 GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);
6732 /* SPE logic immediate */
6733 #if defined(TARGET_PPC64)
6734 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6735 static inline void gen_##name(DisasContext *ctx) \
6737 if (unlikely(!ctx->spe_enabled)) { \
6738 gen_exception(ctx, POWERPC_EXCP_APU); \
6739 return; \
6741 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6742 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6743 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6744 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6745 tcg_opi(t0, t0, rB(ctx->opcode)); \
6746 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6747 tcg_gen_trunc_i64_i32(t1, t2); \
6748 tcg_temp_free_i64(t2); \
6749 tcg_opi(t1, t1, rB(ctx->opcode)); \
6750 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6751 tcg_temp_free_i32(t0); \
6752 tcg_temp_free_i32(t1); \
6754 #else
6755 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6756 static inline void gen_##name(DisasContext *ctx) \
6758 if (unlikely(!ctx->spe_enabled)) { \
6759 gen_exception(ctx, POWERPC_EXCP_APU); \
6760 return; \
6762 tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6763 rB(ctx->opcode)); \
6764 tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6765 rB(ctx->opcode)); \
6767 #endif
6768 GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
6769 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
6770 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
6771 GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
6773 /* SPE arithmetic */
6774 #if defined(TARGET_PPC64)
6775 #define GEN_SPEOP_ARITH1(name, tcg_op) \
6776 static inline void gen_##name(DisasContext *ctx) \
6778 if (unlikely(!ctx->spe_enabled)) { \
6779 gen_exception(ctx, POWERPC_EXCP_APU); \
6780 return; \
6782 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6783 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6784 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6785 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6786 tcg_op(t0, t0); \
6787 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6788 tcg_gen_trunc_i64_i32(t1, t2); \
6789 tcg_temp_free_i64(t2); \
6790 tcg_op(t1, t1); \
6791 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6792 tcg_temp_free_i32(t0); \
6793 tcg_temp_free_i32(t1); \
6795 #else
6796 #define GEN_SPEOP_ARITH1(name, tcg_op) \
6797 static inline void gen_##name(DisasContext *ctx) \
6799 if (unlikely(!ctx->spe_enabled)) { \
6800 gen_exception(ctx, POWERPC_EXCP_APU); \
6801 return; \
6803 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); \
6804 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); \
6806 #endif
6808 static inline void gen_op_evabs(TCGv_i32 ret, TCGv_i32 arg1)
6810 int l1 = gen_new_label();
6811 int l2 = gen_new_label();
6813 tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
6814 tcg_gen_neg_i32(ret, arg1);
6815 tcg_gen_br(l2);
6816 gen_set_label(l1);
6817 tcg_gen_mov_i32(ret, arg1);
6818 gen_set_label(l2);
6820 GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
6821 GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
6822 GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
6823 GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
6824 static inline void gen_op_evrndw(TCGv_i32 ret, TCGv_i32 arg1)
6826 tcg_gen_addi_i32(ret, arg1, 0x8000);
6827 tcg_gen_ext16u_i32(ret, ret);
6829 GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
6830 GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
6831 GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
6833 #if defined(TARGET_PPC64)
6834 #define GEN_SPEOP_ARITH2(name, tcg_op) \
6835 static inline void gen_##name(DisasContext *ctx) \
6837 if (unlikely(!ctx->spe_enabled)) { \
6838 gen_exception(ctx, POWERPC_EXCP_APU); \
6839 return; \
6841 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6842 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6843 TCGv_i32 t2 = tcg_temp_local_new_i32(); \
6844 TCGv_i64 t3 = tcg_temp_local_new_i64(); \
6845 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6846 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]); \
6847 tcg_op(t0, t0, t2); \
6848 tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32); \
6849 tcg_gen_trunc_i64_i32(t1, t3); \
6850 tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32); \
6851 tcg_gen_trunc_i64_i32(t2, t3); \
6852 tcg_temp_free_i64(t3); \
6853 tcg_op(t1, t1, t2); \
6854 tcg_temp_free_i32(t2); \
6855 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6856 tcg_temp_free_i32(t0); \
6857 tcg_temp_free_i32(t1); \
6859 #else
6860 #define GEN_SPEOP_ARITH2(name, tcg_op) \
6861 static inline void gen_##name(DisasContext *ctx) \
6863 if (unlikely(!ctx->spe_enabled)) { \
6864 gen_exception(ctx, POWERPC_EXCP_APU); \
6865 return; \
6867 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6868 cpu_gpr[rB(ctx->opcode)]); \
6869 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6870 cpu_gprh[rB(ctx->opcode)]); \
6872 #endif
6874 static inline void gen_op_evsrwu(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6876 TCGv_i32 t0;
6877 int l1, l2;
6879 l1 = gen_new_label();
6880 l2 = gen_new_label();
6881 t0 = tcg_temp_local_new_i32();
6882 /* Intentionally keep 6 bits: shift counts >= 32 are handled below */
6883 tcg_gen_andi_i32(t0, arg2, 0x3F);
6884 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6885 tcg_gen_shr_i32(ret, arg1, t0);
6886 tcg_gen_br(l2);
6887 gen_set_label(l1);
6888 tcg_gen_movi_i32(ret, 0);
6889 gen_set_label(l2);
6890 tcg_temp_free_i32(t0);
6892 GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
6893 static inline void gen_op_evsrws(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6895 TCGv_i32 t0;
6896 int l1, l2;
6898 l1 = gen_new_label();
6899 l2 = gen_new_label();
6900 t0 = tcg_temp_local_new_i32();
6901 /* Intentionally keep 6 bits: shift counts >= 32 are handled below */
6902 tcg_gen_andi_i32(t0, arg2, 0x3F);
6903 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6904 tcg_gen_sar_i32(ret, arg1, t0);
6905 tcg_gen_br(l2);
6906 gen_set_label(l1);
6907 tcg_gen_movi_i32(ret, 0);
6908 gen_set_label(l2);
6909 tcg_temp_free_i32(t0);
6911 GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
6912 static inline void gen_op_evslw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6914 TCGv_i32 t0;
6915 int l1, l2;
6917 l1 = gen_new_label();
6918 l2 = gen_new_label();
6919 t0 = tcg_temp_local_new_i32();
6920 /* Intentionally keep 6 bits: shift counts >= 32 are handled below */
6921 tcg_gen_andi_i32(t0, arg2, 0x3F);
6922 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6923 tcg_gen_shl_i32(ret, arg1, t0);
6924 tcg_gen_br(l2);
6925 gen_set_label(l1);
6926 tcg_gen_movi_i32(ret, 0);
6927 gen_set_label(l2);
6928 tcg_temp_free_i32(t0);
6930 GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
6931 static inline void gen_op_evrlw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6933 TCGv_i32 t0 = tcg_temp_new_i32();
6934 tcg_gen_andi_i32(t0, arg2, 0x1F);
6935 tcg_gen_rotl_i32(ret, arg1, t0);
6936 tcg_temp_free_i32(t0);
6938 GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
6939 static inline void gen_evmergehi(DisasContext *ctx)
6941 if (unlikely(!ctx->spe_enabled)) {
6942 gen_exception(ctx, POWERPC_EXCP_APU);
6943 return;
6945 #if defined(TARGET_PPC64)
6946 TCGv t0 = tcg_temp_new();
6947 TCGv t1 = tcg_temp_new();
6948 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
6949 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
6950 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6951 tcg_temp_free(t0);
6952 tcg_temp_free(t1);
6953 #else
6954 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
6955 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6956 #endif
6958 GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
6959 static inline void gen_op_evsubf(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6961 tcg_gen_sub_i32(ret, arg2, arg1);
6963 GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);
6965 /* SPE arithmetic immediate */
6966 #if defined(TARGET_PPC64)
6967 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
6968 static inline void gen_##name(DisasContext *ctx) \
6970 if (unlikely(!ctx->spe_enabled)) { \
6971 gen_exception(ctx, POWERPC_EXCP_APU); \
6972 return; \
6974 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6975 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6976 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6977 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
6978 tcg_op(t0, t0, rA(ctx->opcode)); \
6979 tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
6980 tcg_gen_trunc_i64_i32(t1, t2); \
6981 tcg_temp_free_i64(t2); \
6982 tcg_op(t1, t1, rA(ctx->opcode)); \
6983 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6984 tcg_temp_free_i32(t0); \
6985 tcg_temp_free_i32(t1); \
6987 #else
6988 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
6989 static inline void gen_##name(DisasContext *ctx) \
6991 if (unlikely(!ctx->spe_enabled)) { \
6992 gen_exception(ctx, POWERPC_EXCP_APU); \
6993 return; \
6995 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
6996 rA(ctx->opcode)); \
6997 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)], \
6998 rA(ctx->opcode)); \
7000 #endif
7001 GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
7002 GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);
7004 /* SPE comparison */
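/* The ev* comparisons set a whole CR field: CRF_CH reflects the high-word
 * comparison, CRF_CL the low-word comparison, and the OR/AND summary bits
 * are derived from the two.
 */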
7005 #if defined(TARGET_PPC64)
7006 #define GEN_SPEOP_COMP(name, tcg_cond) \
7007 static inline void gen_##name(DisasContext *ctx) \
7009 if (unlikely(!ctx->spe_enabled)) { \
7010 gen_exception(ctx, POWERPC_EXCP_APU); \
7011 return; \
7013 int l1 = gen_new_label(); \
7014 int l2 = gen_new_label(); \
7015 int l3 = gen_new_label(); \
7016 int l4 = gen_new_label(); \
7017 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7018 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7019 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7020 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7021 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7022 tcg_gen_brcond_i32(tcg_cond, t0, t1, l1); \
7023 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \
7024 tcg_gen_br(l2); \
7025 gen_set_label(l1); \
7026 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
7027 CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
7028 gen_set_label(l2); \
7029 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
7030 tcg_gen_trunc_i64_i32(t0, t2); \
7031 tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
7032 tcg_gen_trunc_i64_i32(t1, t2); \
7033 tcg_temp_free_i64(t2); \
7034 tcg_gen_brcond_i32(tcg_cond, t0, t1, l3); \
7035 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7036 ~(CRF_CH | CRF_CH_AND_CL)); \
7037 tcg_gen_br(l4); \
7038 gen_set_label(l3); \
7039 tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7040 CRF_CH | CRF_CH_OR_CL); \
7041 gen_set_label(l4); \
7042 tcg_temp_free_i32(t0); \
7043 tcg_temp_free_i32(t1); \
7045 #else
7046 #define GEN_SPEOP_COMP(name, tcg_cond) \
7047 static inline void gen_##name(DisasContext *ctx) \
7049 if (unlikely(!ctx->spe_enabled)) { \
7050 gen_exception(ctx, POWERPC_EXCP_APU); \
7051 return; \
7053 int l1 = gen_new_label(); \
7054 int l2 = gen_new_label(); \
7055 int l3 = gen_new_label(); \
7056 int l4 = gen_new_label(); \
7058 tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)], \
7059 cpu_gpr[rB(ctx->opcode)], l1); \
7060 tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0); \
7061 tcg_gen_br(l2); \
7062 gen_set_label(l1); \
7063 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
7064 CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
7065 gen_set_label(l2); \
7066 tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)], \
7067 cpu_gprh[rB(ctx->opcode)], l3); \
7068 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7069 ~(CRF_CH | CRF_CH_AND_CL)); \
7070 tcg_gen_br(l4); \
7071 gen_set_label(l3); \
7072 tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
7073 CRF_CH | CRF_CH_OR_CL); \
7074 gen_set_label(l4); \
7076 #endif
7077 GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
7078 GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
7079 GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
7080 GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
7081 GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
7083 /* SPE misc */
7084 static inline void gen_brinc(DisasContext *ctx)
7086 /* Note: brinc is usable even if SPE is disabled */
7087 gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
7088 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7090 static inline void gen_evmergelo(DisasContext *ctx)
7092 if (unlikely(!ctx->spe_enabled)) {
7093 gen_exception(ctx, POWERPC_EXCP_APU);
7094 return;
7096 #if defined(TARGET_PPC64)
7097 TCGv t0 = tcg_temp_new();
7098 TCGv t1 = tcg_temp_new();
7099 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
7100 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
7101 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7102 tcg_temp_free(t0);
7103 tcg_temp_free(t1);
7104 #else
7105 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7106 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7107 #endif
7109 static inline void gen_evmergehilo(DisasContext *ctx)
7111 if (unlikely(!ctx->spe_enabled)) {
7112 gen_exception(ctx, POWERPC_EXCP_APU);
7113 return;
7115 #if defined(TARGET_PPC64)
7116 TCGv t0 = tcg_temp_new();
7117 TCGv t1 = tcg_temp_new();
7118 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
7119 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7120 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7121 tcg_temp_free(t0);
7122 tcg_temp_free(t1);
7123 #else
7124 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7125 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7126 #endif
7128 static inline void gen_evmergelohi(DisasContext *ctx)
7130 if (unlikely(!ctx->spe_enabled)) {
7131 gen_exception(ctx, POWERPC_EXCP_APU);
7132 return;
7134 #if defined(TARGET_PPC64)
7135 TCGv t0 = tcg_temp_new();
7136 TCGv t1 = tcg_temp_new();
7137 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
7138 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
7139 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
7140 tcg_temp_free(t0);
7141 tcg_temp_free(t1);
7142 #else
7143 if (rD(ctx->opcode) == rA(ctx->opcode)) {
7144 TCGv_i32 tmp = tcg_temp_new_i32();
7145 tcg_gen_mov_i32(tmp, cpu_gpr[rA(ctx->opcode)]);
7146 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7147 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], tmp);
7148 tcg_temp_free_i32(tmp);
7149 } else {
7150 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7151 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7153 #endif
7155 static inline void gen_evsplati(DisasContext *ctx)
7157 uint64_t imm = ((int32_t)(rA(ctx->opcode) << 27)) >> 27;
7159 #if defined(TARGET_PPC64)
7160 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
7161 #else
7162 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
7163 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
7164 #endif
7166 static inline void gen_evsplatfi(DisasContext *ctx)
7168 uint64_t imm = rA(ctx->opcode) << 27;
7170 #if defined(TARGET_PPC64)
7171 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
7172 #else
7173 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
7174 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
7175 #endif
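/* evsel picks the high word of rD from rA or rB according to one bit of the
 * selected CR field and the low word according to another.  The CR field is
 * taken from the low three opcode bits, which is why four otherwise
 * identical entry points are generated below.
 */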
7178 static inline void gen_evsel(DisasContext *ctx)
7180 int l1 = gen_new_label();
7181 int l2 = gen_new_label();
7182 int l3 = gen_new_label();
7183 int l4 = gen_new_label();
7184 TCGv_i32 t0 = tcg_temp_local_new_i32();
7185 #if defined(TARGET_PPC64)
7186 TCGv t1 = tcg_temp_local_new();
7187 TCGv t2 = tcg_temp_local_new();
7188 #endif
7189 tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
7190 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
7191 #if defined(TARGET_PPC64)
7192 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7193 #else
7194 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7195 #endif
7196 tcg_gen_br(l2);
7197 gen_set_label(l1);
7198 #if defined(TARGET_PPC64)
7199 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL);
7200 #else
7201 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
7202 #endif
7203 gen_set_label(l2);
7204 tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
7205 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
7206 #if defined(TARGET_PPC64)
7207 tcg_gen_ext32u_tl(t2, cpu_gpr[rA(ctx->opcode)]);
7208 #else
7209 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7210 #endif
7211 tcg_gen_br(l4);
7212 gen_set_label(l3);
7213 #if defined(TARGET_PPC64)
7214 tcg_gen_ext32u_tl(t2, cpu_gpr[rB(ctx->opcode)]);
7215 #else
7216 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7217 #endif
7218 gen_set_label(l4);
7219 tcg_temp_free_i32(t0);
7220 #if defined(TARGET_PPC64)
7221 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
7222 tcg_temp_free(t1);
7223 tcg_temp_free(t2);
7224 #endif
7227 static void gen_evsel0(DisasContext *ctx)
7229 gen_evsel(ctx);
7232 static void gen_evsel1(DisasContext *ctx)
7234 gen_evsel(ctx);
7237 static void gen_evsel2(DisasContext *ctx)
7239 gen_evsel(ctx);
7242 static void gen_evsel3(DisasContext *ctx)
7244 gen_evsel(ctx);
7247 /* Multiply */
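/* Naming convention for the multiplies below: the "a" forms also copy the
 * 64-bit product into the SPE accumulator, and the "aa" forms add the
 * product to the accumulator and write the sum back to rD.  All of them
 * reuse the plain evmwumi/evmwsmi generators for the multiplication itself.
 */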
7249 static inline void gen_evmwumi(DisasContext *ctx)
7251 TCGv_i64 t0, t1;
7253 if (unlikely(!ctx->spe_enabled)) {
7254 gen_exception(ctx, POWERPC_EXCP_APU);
7255 return;
7258 t0 = tcg_temp_new_i64();
7259 t1 = tcg_temp_new_i64();
7261 /* t0 := rA; t1 := rB */
7262 #if defined(TARGET_PPC64)
7263 tcg_gen_ext32u_tl(t0, cpu_gpr[rA(ctx->opcode)]);
7264 tcg_gen_ext32u_tl(t1, cpu_gpr[rB(ctx->opcode)]);
7265 #else
7266 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
7267 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
7268 #endif
7270 tcg_gen_mul_i64(t0, t0, t1); /* t0 := rA * rB */
7272 gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */
7274 tcg_temp_free_i64(t0);
7275 tcg_temp_free_i64(t1);
7278 static inline void gen_evmwumia(DisasContext *ctx)
7280 TCGv_i64 tmp;
7282 if (unlikely(!ctx->spe_enabled)) {
7283 gen_exception(ctx, POWERPC_EXCP_APU);
7284 return;
7287 gen_evmwumi(ctx); /* rD := rA * rB */
7289 tmp = tcg_temp_new_i64();
7291 /* acc := rD */
7292 gen_load_gpr64(tmp, rD(ctx->opcode));
7293 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));
7294 tcg_temp_free_i64(tmp);
7297 static inline void gen_evmwumiaa(DisasContext *ctx)
7299 TCGv_i64 acc;
7300 TCGv_i64 tmp;
7302 if (unlikely(!ctx->spe_enabled)) {
7303 gen_exception(ctx, POWERPC_EXCP_APU);
7304 return;
7307 gen_evmwumi(ctx); /* rD := rA * rB */
7309 acc = tcg_temp_new_i64();
7310 tmp = tcg_temp_new_i64();
7312 /* tmp := rD */
7313 gen_load_gpr64(tmp, rD(ctx->opcode));
7315 /* Load acc */
7316 tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUState, spe_acc));
7318 /* acc := tmp + acc */
7319 tcg_gen_add_i64(acc, acc, tmp);
7321 /* Store acc */
7322 tcg_gen_st_i64(acc, cpu_env, offsetof(CPUState, spe_acc));
7324 /* rD := acc */
7325 gen_store_gpr64(rD(ctx->opcode), acc);
7327 tcg_temp_free_i64(acc);
7328 tcg_temp_free_i64(tmp);
7331 static inline void gen_evmwsmi(DisasContext *ctx)
7333 TCGv_i64 t0, t1;
7335 if (unlikely(!ctx->spe_enabled)) {
7336 gen_exception(ctx, POWERPC_EXCP_APU);
7337 return;
7340 t0 = tcg_temp_new_i64();
7341 t1 = tcg_temp_new_i64();
7343 /* t0 := rA; t1 := rB */
7344 #if defined(TARGET_PPC64)
7345 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
7346 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
7347 #else
7348 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
7349 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
7350 #endif
7352 tcg_gen_mul_i64(t0, t0, t1); /* t0 := rA * rB */
7354 gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */
7356 tcg_temp_free_i64(t0);
7357 tcg_temp_free_i64(t1);
7360 static inline void gen_evmwsmia(DisasContext *ctx)
7362 TCGv_i64 tmp;
7364 gen_evmwsmi(ctx); /* rD := rA * rB */
7366 tmp = tcg_temp_new_i64();
7368 /* acc := rD */
7369 gen_load_gpr64(tmp, rD(ctx->opcode));
7370 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUState, spe_acc));
7372 tcg_temp_free_i64(tmp);
7375 static inline void gen_evmwsmiaa(DisasContext *ctx)
7377 TCGv_i64 acc;
7378 TCGv_i64 tmp;
7380 gen_evmwsmi(ctx); /* rD := rA * rB */
7382 acc = tcg_temp_new_i64();
7383 tmp = tcg_temp_new_i64();
7385 /* tmp := rD */
7386 gen_load_gpr64(tmp, rD(ctx->opcode));
7388 /* Load acc */
7389 tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUState, spe_acc));
7391 /* acc := tmp + acc */
7392 tcg_gen_add_i64(acc, acc, tmp);
7394 /* Store acc */
7395 tcg_gen_st_i64(acc, cpu_env, offsetof(CPUState, spe_acc));
7397 /* rD := acc */
7398 gen_store_gpr64(rD(ctx->opcode), acc);
7400 tcg_temp_free_i64(acc);
7401 tcg_temp_free_i64(tmp);
7404 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE); ////
7405 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE);
7406 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE); ////
7407 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE);
7408 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE); ////
7409 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE); ////
7410 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE); ////
7411 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE); //
7412 GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, PPC_SPE);
7413 GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE); ////
7414 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE); ////
7415 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE); ////
7416 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE); ////
7417 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE);
7418 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE);
7419 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE);
7420 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE); ////
7421 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE); ////
7422 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE); ////
7423 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE);
7424 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE); ////
7425 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE);
7426 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE); //
7427 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE);
7428 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE); ////
7429 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE); ////
7430 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); ////
7431 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); ////
7432 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); ////
7434 /* SPE load and stores */
7435 static inline void gen_addr_spe_imm_index(DisasContext *ctx, TCGv EA, int sh)
7437 target_ulong uimm = rB(ctx->opcode);
7439 if (rA(ctx->opcode) == 0) {
7440 tcg_gen_movi_tl(EA, uimm << sh);
7441 } else {
7442 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh);
7443 #if defined(TARGET_PPC64)
7444 if (!ctx->sf_mode) {
7445 tcg_gen_ext32u_tl(EA, EA);
7447 #endif
7451 static inline void gen_op_evldd(DisasContext *ctx, TCGv addr)
7453 #if defined(TARGET_PPC64)
7454 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7455 #else
7456 TCGv_i64 t0 = tcg_temp_new_i64();
7457 gen_qemu_ld64(ctx, t0, addr);
7458 tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0);
7459 tcg_gen_shri_i64(t0, t0, 32);
7460 tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0);
7461 tcg_temp_free_i64(t0);
7462 #endif
7465 static inline void gen_op_evldw(DisasContext *ctx, TCGv addr)
7467 #if defined(TARGET_PPC64)
7468 TCGv t0 = tcg_temp_new();
7469 gen_qemu_ld32u(ctx, t0, addr);
7470 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7471 gen_addr_add(ctx, addr, addr, 4);
7472 gen_qemu_ld32u(ctx, t0, addr);
7473 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7474 tcg_temp_free(t0);
7475 #else
7476 gen_qemu_ld32u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7477 gen_addr_add(ctx, addr, addr, 4);
7478 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7479 #endif
7482 static inline void gen_op_evldh(DisasContext *ctx, TCGv addr)
7484 TCGv t0 = tcg_temp_new();
7485 #if defined(TARGET_PPC64)
7486 gen_qemu_ld16u(ctx, t0, addr);
7487 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7488 gen_addr_add(ctx, addr, addr, 2);
7489 gen_qemu_ld16u(ctx, t0, addr);
7490 tcg_gen_shli_tl(t0, t0, 32);
7491 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7492 gen_addr_add(ctx, addr, addr, 2);
7493 gen_qemu_ld16u(ctx, t0, addr);
7494 tcg_gen_shli_tl(t0, t0, 16);
7495 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7496 gen_addr_add(ctx, addr, addr, 2);
7497 gen_qemu_ld16u(ctx, t0, addr);
7498 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7499 #else
7500 gen_qemu_ld16u(ctx, t0, addr);
7501 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7502 gen_addr_add(ctx, addr, addr, 2);
7503 gen_qemu_ld16u(ctx, t0, addr);
7504 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
7505 gen_addr_add(ctx, addr, addr, 2);
7506 gen_qemu_ld16u(ctx, t0, addr);
7507 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7508 gen_addr_add(ctx, addr, addr, 2);
7509 gen_qemu_ld16u(ctx, t0, addr);
7510 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7511 #endif
7512 tcg_temp_free(t0);
7515 static inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr)
7517 TCGv t0 = tcg_temp_new();
7518 gen_qemu_ld16u(ctx, t0, addr);
7519 #if defined(TARGET_PPC64)
7520 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7521 tcg_gen_shli_tl(t0, t0, 16);
7522 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7523 #else
7524 tcg_gen_shli_tl(t0, t0, 16);
7525 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7526 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7527 #endif
7528 tcg_temp_free(t0);
7531 static inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr)
7533 TCGv t0 = tcg_temp_new();
7534 gen_qemu_ld16u(ctx, t0, addr);
7535 #if defined(TARGET_PPC64)
7536 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7537 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7538 #else
7539 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7540 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7541 #endif
7542 tcg_temp_free(t0);
7545 static inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr)
7547 TCGv t0 = tcg_temp_new();
7548 gen_qemu_ld16s(ctx, t0, addr);
7549 #if defined(TARGET_PPC64)
7550 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7551 tcg_gen_ext32u_tl(t0, t0);
7552 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7553 #else
7554 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7555 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7556 #endif
7557 tcg_temp_free(t0);
7560 static inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr)
7562 TCGv t0 = tcg_temp_new();
7563 #if defined(TARGET_PPC64)
7564 gen_qemu_ld16u(ctx, t0, addr);
7565 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7566 gen_addr_add(ctx, addr, addr, 2);
7567 gen_qemu_ld16u(ctx, t0, addr);
7568 tcg_gen_shli_tl(t0, t0, 16);
7569 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7570 #else
7571 gen_qemu_ld16u(ctx, t0, addr);
7572 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7573 gen_addr_add(ctx, addr, addr, 2);
7574 gen_qemu_ld16u(ctx, t0, addr);
7575 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7576 #endif
7577 tcg_temp_free(t0);
7580 static inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr)
7582 #if defined(TARGET_PPC64)
7583 TCGv t0 = tcg_temp_new();
7584 gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7585 gen_addr_add(ctx, addr, addr, 2);
7586 gen_qemu_ld16u(ctx, t0, addr);
7587 tcg_gen_shli_tl(t0, t0, 32);
7588 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7589 tcg_temp_free(t0);
7590 #else
7591 gen_qemu_ld16u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7592 gen_addr_add(ctx, addr, addr, 2);
7593 gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7594 #endif
7597 static inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr)
7599 #if defined(TARGET_PPC64)
7600 TCGv t0 = tcg_temp_new();
7601 gen_qemu_ld16s(ctx, t0, addr);
7602 tcg_gen_ext32u_tl(cpu_gpr[rD(ctx->opcode)], t0);
7603 gen_addr_add(ctx, addr, addr, 2);
7604 gen_qemu_ld16s(ctx, t0, addr);
7605 tcg_gen_shli_tl(t0, t0, 32);
7606 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7607 tcg_temp_free(t0);
7608 #else
7609 gen_qemu_ld16s(ctx, cpu_gprh[rD(ctx->opcode)], addr);
7610 gen_addr_add(ctx, addr, addr, 2);
7611 gen_qemu_ld16s(ctx, cpu_gpr[rD(ctx->opcode)], addr);
7612 #endif
7615 static inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr)
7617 TCGv t0 = tcg_temp_new();
7618 gen_qemu_ld32u(ctx, t0, addr);
7619 #if defined(TARGET_PPC64)
7620 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
7621 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7622 #else
7623 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
7624 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
7625 #endif
7626 tcg_temp_free(t0);
7629 static inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr)
7631 TCGv t0 = tcg_temp_new();
7632 #if defined(TARGET_PPC64)
7633 gen_qemu_ld16u(ctx, t0, addr);
7634 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
7635 tcg_gen_shli_tl(t0, t0, 32);
7636 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7637 gen_addr_add(ctx, addr, addr, 2);
7638 gen_qemu_ld16u(ctx, t0, addr);
7639 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7640 tcg_gen_shli_tl(t0, t0, 16);
7641 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7642 #else
7643 gen_qemu_ld16u(ctx, t0, addr);
7644 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
7645 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
7646 gen_addr_add(ctx, addr, addr, 2);
7647 gen_qemu_ld16u(ctx, t0, addr);
7648 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
7649 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
7650 #endif
7651 tcg_temp_free(t0);
7654 static inline void gen_op_evstdd(DisasContext *ctx, TCGv addr)
7656 #if defined(TARGET_PPC64)
7657 gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7658 #else
7659 TCGv_i64 t0 = tcg_temp_new_i64();
7660 tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)], cpu_gprh[rS(ctx->opcode)]);
7661 gen_qemu_st64(ctx, t0, addr);
7662 tcg_temp_free_i64(t0);
7663 #endif
7666 static inline void gen_op_evstdw(DisasContext *ctx, TCGv addr)
7668 #if defined(TARGET_PPC64)
7669 TCGv t0 = tcg_temp_new();
7670 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7671 gen_qemu_st32(ctx, t0, addr);
7672 tcg_temp_free(t0);
7673 #else
7674 gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7675 #endif
7676 gen_addr_add(ctx, addr, addr, 4);
7677 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7680 static inline void gen_op_evstdh(DisasContext *ctx, TCGv addr)
7682 TCGv t0 = tcg_temp_new();
7683 #if defined(TARGET_PPC64)
7684 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
7685 #else
7686 tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
7687 #endif
7688 gen_qemu_st16(ctx, t0, addr);
7689 gen_addr_add(ctx, addr, addr, 2);
7690 #if defined(TARGET_PPC64)
7691 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7692 gen_qemu_st16(ctx, t0, addr);
7693 #else
7694 gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7695 #endif
7696 gen_addr_add(ctx, addr, addr, 2);
7697 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
7698 gen_qemu_st16(ctx, t0, addr);
7699 tcg_temp_free(t0);
7700 gen_addr_add(ctx, addr, addr, 2);
7701 gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7704 static inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr)
7706 TCGv t0 = tcg_temp_new();
7707 #if defined(TARGET_PPC64)
7708 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
7709 #else
7710 tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
7711 #endif
7712 gen_qemu_st16(ctx, t0, addr);
7713 gen_addr_add(ctx, addr, addr, 2);
7714 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
7715 gen_qemu_st16(ctx, t0, addr);
7716 tcg_temp_free(t0);
7719 static inline void gen_op_evstwho(DisasContext *ctx, TCGv addr)
7721 #if defined(TARGET_PPC64)
7722 TCGv t0 = tcg_temp_new();
7723 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7724 gen_qemu_st16(ctx, t0, addr);
7725 tcg_temp_free(t0);
7726 #else
7727 gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7728 #endif
7729 gen_addr_add(ctx, addr, addr, 2);
7730 gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
7733 static inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr)
7735 #if defined(TARGET_PPC64)
7736 TCGv t0 = tcg_temp_new();
7737 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
7738 gen_qemu_st32(ctx, t0, addr);
7739 tcg_temp_free(t0);
7740 #else
7741 gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
7742 #endif
7745 static inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr)
7747 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
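/* Common dispatcher for the SPE loads and stores: Rc selects between the
 * immediate-indexed form (UIMM scaled by 1 << sh) and the register-indexed
 * form, then the per-width gen_op_* routine above performs the access.
 */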
7750 #define GEN_SPEOP_LDST(name, opc2, sh) \
7751 static void glue(gen_, name)(DisasContext *ctx) \
7753 TCGv t0; \
7754 if (unlikely(!ctx->spe_enabled)) { \
7755 gen_exception(ctx, POWERPC_EXCP_APU); \
7756 return; \
7758 gen_set_access_type(ctx, ACCESS_INT); \
7759 t0 = tcg_temp_new(); \
7760 if (Rc(ctx->opcode)) { \
7761 gen_addr_spe_imm_index(ctx, t0, sh); \
7762 } else { \
7763 gen_addr_reg_index(ctx, t0); \
7765 gen_op_##name(ctx, t0); \
7766 tcg_temp_free(t0); \
7769 GEN_SPEOP_LDST(evldd, 0x00, 3);
7770 GEN_SPEOP_LDST(evldw, 0x01, 3);
7771 GEN_SPEOP_LDST(evldh, 0x02, 3);
7772 GEN_SPEOP_LDST(evlhhesplat, 0x04, 1);
7773 GEN_SPEOP_LDST(evlhhousplat, 0x06, 1);
7774 GEN_SPEOP_LDST(evlhhossplat, 0x07, 1);
7775 GEN_SPEOP_LDST(evlwhe, 0x08, 2);
7776 GEN_SPEOP_LDST(evlwhou, 0x0A, 2);
7777 GEN_SPEOP_LDST(evlwhos, 0x0B, 2);
7778 GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2);
7779 GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2);
7781 GEN_SPEOP_LDST(evstdd, 0x10, 3);
7782 GEN_SPEOP_LDST(evstdw, 0x11, 3);
7783 GEN_SPEOP_LDST(evstdh, 0x12, 3);
7784 GEN_SPEOP_LDST(evstwhe, 0x18, 2);
7785 GEN_SPEOP_LDST(evstwho, 0x1A, 2);
7786 GEN_SPEOP_LDST(evstwwe, 0x1C, 2);
7787 GEN_SPEOP_LDST(evstwwo, 0x1E, 2);
7789 /* Multiply and add - TODO */
7790 #if 0
7791 GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0x00000000, PPC_SPE);
7792 GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0x00000000, PPC_SPE);
7793 GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, PPC_SPE);
7794 GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0x00000000, PPC_SPE);
7795 GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, PPC_SPE);
7796 GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0x00000000, PPC_SPE);
7797 GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0x00000000, PPC_SPE);
7798 GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0x00000000, PPC_SPE);
7799 GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, PPC_SPE);
7800 GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0x00000000, PPC_SPE);
7801 GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, PPC_SPE);
7802 GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0x00000000, PPC_SPE);
7804 GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0x00000000, PPC_SPE);
7805 GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, PPC_SPE);
7806 GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, PPC_SPE);
7807 GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0x00000000, PPC_SPE);
7808 GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0x00000000, PPC_SPE);
7809 GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0x00000000, PPC_SPE);
7810 GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0x00000000, PPC_SPE);
7811 GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, PPC_SPE);
7812 GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, PPC_SPE);
7813 GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0x00000000, PPC_SPE);
7814 GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0x00000000, PPC_SPE);
7815 GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0x00000000, PPC_SPE);
7817 GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, PPC_SPE);
7818 GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, PPC_SPE);
7819 GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, PPC_SPE);
7820 GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, PPC_SPE);
7821 GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, PPC_SPE);
7823 GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, PPC_SPE);
7824 GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0x00000000, PPC_SPE);
7825 GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, PPC_SPE);
7826 GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0x00000000, PPC_SPE);
7827 GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, PPC_SPE);
7828 GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0x00000000, PPC_SPE);
7829 GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, PPC_SPE);
7830 GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0x00000000, PPC_SPE);
7831 GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, PPC_SPE);
7832 GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0x00000000, PPC_SPE);
7833 GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, PPC_SPE);
7834 GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0x00000000, PPC_SPE);
7836 GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, PPC_SPE);
7837 GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, PPC_SPE);
7838 GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0x00000000, PPC_SPE);
7839 GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0x00000000, PPC_SPE);
7841 GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, PPC_SPE);
7842 GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0x00000000, PPC_SPE);
7843 GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, PPC_SPE);
7844 GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0x00000000, PPC_SPE);
7845 GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, PPC_SPE);
7846 GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0x00000000, PPC_SPE);
7847 GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, PPC_SPE);
7848 GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0x00000000, PPC_SPE);
7849 GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, PPC_SPE);
7850 GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0x00000000, PPC_SPE);
7851 GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, PPC_SPE);
7852 GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0x00000000, PPC_SPE);
7854 GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, PPC_SPE);
7855 GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, PPC_SPE);
7856 GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0x00000000, PPC_SPE);
7857 GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, PPC_SPE);
7858 GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
7859 #endif
7861 /*** SPE floating-point extension ***/
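/* The GEN_SPEFPUOP_* macros below wrap the SPE floating-point helpers; the
 * two numbers in each name are the destination and source widths.  On
 * 64-bit targets an SPE pair sits in one 64-bit GPR, so 32-bit results are
 * merged into the low half of rD; on 32-bit targets the high half lives in
 * cpu_gprh and 64-bit values go through gen_load_gpr64()/gen_store_gpr64(). */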
7862 #if defined(TARGET_PPC64)
7863 #define GEN_SPEFPUOP_CONV_32_32(name) \
7864 static inline void gen_##name(DisasContext *ctx) \
7865 { \
7866 TCGv_i32 t0; \
7867 TCGv t1; \
7868 t0 = tcg_temp_new_i32(); \
7869 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7870 gen_helper_##name(t0, t0); \
7871 t1 = tcg_temp_new(); \
7872 tcg_gen_extu_i32_tl(t1, t0); \
7873 tcg_temp_free_i32(t0); \
7874 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7875 0xFFFFFFFF00000000ULL); \
7876 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
7877 tcg_temp_free(t1); \
7878 }
7879 #define GEN_SPEFPUOP_CONV_32_64(name) \
7880 static inline void gen_##name(DisasContext *ctx) \
7881 { \
7882 TCGv_i32 t0; \
7883 TCGv t1; \
7884 t0 = tcg_temp_new_i32(); \
7885 gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]); \
7886 t1 = tcg_temp_new(); \
7887 tcg_gen_extu_i32_tl(t1, t0); \
7888 tcg_temp_free_i32(t0); \
7889 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7890 0xFFFFFFFF00000000ULL); \
7891 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
7892 tcg_temp_free(t1); \
7893 }
7894 #define GEN_SPEFPUOP_CONV_64_32(name) \
7895 static inline void gen_##name(DisasContext *ctx) \
7896 { \
7897 TCGv_i32 t0 = tcg_temp_new_i32(); \
7898 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7899 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0); \
7900 tcg_temp_free_i32(t0); \
7901 }
7902 #define GEN_SPEFPUOP_CONV_64_64(name) \
7903 static inline void gen_##name(DisasContext *ctx) \
7904 { \
7905 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7906 }
7907 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
7908 static inline void gen_##name(DisasContext *ctx) \
7909 { \
7910 TCGv_i32 t0, t1; \
7911 TCGv_i64 t2; \
7912 if (unlikely(!ctx->spe_enabled)) { \
7913 gen_exception(ctx, POWERPC_EXCP_APU); \
7914 return; \
7915 } \
7916 t0 = tcg_temp_new_i32(); \
7917 t1 = tcg_temp_new_i32(); \
7918 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7919 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7920 gen_helper_##name(t0, t0, t1); \
7921 tcg_temp_free_i32(t1); \
7922 t2 = tcg_temp_new(); \
7923 tcg_gen_extu_i32_tl(t2, t0); \
7924 tcg_temp_free_i32(t0); \
7925 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7926 0xFFFFFFFF00000000ULL); \
7927 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t2); \
7928 tcg_temp_free(t2); \
7929 }
7930 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
7931 static inline void gen_##name(DisasContext *ctx) \
7932 { \
7933 if (unlikely(!ctx->spe_enabled)) { \
7934 gen_exception(ctx, POWERPC_EXCP_APU); \
7935 return; \
7936 } \
7937 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7938 cpu_gpr[rB(ctx->opcode)]); \
7939 }
7940 #define GEN_SPEFPUOP_COMP_32(name) \
7941 static inline void gen_##name(DisasContext *ctx) \
7942 { \
7943 TCGv_i32 t0, t1; \
7944 if (unlikely(!ctx->spe_enabled)) { \
7945 gen_exception(ctx, POWERPC_EXCP_APU); \
7946 return; \
7947 } \
7948 t0 = tcg_temp_new_i32(); \
7949 t1 = tcg_temp_new_i32(); \
7950 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7951 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7952 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1); \
7953 tcg_temp_free_i32(t0); \
7954 tcg_temp_free_i32(t1); \
7955 }
7956 #define GEN_SPEFPUOP_COMP_64(name) \
7957 static inline void gen_##name(DisasContext *ctx) \
7958 { \
7959 if (unlikely(!ctx->spe_enabled)) { \
7960 gen_exception(ctx, POWERPC_EXCP_APU); \
7961 return; \
7962 } \
7963 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \
7964 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7965 }
7966 #else
7967 #define GEN_SPEFPUOP_CONV_32_32(name) \
7968 static inline void gen_##name(DisasContext *ctx) \
7969 { \
7970 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7971 }
7972 #define GEN_SPEFPUOP_CONV_32_64(name) \
7973 static inline void gen_##name(DisasContext *ctx) \
7974 { \
7975 TCGv_i64 t0 = tcg_temp_new_i64(); \
7976 gen_load_gpr64(t0, rB(ctx->opcode)); \
7977 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0); \
7978 tcg_temp_free_i64(t0); \
7979 }
7980 #define GEN_SPEFPUOP_CONV_64_32(name) \
7981 static inline void gen_##name(DisasContext *ctx) \
7982 { \
7983 TCGv_i64 t0 = tcg_temp_new_i64(); \
7984 gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]); \
7985 gen_store_gpr64(rD(ctx->opcode), t0); \
7986 tcg_temp_free_i64(t0); \
7987 }
7988 #define GEN_SPEFPUOP_CONV_64_64(name) \
7989 static inline void gen_##name(DisasContext *ctx) \
7990 { \
7991 TCGv_i64 t0 = tcg_temp_new_i64(); \
7992 gen_load_gpr64(t0, rB(ctx->opcode)); \
7993 gen_helper_##name(t0, t0); \
7994 gen_store_gpr64(rD(ctx->opcode), t0); \
7995 tcg_temp_free_i64(t0); \
7996 }
7997 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
7998 static inline void gen_##name(DisasContext *ctx) \
7999 { \
8000 if (unlikely(!ctx->spe_enabled)) { \
8001 gen_exception(ctx, POWERPC_EXCP_APU); \
8002 return; \
8003 } \
8004 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], \
8005 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
8006 }
8007 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
8008 static inline void gen_##name(DisasContext *ctx) \
8009 { \
8010 TCGv_i64 t0, t1; \
8011 if (unlikely(!ctx->spe_enabled)) { \
8012 gen_exception(ctx, POWERPC_EXCP_APU); \
8013 return; \
8014 } \
8015 t0 = tcg_temp_new_i64(); \
8016 t1 = tcg_temp_new_i64(); \
8017 gen_load_gpr64(t0, rA(ctx->opcode)); \
8018 gen_load_gpr64(t1, rB(ctx->opcode)); \
8019 gen_helper_##name(t0, t0, t1); \
8020 gen_store_gpr64(rD(ctx->opcode), t0); \
8021 tcg_temp_free_i64(t0); \
8022 tcg_temp_free_i64(t1); \
8023 }
8024 #define GEN_SPEFPUOP_COMP_32(name) \
8025 static inline void gen_##name(DisasContext *ctx) \
8026 { \
8027 if (unlikely(!ctx->spe_enabled)) { \
8028 gen_exception(ctx, POWERPC_EXCP_APU); \
8029 return; \
8030 } \
8031 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \
8032 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
8033 }
8034 #define GEN_SPEFPUOP_COMP_64(name) \
8035 static inline void gen_##name(DisasContext *ctx) \
8036 { \
8037 TCGv_i64 t0, t1; \
8038 if (unlikely(!ctx->spe_enabled)) { \
8039 gen_exception(ctx, POWERPC_EXCP_APU); \
8040 return; \
8041 } \
8042 t0 = tcg_temp_new_i64(); \
8043 t1 = tcg_temp_new_i64(); \
8044 gen_load_gpr64(t0, rA(ctx->opcode)); \
8045 gen_load_gpr64(t1, rB(ctx->opcode)); \
8046 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1); \
8047 tcg_temp_free_i64(t0); \
8048 tcg_temp_free_i64(t1); \
8049 }
8050 #endif
8052 /* Single precision floating-point vector operations */
8053 /* Arithmetic */
8054 GEN_SPEFPUOP_ARITH2_64_64(evfsadd);
8055 GEN_SPEFPUOP_ARITH2_64_64(evfssub);
8056 GEN_SPEFPUOP_ARITH2_64_64(evfsmul);
8057 GEN_SPEFPUOP_ARITH2_64_64(evfsdiv);
8058 static inline void gen_evfsabs(DisasContext *ctx)
8059 {
8060 if (unlikely(!ctx->spe_enabled)) {
8061 gen_exception(ctx, POWERPC_EXCP_APU);
8062 return;
8063 }
8064 #if defined(TARGET_PPC64)
8065 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000080000000LL);
8066 #else
8067 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x80000000);
8068 tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
8069 #endif
8070 }
8071 static inline void gen_evfsnabs(DisasContext *ctx)
8072 {
8073 if (unlikely(!ctx->spe_enabled)) {
8074 gen_exception(ctx, POWERPC_EXCP_APU);
8075 return;
8076 }
8077 #if defined(TARGET_PPC64)
8078 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
8079 #else
8080 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8081 tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8082 #endif
8083 }
8084 static inline void gen_evfsneg(DisasContext *ctx)
8085 {
8086 if (unlikely(!ctx->spe_enabled)) {
8087 gen_exception(ctx, POWERPC_EXCP_APU);
8088 return;
8089 }
8090 #if defined(TARGET_PPC64)
8091 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
8092 #else
8093 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8094 tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8095 #endif
8096 }
8098 /* Conversion */
8099 GEN_SPEFPUOP_CONV_64_64(evfscfui);
8100 GEN_SPEFPUOP_CONV_64_64(evfscfsi);
8101 GEN_SPEFPUOP_CONV_64_64(evfscfuf);
8102 GEN_SPEFPUOP_CONV_64_64(evfscfsf);
8103 GEN_SPEFPUOP_CONV_64_64(evfsctui);
8104 GEN_SPEFPUOP_CONV_64_64(evfsctsi);
8105 GEN_SPEFPUOP_CONV_64_64(evfsctuf);
8106 GEN_SPEFPUOP_CONV_64_64(evfsctsf);
8107 GEN_SPEFPUOP_CONV_64_64(evfsctuiz);
8108 GEN_SPEFPUOP_CONV_64_64(evfsctsiz);
8110 /* Comparison */
8111 GEN_SPEFPUOP_COMP_64(evfscmpgt);
8112 GEN_SPEFPUOP_COMP_64(evfscmplt);
8113 GEN_SPEFPUOP_COMP_64(evfscmpeq);
8114 GEN_SPEFPUOP_COMP_64(evfststgt);
8115 GEN_SPEFPUOP_COMP_64(evfststlt);
8116 GEN_SPEFPUOP_COMP_64(evfststeq);
8118 /* Opcode definitions */
8119 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPE_SINGLE); //
8120 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPE_SINGLE); //
8121 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPE_SINGLE); //
8122 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPE_SINGLE); //
8123 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8124 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8125 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8126 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8127 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8128 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8129 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8130 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPE_SINGLE); //
8131 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8132 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPE_SINGLE); //
8134 /* Single precision floating-point operations */
8135 /* Arithmetic */
8136 GEN_SPEFPUOP_ARITH2_32_32(efsadd);
8137 GEN_SPEFPUOP_ARITH2_32_32(efssub);
8138 GEN_SPEFPUOP_ARITH2_32_32(efsmul);
8139 GEN_SPEFPUOP_ARITH2_32_32(efsdiv);
8140 static inline void gen_efsabs(DisasContext *ctx)
8141 {
8142 if (unlikely(!ctx->spe_enabled)) {
8143 gen_exception(ctx, POWERPC_EXCP_APU);
8144 return;
8145 }
8146 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], (target_long)~0x80000000LL);
8147 }
8148 static inline void gen_efsnabs(DisasContext *ctx)
8149 {
8150 if (unlikely(!ctx->spe_enabled)) {
8151 gen_exception(ctx, POWERPC_EXCP_APU);
8152 return;
8153 }
8154 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8155 }
8156 static inline void gen_efsneg(DisasContext *ctx)
8157 {
8158 if (unlikely(!ctx->spe_enabled)) {
8159 gen_exception(ctx, POWERPC_EXCP_APU);
8160 return;
8161 }
8162 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
8163 }
8165 /* Conversion */
8166 GEN_SPEFPUOP_CONV_32_32(efscfui);
8167 GEN_SPEFPUOP_CONV_32_32(efscfsi);
8168 GEN_SPEFPUOP_CONV_32_32(efscfuf);
8169 GEN_SPEFPUOP_CONV_32_32(efscfsf);
8170 GEN_SPEFPUOP_CONV_32_32(efsctui);
8171 GEN_SPEFPUOP_CONV_32_32(efsctsi);
8172 GEN_SPEFPUOP_CONV_32_32(efsctuf);
8173 GEN_SPEFPUOP_CONV_32_32(efsctsf);
8174 GEN_SPEFPUOP_CONV_32_32(efsctuiz);
8175 GEN_SPEFPUOP_CONV_32_32(efsctsiz);
8176 GEN_SPEFPUOP_CONV_32_64(efscfd);
8178 /* Comparison */
8179 GEN_SPEFPUOP_COMP_32(efscmpgt);
8180 GEN_SPEFPUOP_COMP_32(efscmplt);
8181 GEN_SPEFPUOP_COMP_32(efscmpeq);
8182 GEN_SPEFPUOP_COMP_32(efststgt);
8183 GEN_SPEFPUOP_COMP_32(efststlt);
8184 GEN_SPEFPUOP_COMP_32(efststeq);
8186 /* Opcode definitions */
8187 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPE_SINGLE); //
8188 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPE_SINGLE); //
8189 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPE_SINGLE); //
8190 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPE_SINGLE); //
8191 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8192 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8193 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8194 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8195 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8196 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8197 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8198 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPE_SINGLE); //
8199 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8200 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPE_SINGLE); //
8202 /* Double precision floating-point operations */
8203 /* Arithmetic */
8204 GEN_SPEFPUOP_ARITH2_64_64(efdadd);
8205 GEN_SPEFPUOP_ARITH2_64_64(efdsub);
8206 GEN_SPEFPUOP_ARITH2_64_64(efdmul);
8207 GEN_SPEFPUOP_ARITH2_64_64(efddiv);
8208 static inline void gen_efdabs(DisasContext *ctx)
8209 {
8210 if (unlikely(!ctx->spe_enabled)) {
8211 gen_exception(ctx, POWERPC_EXCP_APU);
8212 return;
8213 }
8214 #if defined(TARGET_PPC64)
8215 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000000000000LL);
8216 #else
8217 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8218 tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
8219 #endif
8220 }
8221 static inline void gen_efdnabs(DisasContext *ctx)
8222 {
8223 if (unlikely(!ctx->spe_enabled)) {
8224 gen_exception(ctx, POWERPC_EXCP_APU);
8225 return;
8226 }
8227 #if defined(TARGET_PPC64)
8228 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
8229 #else
8230 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8231 tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8232 #endif
8233 }
8234 static inline void gen_efdneg(DisasContext *ctx)
8235 {
8236 if (unlikely(!ctx->spe_enabled)) {
8237 gen_exception(ctx, POWERPC_EXCP_APU);
8238 return;
8239 }
8240 #if defined(TARGET_PPC64)
8241 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
8242 #else
8243 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8244 tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
8245 #endif
8246 }
8248 /* Conversion */
8249 GEN_SPEFPUOP_CONV_64_32(efdcfui);
8250 GEN_SPEFPUOP_CONV_64_32(efdcfsi);
8251 GEN_SPEFPUOP_CONV_64_32(efdcfuf);
8252 GEN_SPEFPUOP_CONV_64_32(efdcfsf);
8253 GEN_SPEFPUOP_CONV_32_64(efdctui);
8254 GEN_SPEFPUOP_CONV_32_64(efdctsi);
8255 GEN_SPEFPUOP_CONV_32_64(efdctuf);
8256 GEN_SPEFPUOP_CONV_32_64(efdctsf);
8257 GEN_SPEFPUOP_CONV_32_64(efdctuiz);
8258 GEN_SPEFPUOP_CONV_32_64(efdctsiz);
8259 GEN_SPEFPUOP_CONV_64_32(efdcfs);
8260 GEN_SPEFPUOP_CONV_64_64(efdcfuid);
8261 GEN_SPEFPUOP_CONV_64_64(efdcfsid);
8262 GEN_SPEFPUOP_CONV_64_64(efdctuidz);
8263 GEN_SPEFPUOP_CONV_64_64(efdctsidz);
8265 /* Comparison */
8266 GEN_SPEFPUOP_COMP_64(efdcmpgt);
8267 GEN_SPEFPUOP_COMP_64(efdcmplt);
8268 GEN_SPEFPUOP_COMP_64(efdcmpeq);
8269 GEN_SPEFPUOP_COMP_64(efdtstgt);
8270 GEN_SPEFPUOP_COMP_64(efdtstlt);
8271 GEN_SPEFPUOP_COMP_64(efdtsteq);
8273 /* Opcode definitions */
8274 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPE_DOUBLE); //
8275 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8276 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPE_DOUBLE); //
8277 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPE_DOUBLE); //
8278 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPE_DOUBLE); //
8279 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8280 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
8281 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
8282 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8283 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8284 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8285 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8286 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8287 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPE_DOUBLE); //
8288 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
8289 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPE_DOUBLE); //
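/* Opcode table: each entry below registers one of the handlers defined
 * above, together with its opcode fields, an inval mask of encoding bits
 * that are expected to be clear, and the instruction-set flags a CPU model
 * must provide for the handler to be installed. */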
8291 static opcode_t opcodes[] = {
8292 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
8293 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
8294 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8295 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER),
8296 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
8297 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
8298 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8299 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8300 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8301 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8302 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
8303 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
8304 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
8305 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
8306 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8307 #if defined(TARGET_PPC64)
8308 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
8309 #endif
8310 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
8311 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
8312 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8313 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8314 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8315 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
8316 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
8317 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
8318 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8319 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8320 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8321 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8322 GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB),
8323 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
8324 #if defined(TARGET_PPC64)
8325 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
8326 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
8327 #endif
8328 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8329 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8330 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8331 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
8332 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
8333 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
8334 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
8335 #if defined(TARGET_PPC64)
8336 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
8337 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
8338 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
8339 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
8340 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
8341 #endif
8342 GEN_HANDLER(frsqrtes, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES),
8343 GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8344 GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
8345 GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT),
8346 GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT),
8347 GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT),
8348 GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT),
8349 GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT),
8350 GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT),
8351 GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT),
8352 GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x00010000, PPC_FLOAT),
8353 GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT),
8354 #if defined(TARGET_PPC64)
8355 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
8356 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
8357 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
8358 #endif
8359 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8360 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8361 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
8362 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
8363 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
8364 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
8365 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO),
8366 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
8367 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
8368 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
8369 #if defined(TARGET_PPC64)
8370 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
8371 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
8372 #endif
8373 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
8374 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
8375 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8376 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8377 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
8378 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
8379 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
8380 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
8381 #if defined(TARGET_PPC64)
8382 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
8383 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
8384 #endif
8385 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW),
8386 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
8387 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8388 #if defined(TARGET_PPC64)
8389 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
8390 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
8391 #endif
8392 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
8393 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
8394 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
8395 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
8396 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
8397 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
8398 #if defined(TARGET_PPC64)
8399 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
8400 #endif
8401 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC),
8402 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC),
8403 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
8404 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
8405 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
8406 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE),
8407 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE),
8408 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03E00001, PPC_CACHE_DCBZ),
8409 GEN_HANDLER2(dcbz_970, "dcbz", 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZT),
8410 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
8411 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC),
8412 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
8413 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
8414 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
8415 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
8416 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
8417 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
8418 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
8419 #if defined(TARGET_PPC64)
8420 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
8421 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
8422 PPC_SEGMENT_64B),
8423 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
8424 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
8425 PPC_SEGMENT_64B),
8426 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
8427 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
8428 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
8429 #endif
8430 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
8431 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x03FF0001, PPC_MEM_TLBIE),
8432 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE),
8433 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
8434 #if defined(TARGET_PPC64)
8435 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI),
8436 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
8437 #endif
8438 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
8439 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
8440 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
8441 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
8442 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
8443 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
8444 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
8445 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
8446 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
8447 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
8448 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
8449 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8450 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
8451 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
8452 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
8453 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
8454 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
8455 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
8456 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
8457 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8458 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
8459 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
8460 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
8461 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
8462 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
8463 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
8464 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
8465 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
8466 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
8467 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
8468 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
8469 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
8470 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
8471 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
8472 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
8473 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
8474 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
8475 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
8476 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
8477 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
8478 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
8479 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
8480 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
8481 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
8482 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
8483 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
8484 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
8485 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
8486 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
8487 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8488 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8489 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
8490 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
8491 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8492 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8493 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
8494 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
8495 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
8496 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
8497 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
8498 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
8499 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
8500 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
8501 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
8502 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
8503 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
8504 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
8505 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
8506 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
8507 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
8508 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
8509 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
8510 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
8511 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
8512 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
8513 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
8514 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
8515 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
8516 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
8517 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
8518 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
8519 PPC_NONE, PPC2_BOOKE206),
8520 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
8521 PPC_NONE, PPC2_BOOKE206),
8522 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
8523 PPC_NONE, PPC2_BOOKE206),
8524 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
8525 PPC_NONE, PPC2_BOOKE206),
8526 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
8527 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
8528 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
8529 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
8530 PPC_BOOKE, PPC2_BOOKE206),
8531 GEN_HANDLER_E(msync, 0x1F, 0x16, 0x12, 0x03FFF801,
8532 PPC_BOOKE, PPC2_BOOKE206),
8533 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
8534 PPC_BOOKE, PPC2_BOOKE206),
8535 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
8536 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
8537 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
8538 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
8539 GEN_HANDLER(vsldoi, 0x04, 0x16, 0xFF, 0x00000400, PPC_ALTIVEC),
8540 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
8541 GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE),
8542 GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE),
8543 GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE),
8544 GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE),
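/* The remaining entries reuse the generator macros from earlier in the
 * file: each macro is #undef'd and redefined to expand to GEN_HANDLER
 * table entries, so the per-family instruction lists do not have to be
 * written out a second time. */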
8546 #undef GEN_INT_ARITH_ADD
8547 #undef GEN_INT_ARITH_ADD_CONST
8548 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
8549 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
8550 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
8551 add_ca, compute_ca, compute_ov) \
8552 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
8553 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
8554 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
8555 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
8556 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
8557 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
8558 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
8559 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
8560 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
8561 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
8562 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
8564 #undef GEN_INT_ARITH_DIVW
8565 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
8566 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
8567 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
8568 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
8569 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
8570 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
8572 #if defined(TARGET_PPC64)
8573 #undef GEN_INT_ARITH_DIVD
8574 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
8575 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8576 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
8577 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
8578 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
8579 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
8581 #undef GEN_INT_ARITH_MUL_HELPER
8582 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
8583 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8584 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
8585 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
8586 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
8587 #endif
8589 #undef GEN_INT_ARITH_SUBF
8590 #undef GEN_INT_ARITH_SUBF_CONST
8591 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
8592 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
8593 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
8594 add_ca, compute_ca, compute_ov) \
8595 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
8596 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
8597 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
8598 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
8599 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
8600 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
8601 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
8602 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
8603 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
8604 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
8605 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
8607 #undef GEN_LOGICAL1
8608 #undef GEN_LOGICAL2
8609 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
8610 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
8611 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
8612 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
8613 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
8614 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
8615 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
8616 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
8617 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
8618 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
8619 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
8620 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
8621 #if defined(TARGET_PPC64)
8622 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
8623 #endif
8625 #if defined(TARGET_PPC64)
8626 #undef GEN_PPC64_R2
8627 #undef GEN_PPC64_R4
8628 #define GEN_PPC64_R2(name, opc1, opc2) \
8629 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8630 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8631 PPC_64B)
8632 #define GEN_PPC64_R4(name, opc1, opc2) \
8633 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8634 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
8635 PPC_64B), \
8636 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8637 PPC_64B), \
8638 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
8639 PPC_64B)
8640 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
8641 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
8642 GEN_PPC64_R4(rldic, 0x1E, 0x04),
8643 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
8644 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
8645 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
8646 #endif
8648 #undef _GEN_FLOAT_ACB
8649 #undef GEN_FLOAT_ACB
8650 #undef _GEN_FLOAT_AB
8651 #undef GEN_FLOAT_AB
8652 #undef _GEN_FLOAT_AC
8653 #undef GEN_FLOAT_AC
8654 #undef GEN_FLOAT_B
8655 #undef GEN_FLOAT_BS
8656 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
8657 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type)
8658 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
8659 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type), \
8660 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type)
8661 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
8662 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
8663 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
8664 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
8665 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
8666 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
8667 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
8668 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
8669 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
8670 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
8671 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
8672 GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type)
8673 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
8674 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type)
8676 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT),
8677 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT),
8678 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT),
8679 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT),
8680 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES),
8681 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE),
8682 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL),
8683 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT),
8684 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT),
8685 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT),
8686 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT),
8687 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT),
8688 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT),
8689 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT),
8690 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT),
8691 #if defined(TARGET_PPC64)
8692 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B),
8693 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B),
8694 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B),
8695 #endif
8696 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT),
8697 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT),
8698 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT),
8699 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT),
8700 GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT),
8701 GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT),
8702 GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT),
8704 #undef GEN_LD
8705 #undef GEN_LDU
8706 #undef GEN_LDUX
8707 #undef GEN_LDX
8708 #undef GEN_LDS
8709 #define GEN_LD(name, ldop, opc, type) \
8710 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8711 #define GEN_LDU(name, ldop, opc, type) \
8712 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8713 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
8714 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8715 #define GEN_LDX(name, ldop, opc2, opc3, type) \
8716 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8717 #define GEN_LDS(name, ldop, op, type) \
8718 GEN_LD(name, ldop, op | 0x20, type) \
8719 GEN_LDU(name, ldop, op | 0x21, type) \
8720 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
8721 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
8723 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
8724 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
8725 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
8726 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
8727 #if defined(TARGET_PPC64)
8728 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
8729 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
8730 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B)
8731 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B)
8732 #endif
8733 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
8734 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
8736 #undef GEN_ST
8737 #undef GEN_STU
8738 #undef GEN_STUX
8739 #undef GEN_STX
8740 #undef GEN_STS
8741 #define GEN_ST(name, stop, opc, type) \
8742 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8743 #define GEN_STU(name, stop, opc, type) \
8744 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
8745 #define GEN_STUX(name, stop, opc2, opc3, type) \
8746 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8747 #define GEN_STX(name, stop, opc2, opc3, type) \
8748 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8749 #define GEN_STS(name, stop, op, type) \
8750 GEN_ST(name, stop, op | 0x20, type) \
8751 GEN_STU(name, stop, op | 0x21, type) \
8752 GEN_STUX(name, stop, 0x17, op | 0x01, type) \
8753 GEN_STX(name, stop, 0x17, op | 0x00, type)
8755 GEN_STS(stb, st8, 0x06, PPC_INTEGER)
8756 GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
8757 GEN_STS(stw, st32, 0x04, PPC_INTEGER)
8758 #if defined(TARGET_PPC64)
8759 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B)
8760 GEN_STX(std, st64, 0x15, 0x04, PPC_64B)
8761 #endif
8762 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
8763 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
8765 #undef GEN_LDF
8766 #undef GEN_LDUF
8767 #undef GEN_LDUXF
8768 #undef GEN_LDXF
8769 #undef GEN_LDFS
8770 #define GEN_LDF(name, ldop, opc, type) \
8771 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8772 #define GEN_LDUF(name, ldop, opc, type) \
8773 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8774 #define GEN_LDUXF(name, ldop, opc, type) \
8775 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
8776 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
8777 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8778 #define GEN_LDFS(name, ldop, op, type) \
8779 GEN_LDF(name, ldop, op | 0x20, type) \
8780 GEN_LDUF(name, ldop, op | 0x21, type) \
8781 GEN_LDUXF(name, ldop, op | 0x01, type) \
8782 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
8784 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT)
8785 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT)
8787 #undef GEN_STF
8788 #undef GEN_STUF
8789 #undef GEN_STUXF
8790 #undef GEN_STXF
8791 #undef GEN_STFS
8792 #define GEN_STF(name, stop, opc, type) \
8793 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8794 #define GEN_STUF(name, stop, opc, type) \
8795 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8796 #define GEN_STUXF(name, stop, opc, type) \
8797 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
8798 #define GEN_STXF(name, stop, opc2, opc3, type) \
8799 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
8800 #define GEN_STFS(name, stop, op, type) \
8801 GEN_STF(name, stop, op | 0x20, type) \
8802 GEN_STUF(name, stop, op | 0x21, type) \
8803 GEN_STUXF(name, stop, op | 0x01, type) \
8804 GEN_STXF(name, stop, 0x17, op | 0x00, type)
8806 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT)
8807 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT)
8808 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX)
8810 #undef GEN_CRLOGIC
8811 #define GEN_CRLOGIC(name, tcg_op, opc) \
8812 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
8813 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
8814 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
8815 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
8816 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
8817 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
8818 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
8819 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
8820 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
8822 #undef GEN_MAC_HANDLER
8823 #define GEN_MAC_HANDLER(name, opc2, opc3) \
8824 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
8825 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
8826 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
8827 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
8828 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
8829 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
8830 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
8831 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
8832 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
8833 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
8834 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
8835 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
8836 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
8837 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
8838 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
8839 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
8840 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
8841 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
8842 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
8843 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
8844 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
8845 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
8846 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
8847 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
8848 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
8849 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
8850 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
8851 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
8852 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
8853 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
8854 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
8855 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
8856 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
8857 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
8858 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
8859 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
8860 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
8861 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
8862 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
8863 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
8864 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
8865 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
8866 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
8868 #undef GEN_VR_LDX
8869 #undef GEN_VR_STX
8870 #undef GEN_VR_LVE
8871 #undef GEN_VR_STVE
8872 #define GEN_VR_LDX(name, opc2, opc3) \
8873 GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8874 #define GEN_VR_STX(name, opc2, opc3) \
8875 GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8876 #define GEN_VR_LVE(name, opc2, opc3) \
8877 GEN_HANDLER(lve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8878 #define GEN_VR_STVE(name, opc2, opc3) \
8879 GEN_HANDLER(stve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
8880 GEN_VR_LDX(lvx, 0x07, 0x03),
8881 GEN_VR_LDX(lvxl, 0x07, 0x0B),
8882 GEN_VR_LVE(bx, 0x07, 0x00),
8883 GEN_VR_LVE(hx, 0x07, 0x01),
8884 GEN_VR_LVE(wx, 0x07, 0x02),
8885 GEN_VR_STX(svx, 0x07, 0x07),
8886 GEN_VR_STX(svxl, 0x07, 0x0F),
8887 GEN_VR_STVE(bx, 0x07, 0x04),
8888 GEN_VR_STVE(hx, 0x07, 0x05),
8889 GEN_VR_STVE(wx, 0x07, 0x06),
8891 #undef GEN_VX_LOGICAL
8892 #define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
8893 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
8894 GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16),
8895 GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17),
8896 GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18),
8897 GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19),
8898 GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20),
8900 #undef GEN_VXFORM
8901 #define GEN_VXFORM(name, opc2, opc3) \
8902 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
8903 GEN_VXFORM(vaddubm, 0, 0),
8904 GEN_VXFORM(vadduhm, 0, 1),
8905 GEN_VXFORM(vadduwm, 0, 2),
8906 GEN_VXFORM(vsububm, 0, 16),
8907 GEN_VXFORM(vsubuhm, 0, 17),
8908 GEN_VXFORM(vsubuwm, 0, 18),
8909 GEN_VXFORM(vmaxub, 1, 0),
8910 GEN_VXFORM(vmaxuh, 1, 1),
8911 GEN_VXFORM(vmaxuw, 1, 2),
8912 GEN_VXFORM(vmaxsb, 1, 4),
8913 GEN_VXFORM(vmaxsh, 1, 5),
8914 GEN_VXFORM(vmaxsw, 1, 6),
8915 GEN_VXFORM(vminub, 1, 8),
8916 GEN_VXFORM(vminuh, 1, 9),
8917 GEN_VXFORM(vminuw, 1, 10),
8918 GEN_VXFORM(vminsb, 1, 12),
8919 GEN_VXFORM(vminsh, 1, 13),
8920 GEN_VXFORM(vminsw, 1, 14),
8921 GEN_VXFORM(vavgub, 1, 16),
8922 GEN_VXFORM(vavguh, 1, 17),
8923 GEN_VXFORM(vavguw, 1, 18),
8924 GEN_VXFORM(vavgsb, 1, 20),
8925 GEN_VXFORM(vavgsh, 1, 21),
8926 GEN_VXFORM(vavgsw, 1, 22),
8927 GEN_VXFORM(vmrghb, 6, 0),
8928 GEN_VXFORM(vmrghh, 6, 1),
8929 GEN_VXFORM(vmrghw, 6, 2),
8930 GEN_VXFORM(vmrglb, 6, 4),
8931 GEN_VXFORM(vmrglh, 6, 5),
8932 GEN_VXFORM(vmrglw, 6, 6),
8933 GEN_VXFORM(vmuloub, 4, 0),
8934 GEN_VXFORM(vmulouh, 4, 1),
8935 GEN_VXFORM(vmulosb, 4, 4),
8936 GEN_VXFORM(vmulosh, 4, 5),
8937 GEN_VXFORM(vmuleub, 4, 8),
8938 GEN_VXFORM(vmuleuh, 4, 9),
8939 GEN_VXFORM(vmulesb, 4, 12),
8940 GEN_VXFORM(vmulesh, 4, 13),
8941 GEN_VXFORM(vslb, 2, 4),
8942 GEN_VXFORM(vslh, 2, 5),
8943 GEN_VXFORM(vslw, 2, 6),
8944 GEN_VXFORM(vsrb, 2, 8),
8945 GEN_VXFORM(vsrh, 2, 9),
8946 GEN_VXFORM(vsrw, 2, 10),
8947 GEN_VXFORM(vsrab, 2, 12),
8948 GEN_VXFORM(vsrah, 2, 13),
8949 GEN_VXFORM(vsraw, 2, 14),
8950 GEN_VXFORM(vslo, 6, 16),
8951 GEN_VXFORM(vsro, 6, 17),
8952 GEN_VXFORM(vaddcuw, 0, 6),
8953 GEN_VXFORM(vsubcuw, 0, 22),
8954 GEN_VXFORM(vaddubs, 0, 8),
8955 GEN_VXFORM(vadduhs, 0, 9),
8956 GEN_VXFORM(vadduws, 0, 10),
8957 GEN_VXFORM(vaddsbs, 0, 12),
8958 GEN_VXFORM(vaddshs, 0, 13),
8959 GEN_VXFORM(vaddsws, 0, 14),
8960 GEN_VXFORM(vsububs, 0, 24),
8961 GEN_VXFORM(vsubuhs, 0, 25),
8962 GEN_VXFORM(vsubuws, 0, 26),
8963 GEN_VXFORM(vsubsbs, 0, 28),
8964 GEN_VXFORM(vsubshs, 0, 29),
8965 GEN_VXFORM(vsubsws, 0, 30),
8966 GEN_VXFORM(vrlb, 2, 0),
8967 GEN_VXFORM(vrlh, 2, 1),
8968 GEN_VXFORM(vrlw, 2, 2),
8969 GEN_VXFORM(vsl, 2, 7),
8970 GEN_VXFORM(vsr, 2, 11),
8971 GEN_VXFORM(vpkuhum, 7, 0),
8972 GEN_VXFORM(vpkuwum, 7, 1),
8973 GEN_VXFORM(vpkuhus, 7, 2),
8974 GEN_VXFORM(vpkuwus, 7, 3),
8975 GEN_VXFORM(vpkshus, 7, 4),
8976 GEN_VXFORM(vpkswus, 7, 5),
8977 GEN_VXFORM(vpkshss, 7, 6),
8978 GEN_VXFORM(vpkswss, 7, 7),
8979 GEN_VXFORM(vpkpx, 7, 12),
8980 GEN_VXFORM(vsum4ubs, 4, 24),
8981 GEN_VXFORM(vsum4sbs, 4, 28),
8982 GEN_VXFORM(vsum4shs, 4, 25),
8983 GEN_VXFORM(vsum2sws, 4, 26),
8984 GEN_VXFORM(vsumsws, 4, 30),
8985 GEN_VXFORM(vaddfp, 5, 0),
8986 GEN_VXFORM(vsubfp, 5, 1),
8987 GEN_VXFORM(vmaxfp, 5, 16),
8988 GEN_VXFORM(vminfp, 5, 17),
8990 #undef GEN_VXRFORM1
8991 #undef GEN_VXRFORM
8992 #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
8993 GEN_HANDLER2(name, str, 0x4, opc2, opc3, 0x00000000, PPC_ALTIVEC),
8994 #define GEN_VXRFORM(name, opc2, opc3) \
8995 GEN_VXRFORM1(name, name, #name, opc2, opc3) \
8996 GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
8997 GEN_VXRFORM(vcmpequb, 3, 0)
8998 GEN_VXRFORM(vcmpequh, 3, 1)
8999 GEN_VXRFORM(vcmpequw, 3, 2)
9000 GEN_VXRFORM(vcmpgtsb, 3, 12)
9001 GEN_VXRFORM(vcmpgtsh, 3, 13)
9002 GEN_VXRFORM(vcmpgtsw, 3, 14)
9003 GEN_VXRFORM(vcmpgtub, 3, 8)
9004 GEN_VXRFORM(vcmpgtuh, 3, 9)
9005 GEN_VXRFORM(vcmpgtuw, 3, 10)
9006 GEN_VXRFORM(vcmpeqfp, 3, 3)
9007 GEN_VXRFORM(vcmpgefp, 3, 7)
9008 GEN_VXRFORM(vcmpgtfp, 3, 11)
9009 GEN_VXRFORM(vcmpbfp, 3, 15)
9011 #undef GEN_VXFORM_SIMM
9012 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
9013 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9014 GEN_VXFORM_SIMM(vspltisb, 6, 12),
9015 GEN_VXFORM_SIMM(vspltish, 6, 13),
9016 GEN_VXFORM_SIMM(vspltisw, 6, 14),
9018 #undef GEN_VXFORM_NOA
9019 #define GEN_VXFORM_NOA(name, opc2, opc3) \
9020 GEN_HANDLER(name, 0x04, opc2, opc3, 0x001f0000, PPC_ALTIVEC)
9021 GEN_VXFORM_NOA(vupkhsb, 7, 8),
9022 GEN_VXFORM_NOA(vupkhsh, 7, 9),
9023 GEN_VXFORM_NOA(vupklsb, 7, 10),
9024 GEN_VXFORM_NOA(vupklsh, 7, 11),
9025 GEN_VXFORM_NOA(vupkhpx, 7, 13),
9026 GEN_VXFORM_NOA(vupklpx, 7, 15),
9027 GEN_VXFORM_NOA(vrefp, 5, 4),
9028 GEN_VXFORM_NOA(vrsqrtefp, 5, 5),
9029 GEN_VXFORM_NOA(vexptefp, 5, 6),
9030 GEN_VXFORM_NOA(vlogefp, 5, 7),
9031 GEN_VXFORM_NOA(vrfim, 5, 8),
9032 GEN_VXFORM_NOA(vrfin, 5, 9),
9033 GEN_VXFORM_NOA(vrfip, 5, 10),
9034 GEN_VXFORM_NOA(vrfiz, 5, 11),
9036 #undef GEN_VXFORM_UIMM
9037 #define GEN_VXFORM_UIMM(name, opc2, opc3) \
9038 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
9039 GEN_VXFORM_UIMM(vspltb, 6, 8),
9040 GEN_VXFORM_UIMM(vsplth, 6, 9),
9041 GEN_VXFORM_UIMM(vspltw, 6, 10),
9042 GEN_VXFORM_UIMM(vcfux, 5, 12),
9043 GEN_VXFORM_UIMM(vcfsx, 5, 13),
9044 GEN_VXFORM_UIMM(vctuxs, 5, 14),
9045 GEN_VXFORM_UIMM(vctsxs, 5, 15),
9047 #undef GEN_VAFORM_PAIRED
9048 #define GEN_VAFORM_PAIRED(name0, name1, opc2) \
9049 GEN_HANDLER(name0##_##name1, 0x04, opc2, 0xFF, 0x00000000, PPC_ALTIVEC)
9050 GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16),
9051 GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18),
9052 GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19),
9053 GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20),
9054 GEN_VAFORM_PAIRED(vsel, vperm, 21),
9055 GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23),
#undef GEN_SPE
#define GEN_SPE(name0, name1, opc2, opc3, inval, type)                  \
    GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type)
GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE),
GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE),
GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE),
GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, PPC_SPE),
GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE),
GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE),
GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE),
GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE),
GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE),
GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE),
GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE),

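/* SPE embedded floating point: vector single precision (evfs*), then
 * scalar single precision (efs*), then scalar double precision (efd*). */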
GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPE_SINGLE),
GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPE_SINGLE),
GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPE_SINGLE),

GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPE_SINGLE),
GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPE_SINGLE),
GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPE_SINGLE),

GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPE_DOUBLE),
GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPE_DOUBLE),
GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPE_DOUBLE),
GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPE_DOUBLE),
GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPE_DOUBLE),
GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPE_DOUBLE),

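/* SPE 64-bit vector load/store opcodes (opc3 = 0x0C).  The third argument
 * is the log2 of the access size; it is unused by this table definition of
 * the macro and only matters where the macro generates the handlers. */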
#undef GEN_SPEOP_LDST
#define GEN_SPEOP_LDST(name, opc2, sh)                                  \
    GEN_HANDLER(name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE)
GEN_SPEOP_LDST(evldd, 0x00, 3),
GEN_SPEOP_LDST(evldw, 0x01, 3),
GEN_SPEOP_LDST(evldh, 0x02, 3),
GEN_SPEOP_LDST(evlhhesplat, 0x04, 1),
GEN_SPEOP_LDST(evlhhousplat, 0x06, 1),
GEN_SPEOP_LDST(evlhhossplat, 0x07, 1),
GEN_SPEOP_LDST(evlwhe, 0x08, 2),
GEN_SPEOP_LDST(evlwhou, 0x0A, 2),
GEN_SPEOP_LDST(evlwhos, 0x0B, 2),
GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2),
GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2),

GEN_SPEOP_LDST(evstdd, 0x10, 3),
GEN_SPEOP_LDST(evstdw, 0x11, 3),
GEN_SPEOP_LDST(evstdh, 0x12, 3),
GEN_SPEOP_LDST(evstwhe, 0x18, 2),
GEN_SPEOP_LDST(evstwho, 0x1A, 2),
GEN_SPEOP_LDST(evstwwe, 0x1C, 2),
GEN_SPEOP_LDST(evstwwo, 0x1E, 2),
};

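/* translate_init.c supplies the CPU model definitions and SPR setup;
 * helper_regs.h provides the hreg_* helpers for MSR and hflags updates. */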
#include "translate_init.c"
#include "helper_regs.h"

/*****************************************************************************/
/* Misc PowerPC helpers */
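/* Dump the architected register state, e.g. for the monitor's
 * "info registers" command and for -d cpu logging. */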
void cpu_dump_state (CPUState *env, FILE *f, fprintf_function cpu_fprintf,
                     int flags)
{
#define RGPL 4
#define RFPL 4

    int i;

    cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
                TARGET_FMT_lx " XER " TARGET_FMT_lx "\n",
                env->nip, env->lr, env->ctr, env->xer);
    cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
                TARGET_FMT_lx " idx %d\n", env->msr, env->spr[SPR_HID0],
                env->hflags, env->mmu_idx);
#if !defined(NO_TIMER_DUMP)
    cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
#if !defined(CONFIG_USER_ONLY)
                " DECR %08" PRIu32
#endif
                "\n",
                cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
#if !defined(CONFIG_USER_ONLY)
                , cpu_ppc_load_decr(env)
#endif
                );
#endif
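    /* General-purpose registers, RGPL (4) per row; ppc_dump_gpr() folds in
     * the SPE high word for SPE-capable 32-bit CPUs. */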
    for (i = 0; i < 32; i++) {
        if ((i & (RGPL - 1)) == 0)
            cpu_fprintf(f, "GPR%02d", i);
        cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
        if ((i & (RGPL - 1)) == (RGPL - 1))
            cpu_fprintf(f, "\n");
    }
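    /* Condition register: the hex value of each CR field, then a decoded
     * view where L/G/E mark the LT/GT/EQ bits and a trailing O marks SO. */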
    cpu_fprintf(f, "CR ");
    for (i = 0; i < 8; i++)
        cpu_fprintf(f, "%01x", env->crf[i]);
    cpu_fprintf(f, " [");
    for (i = 0; i < 8; i++) {
        char a = '-';
        if (env->crf[i] & 0x08)
            a = 'L';
        else if (env->crf[i] & 0x04)
            a = 'G';
        else if (env->crf[i] & 0x02)
            a = 'E';
        cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
    }
    cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
                env->reserve_addr);
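    /* Floating-point registers as raw 64-bit images, RFPL (4) per row,
     * followed by the FPSCR. */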
    for (i = 0; i < 32; i++) {
        if ((i & (RFPL - 1)) == 0)
            cpu_fprintf(f, "FPR%02d", i);
        cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
        if ((i & (RFPL - 1)) == (RFPL - 1))
            cpu_fprintf(f, "\n");
    }
    cpu_fprintf(f, "FPSCR %08x\n", env->fpscr);
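    /* Supervisor SPRs are only available (and only dumped) in system
     * emulation builds. */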
#if !defined(CONFIG_USER_ONLY)
    cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx
                " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n",
                env->spr[SPR_SRR0], env->spr[SPR_SRR1],
                env->spr[SPR_PVR], env->spr[SPR_VRSAVE]);

    cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx
                " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n",
                env->spr[SPR_SPRG0], env->spr[SPR_SPRG1],
                env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]);

    cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx
                " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n",
                env->spr[SPR_SPRG4], env->spr[SPR_SPRG5],
                env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]);

    if (env->excp_model == POWERPC_EXCP_BOOKE) {
        cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx
                    " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1],
                    env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]);

        cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx
                    " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR],
                    env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]);

        cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx
                    " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR],
                    env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]);

        cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx
                    " EPR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8],
                    env->spr[SPR_BOOKE_EPR]);

        /* FSL-specific */
        cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx
                    " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n",
                    env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1],
                    env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]);

        /*
         * IVORs are left out as they are large and do not change often --
         * they can be read with "p $ivor0", "p $ivor1", etc.
         */
    }

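    /* MMU configuration registers differ by MMU model: the hash-table MMUs
     * expose SDR1, while BookE 2.06 exposes the MAS and TLBnCFG registers. */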
    switch (env->mmu_model) {
    case POWERPC_MMU_32B:
    case POWERPC_MMU_601:
    case POWERPC_MMU_SOFT_6xx:
    case POWERPC_MMU_SOFT_74xx:
#if defined(TARGET_PPC64)
    case POWERPC_MMU_620:
    case POWERPC_MMU_64B:
#endif
        cpu_fprintf(f, " SDR1 " TARGET_FMT_lx "\n", env->spr[SPR_SDR1]);
        break;
    case POWERPC_MMU_BOOKE206:
        cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx
                    " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1],
                    env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]);

        cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx
                    " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6],
                    env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]);

        cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx
                    " TLB1CFG " TARGET_FMT_lx "\n",
                    env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG],
                    env->spr[SPR_BOOKE_TLB1CFG]);
        break;
    default:
        break;
    }
#endif

#undef RGPL
#undef RFPL
}

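/* Dump per-opcode execution counts; data is only gathered when the file is
 * built with DO_PPC_STATISTICS defined, otherwise this is a no-op. */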
void cpu_dump_statistics (CPUState *env, FILE*f, fprintf_function cpu_fprintf,
                          int flags)
{
#if defined(DO_PPC_STATISTICS)
    opc_handler_t **t1, **t2, **t3, *handler;
    int op1, op2, op3;

    t1 = env->opcodes;
    for (op1 = 0; op1 < 64; op1++) {
        handler = t1[op1];
        if (is_indirect_opcode(handler)) {
            t2 = ind_table(handler);
            for (op2 = 0; op2 < 32; op2++) {
                handler = t2[op2];
                if (is_indirect_opcode(handler)) {
                    t3 = ind_table(handler);
                    for (op3 = 0; op3 < 32; op3++) {
                        handler = t3[op3];
                        if (handler->count == 0)
                            continue;
                        cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
                                    "%016" PRIx64 " %" PRId64 "\n",
                                    op1, op2, op3, op1, (op3 << 5) | op2,
                                    handler->oname,
                                    handler->count, handler->count);
                    }
                } else {
                    if (handler->count == 0)
                        continue;
                    cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
                                "%016" PRIx64 " %" PRId64 "\n",
                                op1, op2, op1, op2, handler->oname,
                                handler->count, handler->count);
                }
            }
        } else {
            if (handler->count == 0)
                continue;
            cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
                        " %" PRId64 "\n",
                        op1, op1, handler->oname,
                        handler->count, handler->count);
        }
    }
#endif
}

/*****************************************************************************/
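/* Main translation loop: decode guest instructions starting at tb->pc and
 * emit TCG ops until a handler raises an exception, a page boundary is
 * crossed, the op buffer or instruction budget is exhausted, or single
 * stepping forces a stop.  With search_pc set, the nip and icount of every
 * op are also recorded so a host fault can be mapped back to a guest
 * instruction. */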
static inline void gen_intermediate_code_internal(CPUState *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
    DisasContext ctx, *ctxp = &ctx;
    opc_handler_t **table, *handler;
    target_ulong pc_start;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.nip = pc_start;
    ctx.tb = tb;
    ctx.exception = POWERPC_EXCP_NONE;
    ctx.spr_cb = env->spr_cb;
    ctx.mem_idx = env->mmu_idx;
    ctx.access_type = -1;
    ctx.le_mode = env->hflags & (1 << MSR_LE) ? 1 : 0;
#if defined(TARGET_PPC64)
    ctx.sf_mode = msr_sf;
#endif
    ctx.fpu_enabled = msr_fp;
    if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
        ctx.spe_enabled = msr_spe;
    else
        ctx.spe_enabled = 0;
    if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
        ctx.altivec_enabled = msr_vr;
    else
        ctx.altivec_enabled = 0;
    if ((env->flags & POWERPC_FLAG_SE) && msr_se)
        ctx.singlestep_enabled = CPU_SINGLE_STEP;
    else
        ctx.singlestep_enabled = 0;
    if ((env->flags & POWERPC_FLAG_BE) && msr_be)
        ctx.singlestep_enabled |= CPU_BRANCH_STEP;
    if (unlikely(env->singlestep_enabled))
        ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
#if defined (DO_SINGLE_STEP) && 0
    /* Single step trace mode */
    msr_se = 1;
#endif
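    /* Emit at most max_insns guest instructions into this TB; CF_COUNT_MASK
     * means the TB was not requested with an instruction-count limit. */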
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    /* Set env in case of segfault during code fetch */
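    /* Decode one instruction per iteration; stop as soon as a handler has
     * raised an exception or the TCG opcode buffer is nearly full. */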
    while (ctx.exception == POWERPC_EXCP_NONE && gen_opc_ptr < gen_opc_end) {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.nip) {
                    gen_debug_exception(ctxp);
                    break;
                }
            }
        }
        if (unlikely(search_pc)) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.nip;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        LOG_DISAS("----------------\n");
        LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
                  ctx.nip, ctx.mem_idx, (int)msr_ir);
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        if (unlikely(ctx.le_mode)) {
            ctx.opcode = bswap32(ldl_code(ctx.nip));
        } else {
            ctx.opcode = ldl_code(ctx.nip);
        }
        LOG_DISAS("translate opcode %08x (%02x %02x %02x) (%s)\n",
                  ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
                  opc3(ctx.opcode), little_endian ? "little" : "big");
        if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
            tcg_gen_debug_insn_start(ctx.nip);
        ctx.nip += 4;
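        /* Look the instruction up in the opcode table: opc1 indexes the
         * top-level table, and indirect entries are resolved through the
         * nested opc2/opc3 tables. */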
        table = env->opcodes;
        num_insns++;
        handler = table[opc1(ctx.opcode)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc2(ctx.opcode)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc3(ctx.opcode)];
            }
        }
        /* Is opcode *REALLY* valid ? */
        if (unlikely(handler->handler == &gen_invalid)) {
            if (qemu_log_enabled()) {
                qemu_log("invalid/unsupported opcode: "
                         "%02x - %02x - %02x (%08x) " TARGET_FMT_lx " %d\n",
                         opc1(ctx.opcode), opc2(ctx.opcode),
                         opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
            }
        } else {
            if (unlikely((ctx.opcode & handler->inval) != 0)) {
                if (qemu_log_enabled()) {
                    qemu_log("invalid bits: %08x for opcode: "
                             "%02x - %02x - %02x (%08x) " TARGET_FMT_lx "\n",
                             ctx.opcode & handler->inval, opc1(ctx.opcode),
                             opc2(ctx.opcode), opc3(ctx.opcode),
                             ctx.opcode, ctx.nip - 4);
                }
                gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL);
                break;
            }
        }
        (*(handler->handler))(&ctx);
#if defined(DO_PPC_STATISTICS)
        handler->count++;
#endif
        /* Check trace mode exceptions */
        if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
                     (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
                     ctx.exception != POWERPC_SYSCALL &&
                     ctx.exception != POWERPC_EXCP_TRAP &&
                     ctx.exception != POWERPC_EXCP_BRANCH)) {
            gen_exception(ctxp, POWERPC_EXCP_TRACE);
        } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
                            (env->singlestep_enabled) ||
                            singlestep ||
                            num_insns >= max_insns)) {
            /* if we reach a page boundary or are single stepping, stop
             * generation
             */
            break;
        }
    }
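    /* TB epilogue: chain to the next TB when translation stopped without a
     * pending exception, otherwise exit to the CPU loop (the exception code
     * has already been generated by the handler). */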
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (ctx.exception == POWERPC_EXCP_NONE) {
        gen_goto_tb(&ctx, 0, ctx.nip);
    } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
        if (unlikely(env->singlestep_enabled)) {
            gen_debug_exception(ctxp);
        }
        /* Generate the return instruction */
        tcg_gen_exit_tb(0);
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (unlikely(search_pc)) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.nip - pc_start;
        tb->icount = num_insns;
    }
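    /* Optionally log the guest disassembly of the block that was just
     * translated. */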
#if defined(DEBUG_DISAS)
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        int flags;
        flags = env->bfd_mach;
        flags |= ctx.le_mode << 16;
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.nip - pc_start, flags);
        qemu_log("\n");
    }
#endif
}

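/* Public entry points: gen_intermediate_code() translates a TB normally;
 * gen_intermediate_code_pc() re-translates it with PC tracking so that
 * restore_state_to_opc() can map a host fault back to a guest nip. */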
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
{
    env->nip = gen_opc_pc[pc_pos];
}